gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Copyright (c) 2011-2013, Peter Abeles. All Rights Reserved.
*
* This file is part of BoofCV (http://boofcv.org).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package boofcv.alg.filter.convolve.down;
import boofcv.struct.convolve.Kernel1D_F32;
import boofcv.struct.convolve.Kernel1D_I32;
import boofcv.struct.convolve.Kernel2D_F32;
import boofcv.struct.convolve.Kernel2D_I32;
import boofcv.struct.image.*;
/**
* <p>
* Down convolution with kernel renormalization around image borders. Unoptimized naive implementation.
* </p>
*
* <p>
* NOTE: Do not modify. Automatically generated by {@link boofcv.alg.filter.convolve.normalized.GenerateConvolveNormalizedNaive}.
* </p>
*
* @author Peter Abeles
*/
public class ConvolveDownNormalizedNaive {

    /**
     * Horizontal 1D down-convolution of a float image: only every 'skip'-th column is
     * convolved, written to column x/skip of the output. Near the image border the kernel
     * is clipped and the sum is renormalized by the weight actually used ('div').
     */
    public static void horizontal(Kernel1D_F32 kernel, ImageFloat32 input, ImageFloat32 output , int skip ) {
        final int radius = kernel.getRadius();
        // only process the portion of the image that divides evenly by 'skip'
        final int width = input.width - input.width % skip;
        final int height = input.height;
        for (int y = 0; y < height; y++) {
            for( int x = 0; x < width; x += skip ) {
                float total = 0;
                float div = 0;
                // clip the kernel's extent against the image border
                int startX = x - radius;
                int endX = x + radius;
                if( startX < 0 ) startX = 0;
                if( endX >= input.width ) endX = input.width-1;
                for( int j = startX; j <= endX; j++ ) {
                    float v = kernel.get(j-x+radius);
                    total += input.get(j,y)*v;
                    div += v;
                }
                // renormalize by the portion of the kernel inside the image
                output.set(x/skip,y, total/div );
            }
        }
    }

    /**
     * Vertical 1D down-convolution of a float image; the transpose of
     * {@link #horizontal(Kernel1D_F32, ImageFloat32, ImageFloat32, int)}.
     */
    public static void vertical(Kernel1D_F32 kernel, ImageFloat32 input, ImageFloat32 output , int skip ) {
        final int radius = kernel.getRadius();
        final int width = input.width;
        // only process the portion of the image that divides evenly by 'skip'
        final int height = input.height - input.height % skip;
        for (int y = 0; y < height; y += skip) {
            for( int x = 0; x < width; x++ ) {
                float total = 0;
                float div = 0;
                // clip the kernel's extent against the image border
                int startY = y - radius;
                int endY = y + radius;
                if( startY < 0 ) startY = 0;
                if( endY >= input.height ) endY = input.height-1;
                for( int i = startY; i <= endY; i++ ) {
                    float v = kernel.get(i-y+radius);
                    total += input.get(x,i)*v;
                    div += v;
                }
                output.set(x,y/skip, total/div );
            }
        }
    }

    /**
     * 2D down-convolution of a float image. Both axes are sub-sampled by 'skip'; the
     * kernel is clipped at the borders and the result renormalized by the used weight.
     */
    public static void convolve(Kernel2D_F32 kernel, ImageFloat32 input, ImageFloat32 output , int skip ) {
        final int radius = kernel.getRadius();
        final int width = input.width - input.width % skip;
        final int height = input.height - input.height % skip;
        for (int y = 0; y < height; y += skip ) {
            for( int x = 0; x < width; x += skip ) {
                // clip the kernel's 2D extent against the image border
                int startX = x - radius;
                int endX = x + radius;
                if( startX < 0 ) startX = 0;
                if( endX >= input.width ) endX = input.width-1;
                int startY = y - radius;
                int endY = y + radius;
                if( startY < 0 ) startY = 0;
                if( endY >= input.height ) endY = input.height-1;
                float total = 0;
                float div = 0;
                for( int i = startY; i <= endY; i++ ) {
                    for( int j = startX; j <= endX; j++ ) {
                        float v = kernel.get(j-x+radius,i-y+radius);
                        total += input.get(j,i)*v;
                        div += v;
                    }
                }
                output.set(x/skip,y/skip, total/div );
            }
        }
    }

    /**
     * Unsigned 8-bit variant of the horizontal down-convolution. Note that 'total/div'
     * is integer division, so results are truncated toward zero.
     */
    public static void horizontal(Kernel1D_I32 kernel, ImageUInt8 input, ImageInt8 output , int skip ) {
        final int radius = kernel.getRadius();
        final int width = input.width - input.width % skip;
        final int height = input.height;
        for (int y = 0; y < height; y++) {
            for( int x = 0; x < width; x += skip ) {
                int total = 0;
                int div = 0;
                // clip the kernel's extent against the image border
                int startX = x - radius;
                int endX = x + radius;
                if( startX < 0 ) startX = 0;
                if( endX >= input.width ) endX = input.width-1;
                for( int j = startX; j <= endX; j++ ) {
                    int v = kernel.get(j-x+radius);
                    total += input.get(j,y)*v;
                    div += v;
                }
                // integer division performs the renormalization (truncating)
                output.set(x/skip,y, total/div );
            }
        }
    }

    /**
     * Unsigned 8-bit variant of the vertical down-convolution; integer arithmetic.
     */
    public static void vertical(Kernel1D_I32 kernel, ImageUInt8 input, ImageInt8 output , int skip ) {
        final int radius = kernel.getRadius();
        final int width = input.width;
        final int height = input.height - input.height % skip;
        for (int y = 0; y < height; y += skip) {
            for( int x = 0; x < width; x++ ) {
                int total = 0;
                int div = 0;
                // clip the kernel's extent against the image border
                int startY = y - radius;
                int endY = y + radius;
                if( startY < 0 ) startY = 0;
                if( endY >= input.height ) endY = input.height-1;
                for( int i = startY; i <= endY; i++ ) {
                    int v = kernel.get(i-y+radius);
                    total += input.get(x,i)*v;
                    div += v;
                }
                output.set(x,y/skip, total/div );
            }
        }
    }

    /**
     * Unsigned 8-bit variant of the 2D down-convolution; integer arithmetic.
     */
    public static void convolve(Kernel2D_I32 kernel, ImageUInt8 input, ImageInt8 output , int skip ) {
        final int radius = kernel.getRadius();
        final int width = input.width - input.width % skip;
        final int height = input.height - input.height % skip;
        for (int y = 0; y < height; y += skip ) {
            for( int x = 0; x < width; x += skip ) {
                // clip the kernel's 2D extent against the image border
                int startX = x - radius;
                int endX = x + radius;
                if( startX < 0 ) startX = 0;
                if( endX >= input.width ) endX = input.width-1;
                int startY = y - radius;
                int endY = y + radius;
                if( startY < 0 ) startY = 0;
                if( endY >= input.height ) endY = input.height-1;
                int total = 0;
                int div = 0;
                for( int i = startY; i <= endY; i++ ) {
                    for( int j = startX; j <= endX; j++ ) {
                        int v = kernel.get(j-x+radius,i-y+radius);
                        total += input.get(j,i)*v;
                        div += v;
                    }
                }
                output.set(x/skip,y/skip, total/div );
            }
        }
    }

    /**
     * Signed 16-bit variant of the horizontal down-convolution; integer arithmetic.
     */
    public static void horizontal(Kernel1D_I32 kernel, ImageSInt16 input, ImageInt16 output , int skip ) {
        final int radius = kernel.getRadius();
        final int width = input.width - input.width % skip;
        final int height = input.height;
        for (int y = 0; y < height; y++) {
            for( int x = 0; x < width; x += skip ) {
                int total = 0;
                int div = 0;
                // clip the kernel's extent against the image border
                int startX = x - radius;
                int endX = x + radius;
                if( startX < 0 ) startX = 0;
                if( endX >= input.width ) endX = input.width-1;
                for( int j = startX; j <= endX; j++ ) {
                    int v = kernel.get(j-x+radius);
                    total += input.get(j,y)*v;
                    div += v;
                }
                output.set(x/skip,y, total/div );
            }
        }
    }

    /**
     * Signed 16-bit variant of the vertical down-convolution; integer arithmetic.
     */
    public static void vertical(Kernel1D_I32 kernel, ImageSInt16 input, ImageInt16 output , int skip ) {
        final int radius = kernel.getRadius();
        final int width = input.width;
        final int height = input.height - input.height % skip;
        for (int y = 0; y < height; y += skip) {
            for( int x = 0; x < width; x++ ) {
                int total = 0;
                int div = 0;
                // clip the kernel's extent against the image border
                int startY = y - radius;
                int endY = y + radius;
                if( startY < 0 ) startY = 0;
                if( endY >= input.height ) endY = input.height-1;
                for( int i = startY; i <= endY; i++ ) {
                    int v = kernel.get(i-y+radius);
                    total += input.get(x,i)*v;
                    div += v;
                }
                output.set(x,y/skip, total/div );
            }
        }
    }

    /**
     * Signed 16-bit variant of the 2D down-convolution; integer arithmetic.
     */
    public static void convolve(Kernel2D_I32 kernel, ImageSInt16 input, ImageInt16 output , int skip ) {
        final int radius = kernel.getRadius();
        final int width = input.width - input.width % skip;
        final int height = input.height - input.height % skip;
        for (int y = 0; y < height; y += skip ) {
            for( int x = 0; x < width; x += skip ) {
                // clip the kernel's 2D extent against the image border
                int startX = x - radius;
                int endX = x + radius;
                if( startX < 0 ) startX = 0;
                if( endX >= input.width ) endX = input.width-1;
                int startY = y - radius;
                int endY = y + radius;
                if( startY < 0 ) startY = 0;
                if( endY >= input.height ) endY = input.height-1;
                int total = 0;
                int div = 0;
                for( int i = startY; i <= endY; i++ ) {
                    for( int j = startX; j <= endX; j++ ) {
                        int v = kernel.get(j-x+radius,i-y+radius);
                        total += input.get(j,i)*v;
                        div += v;
                    }
                }
                output.set(x/skip,y/skip, total/div );
            }
        }
    }
}
| |
package squidpony.squidai;
import squidpony.squidgrid.FOV;
import squidpony.squidgrid.Radius;
import squidpony.squidgrid.mapping.DungeonUtility;
import squidpony.squidmath.Coord;
import java.util.*;
/**
* An AOE type that has a center and a radius, and uses shadowcasting to create a burst of rays from the center, out to
* the distance specified by radius. You can specify the RadiusType to Radius.DIAMOND for Manhattan distance,
* RADIUS.SQUARE for Chebyshev, or RADIUS.CIRCLE for Euclidean.
*
* This will produce doubles for its findArea() method which are equal to 1.0.
*
* This class uses squidpony.squidgrid.FOV to create its area of effect.
* Created by Tommy Ettinger on 7/13/2015.
*/
public class BurstAOE implements AOE {
    private FOV fov;                       // shadowcasting FOV used to compute the burst area
    private Coord center, origin;          // center of the burst; origin of the user (for limit/range checks)
    private int radius;
    private double[][] map;                // resistance map derived from the dungeon chars (see setMap)
    private char[][] dungeon;
    private Radius radiusType, limitType;  // burst shape metric; optional aiming constraint (may be null)
    private int minRange = 1, maxRange = 1;
    private Radius metric = Radius.SQUARE; // metric for min/max range checks; SQUARE is Chebyshev

    /**
     * Constructs a BurstAOE centered on {@code center} reaching out {@code radius} cells, shaped
     * by {@code radiusType}. minRange and maxRange both default to 1.
     */
    public BurstAOE(Coord center, int radius, Radius radiusType)
    {
        fov = new FOV(FOV.SHADOW);
        this.center = center;
        this.radius = radius;
        this.radiusType = radiusType;
    }

    /**
     * Constructs a BurstAOE with explicit minimum and maximum aiming range from the origin.
     */
    public BurstAOE(Coord center, int radius, Radius radiusType, int minRange, int maxRange)
    {
        fov = new FOV(FOV.SHADOW);
        this.center = center;
        this.radius = radius;
        this.radiusType = radiusType;
        this.minRange = minRange;
        this.maxRange = maxRange;
    }

    // no-arg constructor with placeholder values; kept private, used internally only
    private BurstAOE()
    {
        fov = new FOV(FOV.SHADOW);
        center = Coord.get(1, 1);
        radius = 1;
        radiusType = Radius.DIAMOND;
    }

    public Coord getCenter() {
        return center;
    }

    /**
     * Moves the center, but only if the new position passes the limitType/origin constraint;
     * otherwise the center is silently left unchanged.
     */
    public void setCenter(Coord center) {
        if (AreaUtils.verifyLimit(limitType, origin, center))
        {
            this.center = center;
        }
    }

    public int getRadius() {
        return radius;
    }

    public void setRadius(int radius) {
        this.radius = radius;
    }

    public Radius getRadiusType() {
        return radiusType;
    }

    public void setRadiusType(Radius radiusType) {
        this.radiusType = radiusType;
    }

    /** Re-aims this AOE at {@code aim}; subject to the same limit check as setCenter. */
    @Override
    public void shift(Coord aim) {
        setCenter(aim);
    }
@Override
public boolean mayContainTarget(Set<Coord> targets) {
for (Coord p : targets)
{
if(radiusType.radius(center.x, center.y, p.x, p.y) <= radius)
return true;
}
return false;
}
@Override
public LinkedHashMap<Coord, ArrayList<Coord>> idealLocations(Set<Coord> targets, Set<Coord> requiredExclusions) {
if(targets == null)
return new LinkedHashMap<Coord, ArrayList<Coord>>();
if(requiredExclusions == null) requiredExclusions = new LinkedHashSet<Coord>();
//requiredExclusions.remove(origin);
int totalTargets = targets.size();
LinkedHashMap<Coord, ArrayList<Coord>> bestPoints = new LinkedHashMap<Coord, ArrayList<Coord>>(totalTargets * 8);
if(totalTargets == 0)
return bestPoints;
if(radius == 0)
{
for(Coord p : targets)
{
ArrayList<Coord> ap = new ArrayList<Coord>();
ap.add(p);
bestPoints.put(p, ap);
}
return bestPoints;
}
Coord[] ts = targets.toArray(new Coord[targets.size()]);
Coord[] exs = requiredExclusions.toArray(new Coord[requiredExclusions.size()]);
Coord t = exs[0];
double[][][] compositeMap = new double[ts.length][dungeon.length][dungeon[0].length];
char[][] dungeonCopy = new char[dungeon.length][dungeon[0].length];
for (int i = 0; i < dungeon.length; i++) {
System.arraycopy(dungeon[i], 0, dungeonCopy[i], 0, dungeon[i].length);
}
double[][] tmpfov;
Coord tempPt = Coord.get(0, 0);
for (int i = 0; i < exs.length; ++i) {
t = exs[i];
tmpfov = fov.calculateFOV(map, t.x, t.y, radius, radiusType);
for (int x = 0; x < dungeon.length; x++) {
tempPt = tempPt.setX(x);
for (int y = 0; y < dungeon[x].length; y++) {
tempPt = tempPt.setY(y);
dungeonCopy[x][y] = (tmpfov[x][y] > 0.0 || !AreaUtils.verifyLimit(limitType, origin, tempPt)) ? '!' : dungeonCopy[x][y];
}
}
}
t = ts[0];
DijkstraMap.Measurement dmm = DijkstraMap.Measurement.MANHATTAN;
if(radiusType == Radius.SQUARE || radiusType == Radius.CUBE) dmm = DijkstraMap.Measurement.CHEBYSHEV;
else if(radiusType == Radius.CIRCLE || radiusType == Radius.SPHERE) dmm = DijkstraMap.Measurement.EUCLIDEAN;
for (int i = 0; i < ts.length; ++i) {
DijkstraMap dm = new DijkstraMap(dungeon, dmm);
t = ts[i];
tmpfov = fov.calculateFOV(map, t.x, t.y, radius, radiusType);
double dist = 0.0;
for (int x = 0; x < dungeon.length; x++) {
for (int y = 0; y < dungeon[x].length; y++) {
if (tmpfov[x][y] > 0.0) {
dist = metric.radius(origin.x, origin.y, x, y);
if(dist <= maxRange + radius && dist >= minRange - radius)
compositeMap[i][x][y] = dm.physicalMap[x][y];
else
compositeMap[i][x][y] = DijkstraMap.WALL;
}
else compositeMap[i][x][y] = DijkstraMap.WALL;
}
}
if(compositeMap[i][ts[i].x][ts[i].y] > DijkstraMap.FLOOR)
{
for (int x = 0; x < dungeon.length; x++) {
Arrays.fill(compositeMap[i][x], 99999.0);
}
continue;
}
dm.initialize(compositeMap[i]);
dm.setGoal(t);
dm.scan(null);
for (int x = 0; x < dungeon.length; x++) {
for (int y = 0; y < dungeon[x].length; y++) {
compositeMap[i][x][y] = (dm.gradientMap[x][y] < DijkstraMap.FLOOR && dungeonCopy[x][y] != '!') ? dm.gradientMap[x][y] : 99999.0;
}
}
}
double bestQuality = 99999 * ts.length;
double[][] qualityMap = new double[dungeon.length][dungeon[0].length];
for (int x = 0; x < qualityMap.length; x++) {
for (int y = 0; y < qualityMap[x].length; y++) {
qualityMap[x][y] = 0.0;
long bits = 0;
for (int i = 0; i < ts.length; ++i) {
qualityMap[x][y] += compositeMap[i][x][y];
if(compositeMap[i][x][y] < 99999.0 && i < 63)
bits |= 1 << i;
}
if(qualityMap[x][y] < bestQuality)
{
ArrayList<Coord> ap = new ArrayList<Coord>();
for (int i = 0; i < ts.length && i < 63; ++i) {
if((bits & (1 << i)) != 0)
ap.add(ts[i]);
}
if(ap.size() > 0) {
bestQuality = qualityMap[x][y];
bestPoints.clear();
bestPoints.put(Coord.get(x, y), ap);
} }
else if(qualityMap[x][y] == bestQuality)
{
ArrayList<Coord> ap = new ArrayList<Coord>();
for (int i = 0; i < ts.length && i < 63; ++i) {
if((bits & (1 << i)) != 0)
ap.add(ts[i]);
}
if (ap.size() > 0) {
bestPoints.put(Coord.get(x, y), ap);
}
}
}
}
return bestPoints;
}
@Override
public LinkedHashMap<Coord, ArrayList<Coord>> idealLocations(Set<Coord> priorityTargets, Set<Coord> lesserTargets, Set<Coord> requiredExclusions) {
if(priorityTargets == null)
return idealLocations(lesserTargets, requiredExclusions);
if(requiredExclusions == null) requiredExclusions = new LinkedHashSet<Coord>();
//requiredExclusions.remove(origin);
int totalTargets = priorityTargets.size() + lesserTargets.size();
LinkedHashMap<Coord, ArrayList<Coord>> bestPoints = new LinkedHashMap<Coord, ArrayList<Coord>>(totalTargets * 8);
if(totalTargets == 0)
return bestPoints;
if(radius == 0)
{
for(Coord p : priorityTargets)
{
ArrayList<Coord> ap = new ArrayList<Coord>();
ap.add(p);
bestPoints.put(p, ap);
}
return bestPoints;
}
Coord[] pts = priorityTargets.toArray(new Coord[priorityTargets.size()]);
Coord[] lts = lesserTargets.toArray(new Coord[lesserTargets.size()]);
Coord[] exs = requiredExclusions.toArray(new Coord[requiredExclusions.size()]);
Coord t = exs[0];
double[][][] compositeMap = new double[totalTargets][dungeon.length][dungeon[0].length];
char[][] dungeonCopy = new char[dungeon.length][dungeon[0].length],
dungeonPriorities = new char[dungeon.length][dungeon[0].length];
for (int i = 0; i < dungeon.length; i++) {
System.arraycopy(dungeon[i], 0, dungeonCopy[i], 0, dungeon[i].length);
Arrays.fill(dungeonPriorities[i], '#');
}
double[][] tmpfov;
Coord tempPt = Coord.get(0, 0);
for (int i = 0; i < exs.length; ++i) {
t = exs[i];
tmpfov = fov.calculateFOV(map, t.x, t.y, radius, radiusType);
for (int x = 0; x < dungeon.length; x++) {
tempPt = tempPt.setX(x);
for (int y = 0; y < dungeon[x].length; y++) {
tempPt = tempPt.setY(y);
dungeonCopy[x][y] = (tmpfov[x][y] > 0.0 || !AreaUtils.verifyLimit(limitType, origin, tempPt)) ? '!' : dungeonCopy[x][y];
}
}
}
t = pts[0];
DijkstraMap.Measurement dmm = DijkstraMap.Measurement.MANHATTAN;
if(radiusType == Radius.SQUARE || radiusType == Radius.CUBE) dmm = DijkstraMap.Measurement.CHEBYSHEV;
else if(radiusType == Radius.CIRCLE || radiusType == Radius.SPHERE) dmm = DijkstraMap.Measurement.EUCLIDEAN;
for (int i = 0; i < pts.length; ++i) {
DijkstraMap dm = new DijkstraMap(dungeon, dmm);
t = pts[i];
tmpfov = fov.calculateFOV(map, t.x, t.y, radius, radiusType);
double dist = 0.0;
for (int x = 0; x < dungeon.length; x++) {
for (int y = 0; y < dungeon[x].length; y++) {
if (tmpfov[x][y] > 0.0){
dist = metric.radius(origin.x, origin.y, x, y);
if(dist <= maxRange + radius && dist >= minRange - radius) {
compositeMap[i][x][y] = dm.physicalMap[x][y];
dungeonPriorities[x][y] = dungeon[x][y];
}
else
compositeMap[i][x][y] = DijkstraMap.WALL;
}
else compositeMap[i][x][y] = DijkstraMap.WALL;
}
}
if(compositeMap[i][t.x][t.y] > DijkstraMap.FLOOR)
{
for (int x = 0; x < dungeon.length; x++) {
Arrays.fill(compositeMap[i][x], 399999.0);
}
continue;
}
dm.initialize(compositeMap[i]);
dm.setGoal(t);
dm.scan(null);
for (int x = 0; x < dungeon.length; x++) {
for (int y = 0; y < dungeon[x].length; y++) {
compositeMap[i][x][y] = (dm.gradientMap[x][y] < DijkstraMap.FLOOR && dungeonCopy[x][y] != '!') ? dm.gradientMap[x][y] : 399999.0;
}
}
dm.resetMap();
dm.clearGoals();
}
t = lts[0];
for (int i = pts.length; i < totalTargets; ++i) {
DijkstraMap dm = new DijkstraMap(dungeon, dmm);
t = lts[i - pts.length];
tmpfov = fov.calculateFOV(map, t.x, t.y, radius, radiusType);
double dist = 0.0;
for (int x = 0; x < dungeon.length; x++) {
for (int y = 0; y < dungeon[x].length; y++) {
if (tmpfov[x][y] > 0.0){
dist = metric.radius(origin.x, origin.y, x, y);
if(dist <= maxRange + radius && dist >= minRange - radius)
compositeMap[i][x][y] = dm.physicalMap[x][y];
else
compositeMap[i][x][y] = DijkstraMap.WALL;
}
else compositeMap[i][x][y] = DijkstraMap.WALL;
}
}
if(compositeMap[i][t.x][t.y] > DijkstraMap.FLOOR)
{
for (int x = 0; x < dungeon.length; x++)
{
Arrays.fill(compositeMap[i][x], 99999.0);
}
continue;
}
dm.initialize(compositeMap[i]);
dm.setGoal(t);
dm.scan(null);
for (int x = 0; x < dungeon.length; x++) {
for (int y = 0; y < dungeon[x].length; y++) {
compositeMap[i][x][y] = (dm.gradientMap[x][y] < DijkstraMap.FLOOR && dungeonCopy[x][y] != '!' && dungeonPriorities[x][y] != '#') ? dm.gradientMap[x][y] : 99999.0;
}
}
dm.resetMap();
dm.clearGoals();
}
double bestQuality = 99999 * lts.length + 399999 * pts.length;
double[][] qualityMap = new double[dungeon.length][dungeon[0].length];
for (int x = 0; x < qualityMap.length; x++) {
for (int y = 0; y < qualityMap[x].length; y++) {
qualityMap[x][y] = 0.0;
long pbits = 0, lbits = 0;
for (int i = 0; i < pts.length; ++i) {
qualityMap[x][y] += compositeMap[i][x][y];
if(compositeMap[i][x][y] < 399999.0 && i < 63)
pbits |= 1 << i;
}
for (int i = pts.length; i < totalTargets; ++i) {
qualityMap[x][y] += compositeMap[i][x][y];
if(compositeMap[i][x][y] < 99999.0 && i < 63)
lbits |= 1 << i;
}
if(qualityMap[x][y] < bestQuality)
{
ArrayList<Coord> ap = new ArrayList<Coord>();
for (int i = 0; i < pts.length && i < 63; ++i) {
if((pbits & (1 << i)) != 0)
ap.add(pts[i]);
}
for (int i = pts.length; i < totalTargets && i < 63; ++i) {
if((lbits & (1 << i)) != 0)
ap.add(lts[i - pts.length]);
}
if(ap.size() > 0) {
bestQuality = qualityMap[x][y];
bestPoints.clear();
bestPoints.put(Coord.get(x, y), ap);
}
}
else if(qualityMap[x][y] == bestQuality)
{
ArrayList<Coord> ap = new ArrayList<Coord>();
for (int i = 0; i < pts.length && i < 63; ++i) {
if ((pbits & (1 << i)) != 0) {
ap.add(pts[i]);
ap.add(pts[i]);
ap.add(pts[i]);
ap.add(pts[i]);
}
}
for (int i = pts.length; i < totalTargets && i < 63; ++i) {
if((lbits & (1 << i)) != 0)
ap.add(lts[i - pts.length]);
}
if (ap.size() > 0) {
bestPoints.put(Coord.get(x, y), ap);
}
}
}
}
return bestPoints;
}
/*
@Override
public ArrayList<ArrayList<Coord>> idealLocations(Set<Coord> targets, Set<Coord> requiredExclusions) {
int totalTargets = targets.size() + 1;
int maxEffect = (int)radiusType.volume2D(radius);
ArrayList<ArrayList<Coord>> locs = new ArrayList<ArrayList<Coord>>(totalTargets);
for(int i = 0; i < totalTargets; i++)
{
locs.add(new ArrayList<Coord>(maxEffect));
}
if(totalTargets == 1)
return locs;
int ctr = 0;
if(radius < 1)
{
locs.get(totalTargets - 2).addAll(targets);
return locs;
}
boolean[][] tested = new boolean[dungeon.length][dungeon[0].length];
for (int x = 1; x < dungeon.length - 1; x += radius) {
BY_POINT:
for (int y = 1; y < dungeon[x].length - 1; y += radius) {
for(Coord ex : requiredExclusions)
{
if(radiusType.radius(x, y, ex.x, ex.y) <= radius)
continue BY_POINT;
}
ctr = 0;
for(Coord tgt : targets)
{
if(radiusType.radius(x, y, tgt.x, tgt.y) <= radius)
ctr++;
}
if(ctr > 0)
locs.get(totalTargets - ctr).add(Coord.get(x, y));
}
}
Coord it;
for(int t = 0; t < totalTargets - 1; t++)
{
if(locs.get(t).size() > 0) {
int numPoints = locs.get(t).size();
for (int i = 0; i < numPoints; i++) {
it = locs.get(t).get(i);
for (int x = Math.max(1, it.x - radius / 2); x < it.x + (radius + 1) / 2 && x < dungeon.length - 1; x++) {
BY_POINT:
for (int y = Math.max(1, it.y - radius / 2); y <= it.y + (radius - 1) / 2 && y < dungeon[0].length - 1; y++)
{
if(tested[x][y])
continue;
tested[x][y] = true;
for(Coord ex : requiredExclusions)
{
if(radiusType.radius(x, y, ex.x, ex.y) <= radius)
continue BY_POINT;
}
ctr = 0;
for(Coord tgt : targets)
{
if(radiusType.radius(x, y, tgt.x, tgt.y) <= radius)
ctr++;
}
if(ctr > 0)
locs.get(totalTargets - ctr).add(Coord.get(x, y));
}
}
}
}
}
return locs;
}
*/
    /**
     * Stores the dungeon and derives the FOV resistance map from it. Must be called before
     * idealLocations or findArea, which read both fields.
     */
    @Override
    public void setMap(char[][] map) {
        this.map = DungeonUtility.generateResistances(map);
        this.dungeon = map;
    }

    /**
     * Computes the cells this burst affects from the current center, as a map from Coord to the
     * FOV light value there (per the class docs, affected cells carry 1.0).
     */
    @Override
    public LinkedHashMap<Coord, Double> findArea() {
        return AreaUtils.arrayToHashMap(fov.calculateFOV(map, center.x, center.y, radius, radiusType));
    }

    @Override
    public Coord getOrigin() {
        return origin;
    }

    @Override
    public void setOrigin(Coord origin) {
        this.origin = origin;
    }

    @Override
    public Radius getLimitType() {
        return limitType;
    }

    @Override
    public int getMinRange() {
        return minRange;
    }

    @Override
    public int getMaxRange() {
        return maxRange;
    }

    @Override
    public Radius getMetric() {
        return metric;
    }

    @Override
    public void setLimitType(Radius limitType) {
        this.limitType = limitType;
    }

    @Override
    public void setMinRange(int minRange) {
        this.minRange = minRange;
    }

    @Override
    public void setMaxRange(int maxRange) {
        this.maxRange = maxRange;
    }

    @Override
    public void setMetric(Radius metric) {
        this.metric = metric;
    }
}
| |
//
// Treasure Data Bulk-Import Tool in Java
//
// Copyright (C) 2012 - 2013 Muga Nishizawa
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package com.treasure_data.td_import.upload;
import java.util.Properties;
import com.treasure_data.td_import.Options;
import com.treasure_data.td_import.Configuration;
import com.treasuredata.client.TDClient;
public class UploadConfiguration extends UploadConfigurationBase {
    /**
     * Builds UploadConfiguration instances that share a single Options object initialized
     * from the given Properties.
     */
    public static class Factory {
        protected Options options;

        public Factory(Properties props) {
            options = new Options();
            options.initUploadOptionParser(props);
        }

        public Options getBulkImportOptions() {
            return options;
        }

        /** Parses the command-line args into the shared Options and wraps them in a new config. */
        public UploadConfiguration newUploadConfiguration(String[] args) {
            options.setOptions(args);
            UploadConfiguration c = new UploadConfiguration();
            c.options = options;
            return c;
        }
    }

    protected boolean autoCreate = false;
    protected String[] enableMake = null;   // {database, table} pair parsed from the auto-create option
    protected boolean autoPerform = false;
    protected boolean autoCommit = false;
    protected boolean autoDelete = false;
    protected long waitSec;                 // NOTE(review): never assigned in this class except via clone() — confirm it is set elsewhere

    public UploadConfiguration() {
        super();
    }

    /** Creates an upload processor bound to a fresh TDClient and this configuration. */
    @Override
    public UploadProcessorBase createNewUploadProcessor() {
        TDClient c = createTDClient();
        return new UploadProcessor(c, this);
    }
@Override
public String showHelp(Properties props) {
boolean isAuto = Boolean.parseBoolean(props.getProperty(CMD_AUTO_ENABLE, "false"));
StringBuilder sbuf = new StringBuilder();
// usage
sbuf.append("usage:\n");
if (isAuto) {
sbuf.append(Configuration.CMD_AUTO_USAGE);
} else {
sbuf.append(Configuration.CMD_UPLOAD_USAGE);
}
sbuf.append("\n");
// example
sbuf.append("example:\n");
if (isAuto) {
sbuf.append(Configuration.CMD_AUTO_EXAMPLE);
} else {
sbuf.append(Configuration.CMD_UPLOAD_EXAMPLE);
}
sbuf.append("\n");
// description
sbuf.append("description:\n");
if (isAuto) {
sbuf.append(Configuration.CMD_AUTO_DESC);
} else {
sbuf.append(Configuration.CMD_UPLOAD_DESC);
}
sbuf.append("\n");
// options
sbuf.append("options:\n");
if (isAuto) {
sbuf.append(Configuration.CMD_AUTO_OPTIONS);
} else {
sbuf.append(Configuration.CMD_UPLOAD_OPTIONS);
}
sbuf.append("\n");
return sbuf.toString();
}
    /**
     * Applies the parsed command-line options on top of the base configuration, reading the
     * four auto-* flags from the option set.
     */
    public void configure(Properties props, Options options) {
        super.configure(props, options);

        // auto-create-session
        setAutoCreate();

        // auto-perform
        setAutoPerform();

        // auto-commit
        setAutoCommit();

        // auto-delete-session
        setAutoDelete();
    }

    public void setAutoPerform() {
        autoPerform = optionSet.has(BI_UPLOAD_PARTS_AUTO_PERFORM);
    }

    public boolean autoPerform() {
        return autoPerform;
    }

    public void setAutoCommit() {
        autoCommit = optionSet.has(BI_UPLOAD_PARTS_AUTO_COMMIT);
    }

    public boolean autoCommit() {
        return autoCommit;
    }

    /**
     * Reads the auto-create option; its argument must be exactly a database and table name
     * pair, otherwise an IllegalArgumentException is thrown.
     */
    public void setAutoCreate() {
        if (optionSet.has(BI_UPLOAD_PARTS_AUTO_CREATE)) {
            autoCreate = true;
            enableMake = optionSet.valuesOf(BI_UPLOAD_PARTS_AUTO_CREATE).toArray(new String[0]);
            if (enableMake.length != 2) {
                throw new IllegalArgumentException(String.format(
                        "'%s' option argument must consists of database and table names e.g. 'testdb:testtbl'",
                        BI_UPLOAD_PARTS_AUTO_CREATE));
            }
        }
    }

    public boolean autoCreate() {
        return autoCreate;
    }

    public String[] enableMake() {
        return enableMake;
    }

    /**
     * Reads the auto-delete option; it is only legal in combination with auto-commit, since
     * deleting the session without committing would discard the data.
     */
    public void setAutoDelete() {
        boolean ad = optionSet.has(BI_UPLOAD_PARTS_AUTO_DELETE);
        if (ad) {
            if (autoCommit) {
                autoDelete = ad;
            } else {
                throw new IllegalArgumentException(String.format(
                        "'%s' option cannot be used without '%s' option.",
                        BI_UPLOAD_PARTS_AUTO_DELETE, BI_UPLOAD_PARTS_AUTO_COMMIT));
            }
        }
    }

    public boolean autoDelete() {
        return autoDelete;
    }
    /**
     * Parses the parallel-upload option into a thread count, clamped to the range the tool
     * supports: negative values fall back to 2, values above 9 are capped at 8, and a
     * non-numeric argument raises IllegalArgumentException.
     */
    @Override
    public void setNumOfUploadThreads() {
        String num;
        if (!optionSet.has(BI_UPLOAD_PARTS_PARALLEL)) {
            num = BI_UPLOAD_PARTS_PARALLEL_DEFAULTVALUE;
        } else {
            num = (String) optionSet.valueOf(BI_UPLOAD_PARTS_PARALLEL);
        }

        try {
            int n = Integer.parseInt(num);
            if (n < 0) {
                numOfUploadThreads = 2;
            } else if (n > 9){
                numOfUploadThreads = 8;
            } else {
                numOfUploadThreads = n;
            }
        } catch (NumberFormatException e) {
            String msg = String.format(
                    "'int' value is required as '%s' option",
                    BI_UPLOAD_PARTS_PARALLEL);
            throw new IllegalArgumentException(msg, e);
        }
    }

    // NOTE(review): waitSec is never assigned in this class (only copied by clone()) — confirm
    // the base class or a caller sets it before this is read.
    public long getWaitSec() {
        return waitSec;
    }
public Object clone() {
UploadConfiguration conf = new UploadConfiguration();
conf.props = props;
conf.autoCreate = autoCreate;
conf.autoPerform = autoPerform;
conf.autoCommit = autoCommit;
conf.autoDelete = autoDelete;
conf.numOfUploadThreads = numOfUploadThreads;
conf.retryCount= retryCount;
conf.waitSec = waitSec;
return conf;
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.discovery.zen.fd;
import org.elasticsearch.ElasticsearchIllegalStateException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.*;
import java.io.IOException;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CopyOnWriteArrayList;
import static org.elasticsearch.cluster.node.DiscoveryNodes.EMPTY_NODES;
import static org.elasticsearch.common.util.concurrent.ConcurrentCollections.newConcurrentMap;
import static org.elasticsearch.transport.TransportRequestOptions.options;
/**
* A fault detection of multiple nodes.
*/
public class NodesFaultDetection extends FaultDetection {
public static final String PING_ACTION_NAME = "internal:discovery/zen/fd/ping";
    /**
     * Callback for fault-detection events; subclasses override the events they care about.
     * Callbacks are invoked on the generic thread pool (see notifyNodeFailure/notifyPingReceived).
     */
    public abstract static class Listener {

        public void onNodeFailure(DiscoveryNode node, String reason) {}

        public void onPingReceived(PingRequest pingRequest) {}
    }

    private final CopyOnWriteArrayList<Listener> listeners = new CopyOnWriteArrayList<>();

    // per-node ping state; a key's presence means that node is currently being monitored
    private final ConcurrentMap<DiscoveryNode, NodeFD> nodesFD = newConcurrentMap();

    private volatile DiscoveryNodes latestNodes = EMPTY_NODES;

    private volatile long clusterStateVersion = ClusterState.UNKNOWN_VERSION;

    // flipped by start()/stop(); pingers check it and bail out once monitoring is stopped
    private volatile boolean running = false;

    public NodesFaultDetection(Settings settings, ThreadPool threadPool, TransportService transportService, ClusterName clusterName) {
        super(settings, threadPool, transportService, clusterName);

        logger.debug("[node  ] uses ping_interval [{}], ping_timeout [{}], ping_retries [{}]", pingInterval, pingRetryTimeout, pingRetryCount);

        // serve incoming pings from other nodes
        transportService.registerHandler(PING_ACTION_NAME, new PingRequestHandler());
    }

    public void addListener(Listener listener) {
        listeners.add(listener);
    }

    public void removeListener(Listener listener) {
        listeners.remove(listener);
    }
    /**
     * Applies a new cluster-state node list: starts pinging nodes added since the previous
     * list (except the local node) and drops monitoring state for removed nodes. No-ops
     * (beyond recording the new list) while not running.
     */
    public void updateNodes(DiscoveryNodes nodes, long clusterStateVersion) {
        DiscoveryNodes prevNodes = latestNodes;
        this.latestNodes = nodes;
        this.clusterStateVersion = clusterStateVersion;
        if (!running) {
            return;
        }
        DiscoveryNodes.Delta delta = nodes.delta(prevNodes);
        for (DiscoveryNode newNode : delta.addedNodes()) {
            if (newNode.id().equals(nodes.localNodeId())) {
                // no need to monitor the local node
                continue;
            }
            if (!nodesFD.containsKey(newNode)) {
                nodesFD.put(newNode, new NodeFD());
                // we use schedule with a 0 time value to run the pinger on the pool as it will run on later
                threadPool.schedule(TimeValue.timeValueMillis(0), ThreadPool.Names.SAME, new SendPingRequest(newNode));
            }
        }
        for (DiscoveryNode removedNode : delta.removedNodes()) {
            nodesFD.remove(removedNode);
        }
    }

    // NOTE(review): start/stop are plain check-then-set on a volatile flag — presumably only
    // called from a single coordinating thread; confirm no concurrent callers.
    public NodesFaultDetection start() {
        if (running) {
            return this;
        }
        running = true;
        return this;
    }

    public NodesFaultDetection stop() {
        if (!running) {
            return this;
        }
        running = false;
        return this;
    }

    /** Stops monitoring and unregisters the ping handler; called on node shutdown. */
    public void close() {
        super.close();
        stop();
        transportService.removeHandler(PING_ACTION_NAME);
    }
    /**
     * Reacts to a transport-layer disconnect from a monitored node. If configured to verify
     * via reconnect, one reconnection attempt is made and monitoring restarts on success;
     * otherwise (or on reconnect failure) the node is reported as failed to listeners.
     */
    @Override
    protected void handleTransportDisconnect(DiscoveryNode node) {
        if (!latestNodes.nodeExists(node.id())) {
            return;
        }
        NodeFD nodeFD = nodesFD.remove(node);
        if (nodeFD == null) {
            return;
        }
        if (!running) {
            return;
        }
        // stop the old pinger for this node before deciding what to do
        nodeFD.running = false;
        if (connectOnNetworkDisconnect) {
            try {
                transportService.connectToNode(node);
                nodesFD.put(node, new NodeFD());
                // we use schedule with a 0 time value to run the pinger on the pool as it will run on later
                threadPool.schedule(TimeValue.timeValueMillis(0), ThreadPool.Names.SAME, new SendPingRequest(node));
            } catch (Exception e) {
                logger.trace("[node  ] [{}] transport disconnected (with verified connect)", node);
                notifyNodeFailure(node, "transport disconnected (with verified connect)");
            }
        } else {
            logger.trace("[node  ] [{}] transport disconnected", node);
            notifyNodeFailure(node, "transport disconnected");
        }
    }

    // Listener callbacks are dispatched asynchronously on the generic pool so alien listener
    // code never runs on transport/scheduler threads.
    private void notifyNodeFailure(final DiscoveryNode node, final String reason) {
        threadPool.generic().execute(new Runnable() {
            @Override
            public void run() {
                for (Listener listener : listeners) {
                    listener.onNodeFailure(node, reason);
                }
            }
        });
    }

    private void notifyPingReceived(final PingRequest pingRequest) {
        threadPool.generic().execute(new Runnable() {
            @Override
            public void run() {
                for (Listener listener : listeners) {
                    listener.onPingReceived(pingRequest);
                }
            }
        });
    }
/**
 * One ping round against a single monitored node. On a successful response
 * the retry counter is reset and this task reschedules itself after
 * {@code pingInterval}; on failure the request is resent immediately
 * (relying on the send timeout) until {@code pingRetryCount} consecutive
 * failures, at which point the node is declared failed.
 */
private class SendPingRequest implements Runnable {
    // the node this pinger is responsible for
    private final DiscoveryNode node;

    private SendPingRequest(DiscoveryNode node) {
        this.node = node;
    }

    @Override
    public void run() {
        // bail out if the whole service has been stopped
        if (!running) {
            return;
        }
        final PingRequest pingRequest = new PingRequest(node.id(), clusterName, latestNodes.localNode(), clusterStateVersion);
        final TransportRequestOptions options = options().withType(TransportRequestOptions.Type.PING).withTimeout(pingRetryTimeout);
        transportService.sendRequest(node, PING_ACTION_NAME, pingRequest, options, new BaseTransportResponseHandler<PingResponse>() {
            @Override
            public PingResponse newInstance() {
                return new PingResponse();
            }

            @Override
            public void handleResponse(PingResponse response) {
                if (!running) {
                    return;
                }
                NodeFD nodeFD = nodesFD.get(node);
                if (nodeFD != null) {
                    // a stale pinger (nodeFD.running == false) must not reschedule
                    if (!nodeFD.running) {
                        return;
                    }
                    // healthy response: reset failures and schedule the next round
                    nodeFD.retryCount = 0;
                    threadPool.schedule(pingInterval, ThreadPool.Names.SAME, SendPingRequest.this);
                }
            }

            @Override
            public void handleException(TransportException exp) {
                // check if the master node did not get switched on us...
                if (!running) {
                    return;
                }
                NodeFD nodeFD = nodesFD.get(node);
                if (nodeFD != null) {
                    if (!nodeFD.running) {
                        return;
                    }
                    // connection-level failures go through the disconnect path,
                    // which may attempt a verified reconnect
                    if (exp instanceof ConnectTransportException || exp.getCause() instanceof ConnectTransportException) {
                        handleTransportDisconnect(node);
                        return;
                    }
                    int retryCount = ++nodeFD.retryCount;
                    logger.trace("[node ] failed to ping [{}], retry [{}] out of [{}]", exp, node, retryCount, pingRetryCount);
                    if (retryCount >= pingRetryCount) {
                        logger.debug("[node ] failed to ping [{}], tried [{}] times, each with maximum [{}] timeout", node, pingRetryCount, pingRetryTimeout);
                        // not good, failure
                        if (nodesFD.remove(node) != null) {
                            notifyNodeFailure(node, "failed to ping, tried [" + pingRetryCount + "] times, each with maximum [" + pingRetryTimeout + "] timeout");
                        }
                    } else {
                        // resend the request, not reschedule, rely on send timeout
                        transportService.sendRequest(node, PING_ACTION_NAME, pingRequest, options, this);
                    }
                }
            }

            @Override
            public String executor() {
                return ThreadPool.Names.SAME;
            }
        }
        );
    }
}
/**
 * Per-node fault detection state.
 */
static class NodeFD {
    // number of consecutive failed pings against this node
    volatile int retryCount;
    // cleared to cancel the ping loop associated with this entry
    volatile boolean running = true;
}
/**
 * Transport handler for incoming ping requests. Validates that this node
 * is the intended ping target and belongs to the expected cluster before
 * notifying listeners and acknowledging with an empty {@link PingResponse}.
 */
class PingRequestHandler extends BaseTransportRequestHandler<PingRequest> {
    @Override
    public PingRequest newInstance() {
        return new PingRequest();
    }

    @Override
    public void messageReceived(PingRequest request, TransportChannel channel) throws Exception {
        // if we are not the node we are supposed to be pinged, send an exception
        // this can happen when a kill -9 is sent, and another node is started using the same port
        if (!latestNodes.localNodeId().equals(request.nodeId)) {
            throw new ElasticsearchIllegalStateException("Got pinged as node [" + request.nodeId + "], but I am node [" + latestNodes.localNodeId() + "]");
        }
        // PingRequest will have clusterName set to null if it came from a node of version <1.4.0
        if (request.clusterName != null && !request.clusterName.equals(clusterName)) {
            // Don't introduce new exception for bwc reasons
            throw new ElasticsearchIllegalStateException("Got pinged with cluster name [" + request.clusterName + "], but I'm part of cluster [" + clusterName + "]");
        }
        notifyPingReceived(request);
        channel.sendResponse(new PingResponse());
    }

    @Override
    public String executor() {
        // validation is cheap: handle on the calling transport thread
        return ThreadPool.Names.SAME;
    }
}
/**
 * Ping sent periodically to each monitored node. Carries the (assumed) id
 * of the target node plus the sender's cluster name, master node and
 * cluster state version so the receiver can sanity-check the request.
 */
public static class PingRequest extends TransportRequest {
    // the (assumed) node id we are pinging
    private String nodeId;
    private ClusterName clusterName;
    private DiscoveryNode masterNode;
    private long clusterStateVersion = ClusterState.UNKNOWN_VERSION;

    PingRequest() {
    }

    PingRequest(String nodeId, ClusterName clusterName, DiscoveryNode masterNode, long clusterStateVersion) {
        this.nodeId = nodeId;
        this.clusterName = clusterName;
        this.masterNode = masterNode;
        this.clusterStateVersion = clusterStateVersion;
    }

    public String nodeId() {
        return nodeId;
    }

    public ClusterName clusterName() {
        return clusterName;
    }

    public DiscoveryNode masterNode() {
        return masterNode;
    }

    public long clusterStateVersion() {
        return clusterStateVersion;
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
        nodeId = in.readString();
        // cluster name, master node and state version were added to the
        // wire format in 1.4.0; older senders serialize only the node id
        if (in.getVersion().onOrAfter(Version.V_1_4_0)) {
            clusterName = ClusterName.readClusterName(in);
            masterNode = DiscoveryNode.readNode(in);
            clusterStateVersion = in.readLong();
        }
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        out.writeString(nodeId);
        // keep the wire format readable by pre-1.4.0 receivers
        if (out.getVersion().onOrAfter(Version.V_1_4_0)) {
            clusterName.writeTo(out);
            masterNode.writeTo(out);
            out.writeLong(clusterStateVersion);
        }
    }
}
/**
 * Empty acknowledgement for a ping; carries no payload beyond the base
 * transport response headers.
 */
private static class PingResponse extends TransportResponse {
    private PingResponse() {
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
    }
}
}
| |
/*
* Copyright 2010 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.rest;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import javax.ws.rs.core.Response.ResponseBuilder;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.HTablePool;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.rest.model.CellModel;
import org.apache.hadoop.hbase.rest.model.CellSetModel;
import org.apache.hadoop.hbase.rest.model.RowModel;
import org.apache.hadoop.hbase.util.Bytes;
/**
 * JAX-RS resource for a single HBase row (or the row/column/timestamp span
 * described by a {@link RowSpec}). Supports GET as XML/JSON/protobuf or raw
 * binary, PUT/POST for full and binary cell updates, and DELETE.
 */
public class RowResource extends ResourceBase {
  private static final Log LOG = LogFactory.getLog(RowResource.class);

  TableResource tableResource; // parent table resource; supplies the table name
  RowSpec rowspec;             // parsed row/column/timestamp/maxVersions specification

  /**
   * Constructor
   * @param tableResource parent table resource
   * @param rowspec row specification string to parse
   * @param versions maximum number of cell versions to return, or null for the default
   * @throws IOException if the row specification cannot be parsed
   */
  public RowResource(TableResource tableResource, String rowspec,
      String versions) throws IOException {
    super();
    this.tableResource = tableResource;
    this.rowspec = new RowSpec(rowspec);
    if (versions != null) {
      this.rowspec.setMaxVersions(Integer.valueOf(versions));
    }
  }

  /**
   * GET as XML/JSON/protobuf: returns the cells selected by the row spec as
   * a CellSetModel, one RowModel per distinct row key.
   * Responds 404 when no cells match, 503 on underlying store errors.
   */
  @GET
  @Produces({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF})
  public Response get(final @Context UriInfo uriInfo) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("GET " + uriInfo.getAbsolutePath());
    }
    servlet.getMetrics().incrementRequests(1);
    try {
      ResultGenerator generator =
        ResultGenerator.fromRowSpec(tableResource.getName(), rowspec, null);
      if (!generator.hasNext()) {
        throw new WebApplicationException(Response.Status.NOT_FOUND);
      }
      int count = 0;
      CellSetModel model = new CellSetModel();
      KeyValue value = generator.next();
      byte[] rowKey = value.getRow();
      RowModel rowModel = new RowModel(rowKey);
      do {
        // start a new row model whenever the row key changes
        if (!Bytes.equals(value.getRow(), rowKey)) {
          model.addRow(rowModel);
          rowKey = value.getRow();
          rowModel = new RowModel(rowKey);
        }
        rowModel.addCell(new CellModel(value.getFamily(), value.getQualifier(),
          value.getTimestamp(), value.getValue()));
        // stop once the maximum number of values has been collected
        if (++count > rowspec.getMaxValues()) {
          break;
        }
        value = generator.next();
      } while (value != null);
      // flush the final (possibly partial) row
      model.addRow(rowModel);
      servlet.getMetrics().incrementSucessfulGetRequests(1);
      return Response.ok(model).build();
    } catch (IOException e) {
      servlet.getMetrics().incrementFailedGetRequests(1);
      throw new WebApplicationException(e,
                  Response.Status.SERVICE_UNAVAILABLE);
    }
  }

  /**
   * GET as raw binary: returns the value of exactly one cell, with its
   * timestamp exposed in the X-Timestamp response header.
   * Responds 400 when the spec does not name exactly one column,
   * 404 when no cell matches, 503 on underlying store errors.
   */
  @GET
  @Produces(MIMETYPE_BINARY)
  public Response getBinary(final @Context UriInfo uriInfo) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("GET " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
    }
    servlet.getMetrics().incrementRequests(1);
    // doesn't make sense to use a non specific coordinate as this can only
    // return a single cell
    if (!rowspec.hasColumns() || rowspec.getColumns().length > 1) {
      throw new WebApplicationException(Response.Status.BAD_REQUEST);
    }
    try {
      ResultGenerator generator =
        ResultGenerator.fromRowSpec(tableResource.getName(), rowspec, null);
      if (!generator.hasNext()) {
        throw new WebApplicationException(Response.Status.NOT_FOUND);
      }
      KeyValue value = generator.next();
      ResponseBuilder response = Response.ok(value.getValue());
      response.header("X-Timestamp", value.getTimestamp());
      servlet.getMetrics().incrementSucessfulGetRequests(1);
      return response.build();
    } catch (IOException e) {
      servlet.getMetrics().incrementFailedGetRequests(1);
      throw new WebApplicationException(e,
                  Response.Status.SERVICE_UNAVAILABLE);
    }
  }

  /**
   * Applies the cells of the given model as Puts. Row keys and columns may
   * come from the model or fall back to the row spec; a missing key/column
   * yields 400. Responds 403 in read-only mode and 503 on store errors.
   *
   * @param model the cell set to write
   * @param replace unused here; kept for symmetry with the REST verb semantics
   */
  Response update(final CellSetModel model, final boolean replace) {
    servlet.getMetrics().incrementRequests(1);
    if (servlet.isReadOnly()) {
      throw new WebApplicationException(Response.Status.FORBIDDEN);
    }
    HTablePool pool = servlet.getTablePool();
    HTableInterface table = null;
    try {
      List<RowModel> rows = model.getRows();
      List<Put> puts = new ArrayList<Put>();
      for (RowModel row: rows) {
        byte[] key = row.getKey();
        if (key == null) {
          // fall back to the row named in the request path
          key = rowspec.getRow();
        }
        if (key == null) {
          throw new WebApplicationException(Response.Status.BAD_REQUEST);
        }
        Put put = new Put(key);
        int i = 0;
        for (CellModel cell: row.getCells()) {
          byte[] col = cell.getColumn();
          // fall back to the columns named in the request path, in order
          if (col == null) try {
            col = rowspec.getColumns()[i++];
          } catch (ArrayIndexOutOfBoundsException e) {
            col = null;
          }
          if (col == null) {
            throw new WebApplicationException(Response.Status.BAD_REQUEST);
          }
          byte [][] parts = KeyValue.parseColumn(col);
          if (parts.length == 2 && parts[1].length > 0) {
            put.add(parts[0], parts[1], cell.getTimestamp(), cell.getValue());
          } else {
            // family-only column: empty qualifier
            put.add(parts[0], null, cell.getTimestamp(), cell.getValue());
          }
        }
        puts.add(put);
        if (LOG.isDebugEnabled()) {
          LOG.debug("PUT " + put.toString());
        }
      }
      table = pool.getTable(tableResource.getName());
      table.put(puts);
      table.flushCommits();
      ResponseBuilder response = Response.ok();
      servlet.getMetrics().incrementSucessfulPutRequests(1);
      return response.build();
    } catch (IOException e) {
      servlet.getMetrics().incrementFailedPutRequests(1);
      throw new WebApplicationException(e,
                  Response.Status.SERVICE_UNAVAILABLE);
    } finally {
      // always return the table to the pool
      if (table != null) {
        try {
          pool.putTable(table);
        } catch (IOException ioe) {
          throw new WebApplicationException(ioe,
              Response.Status.SERVICE_UNAVAILABLE);
        }
      }
    }
  }

  // This currently supports only update of one row at a time.
  /**
   * Writes a single raw cell value. Row, column and timestamp come from the
   * row spec but may be overridden by the X-Row / X-Column / X-Timestamp
   * request headers. Responds 400 when no column can be determined,
   * 403 in read-only mode and 503 on store errors.
   */
  Response updateBinary(final byte[] message, final HttpHeaders headers,
      final boolean replace) {
    servlet.getMetrics().incrementRequests(1);
    if (servlet.isReadOnly()) {
      throw new WebApplicationException(Response.Status.FORBIDDEN);
    }
    HTablePool pool = servlet.getTablePool();
    HTableInterface table = null;
    try {
      byte[] row = rowspec.getRow();
      byte[][] columns = rowspec.getColumns();
      byte[] column = null;
      if (columns != null) {
        column = columns[0];
      }
      long timestamp = HConstants.LATEST_TIMESTAMP;
      // headers override the values taken from the row spec
      List<String> vals = headers.getRequestHeader("X-Row");
      if (vals != null && !vals.isEmpty()) {
        row = Bytes.toBytes(vals.get(0));
      }
      vals = headers.getRequestHeader("X-Column");
      if (vals != null && !vals.isEmpty()) {
        column = Bytes.toBytes(vals.get(0));
      }
      vals = headers.getRequestHeader("X-Timestamp");
      if (vals != null && !vals.isEmpty()) {
        timestamp = Long.valueOf(vals.get(0));
      }
      if (column == null) {
        throw new WebApplicationException(Response.Status.BAD_REQUEST);
      }
      Put put = new Put(row);
      byte parts[][] = KeyValue.parseColumn(column);
      if (parts.length == 2 && parts[1].length > 0) {
        put.add(parts[0], parts[1], timestamp, message);
      } else {
        // family-only column: empty qualifier
        put.add(parts[0], null, timestamp, message);
      }
      table = pool.getTable(tableResource.getName());
      table.put(put);
      if (LOG.isDebugEnabled()) {
        LOG.debug("PUT " + put.toString());
      }
      servlet.getMetrics().incrementSucessfulPutRequests(1);
      return Response.ok().build();
    } catch (IOException e) {
      servlet.getMetrics().incrementFailedPutRequests(1);
      throw new WebApplicationException(e,
                  Response.Status.SERVICE_UNAVAILABLE);
    } finally {
      // always return the table to the pool
      if (table != null) {
        try {
          pool.putTable(table);
        } catch (IOException ioe) {
          throw new WebApplicationException(ioe,
              Response.Status.SERVICE_UNAVAILABLE);
        }
      }
    }
  }

  /** PUT as XML/JSON/protobuf; delegates to {@link #update}. */
  @PUT
  @Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF})
  public Response put(final CellSetModel model,
      final @Context UriInfo uriInfo) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("PUT " + uriInfo.getAbsolutePath());
    }
    return update(model, true);
  }

  /** PUT as raw binary; delegates to {@link #updateBinary}. */
  @PUT
  @Consumes(MIMETYPE_BINARY)
  public Response putBinary(final byte[] message,
      final @Context UriInfo uriInfo, final @Context HttpHeaders headers) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("PUT " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
    }
    return updateBinary(message, headers, true);
  }

  /** POST as XML/JSON/protobuf; delegates to {@link #update}. */
  @POST
  @Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF})
  public Response post(final CellSetModel model,
      final @Context UriInfo uriInfo) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("POST " + uriInfo.getAbsolutePath());
    }
    return update(model, false);
  }

  /** POST as raw binary; delegates to {@link #updateBinary}. */
  @POST
  @Consumes(MIMETYPE_BINARY)
  public Response postBinary(final byte[] message,
      final @Context UriInfo uriInfo, final @Context HttpHeaders headers) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("POST " + uriInfo.getAbsolutePath() + " as "+MIMETYPE_BINARY);
    }
    return updateBinary(message, headers, false);
  }

  /**
   * DELETE: removes the columns (or whole families) named by the row spec,
   * bounded by the spec's timestamp when one is present.
   * Responds 403 in read-only mode and 503 on store errors.
   */
  @DELETE
  public Response delete(final @Context UriInfo uriInfo) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("DELETE " + uriInfo.getAbsolutePath());
    }
    servlet.getMetrics().incrementRequests(1);
    if (servlet.isReadOnly()) {
      throw new WebApplicationException(Response.Status.FORBIDDEN);
    }
    Delete delete = null;
    if (rowspec.hasTimestamp())
      delete = new Delete(rowspec.getRow(), rowspec.getTimestamp(), null);
    else
      delete = new Delete(rowspec.getRow());
    for (byte[] column: rowspec.getColumns()) {
      byte[][] split = KeyValue.parseColumn(column);
      if (rowspec.hasTimestamp()) {
        if (split.length == 2 && split[1].length != 0) {
          // family:qualifier with timestamp bound
          delete.deleteColumns(split[0], split[1], rowspec.getTimestamp());
        } else {
          delete.deleteFamily(split[0], rowspec.getTimestamp());
        }
      } else {
        if (split.length == 2 && split[1].length != 0) {
          delete.deleteColumns(split[0], split[1]);
        } else {
          delete.deleteFamily(split[0]);
        }
      }
    }
    HTablePool pool = servlet.getTablePool();
    HTableInterface table = null;
    try {
      table = pool.getTable(tableResource.getName());
      table.delete(delete);
      servlet.getMetrics().incrementSucessfulDeleteRequests(1);
      if (LOG.isDebugEnabled()) {
        LOG.debug("DELETE " + delete.toString());
      }
    } catch (IOException e) {
      servlet.getMetrics().incrementFailedDeleteRequests(1);
      throw new WebApplicationException(e,
                  Response.Status.SERVICE_UNAVAILABLE);
    } finally {
      // always return the table to the pool
      if (table != null) {
        try {
          pool.putTable(table);
        } catch (IOException ioe) {
          throw new WebApplicationException(ioe,
              Response.Status.SERVICE_UNAVAILABLE);
        }
      }
    }
    return Response.ok().build();
  }
}
| |
/*
* Autopsy Forensic Browser
*
* Copyright 2015-16 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.imagegallery;
import com.google.common.collect.Sets;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import static java.util.Objects.isNull;
import java.util.Set;
import java.util.logging.Level;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.imageio.ImageIO;
import org.apache.commons.lang3.StringUtils;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.TskCoreException;
/**
* Enum style singleton to provide utilities related to questions about a files
* type, and whether it should be supported in Image Gallery.
*
* TODO: refactor this to remove code that duplicates
* org.sleuthkit.autopsy.coreutils.ImageUtils
*/
public enum FileTypeUtils {

    // Enum-singleton holder; all functionality is exposed via static members.
    instance;

    private static final Logger LOGGER = Logger.getLogger(FileTypeUtils.class.getName());
    /**
     * Set of specific mimetypes (as strings) that we should support(ie, include
     * in db and show to user). These are in addition to all image/* or video/*
     * types
     */
    private static final Set<String> supportedMimeTypes = new HashSet<>();
    /**
     * Mimetypes that we disable because they create too many false positives (
     * ie files that are not images to show up in Image Gallery)
     */
    private static final Set<String> disabledMimeTypes = new HashSet<>();
    /**
     * set of specific mimetypes to support as videos, in addition to any type
     * prefixed by video/
     */
    private static final Set<String> videoMimeTypes = new HashSet<>();
    /**
     * set of extensions to support as images. lowercase without the period.
     */
    private static final Set<String> imageExtensions = new HashSet<>();
    /**
     * set of extensions to support as videos. lowercase without the period.
     */
    private static final Set<String> videoExtensions = new HashSet<>();
    /**
     * set of all extensions that we should support(ie, include in db and show
     * to user). Initialized to be the concatenation of imageExtensions and
     * videoExtensions sets.
     */
    private static final Set<String> supportedExtensions;
    /**
     * Lazily instantiated FileTypeDetector to use when the mimetype of a file
     * is needed. Access only through getFileTypeDetector().
     */
    private static FileTypeDetector FILE_TYPE_DETECTOR;

    /**
     * static initalizer block to initialize sets of extensions and mimetypes to
     * be supported
     */
    static {
        ImageIO.scanForPlugins();
        //add all extension ImageIO claims to support
        imageExtensions.addAll(Stream.of(ImageIO.getReaderFileSuffixes())
                .map(String::toLowerCase)
                .collect(Collectors.toList()));
        //add list of known image extensions (duplicate entries removed; the
        //backing HashSet deduplicated them anyway, so behavior is unchanged)
        imageExtensions.addAll(Arrays.asList(
                "bmp", //Bitmap NON-NLS
                "gif", //gif NON-NLS
                "jpg", "jpeg", "jpe", "jp2", "jpx", //jpeg variants NON-NLS
                "pbm", "pgm", "ppm",// Portable image format variants NON-NLS
                "png", //portable network graphic NON-NLS
                "tga", //targa NON-NLS
                "psd", //photoshop NON-NLS
                "tif", "tiff", //tiff variants NON-NLS
                "yuv", "ico", //icons NON-NLS
                "ai", //illustrator NON-NLS
                "svg", //scalable vector graphics NON-NLS
                "sn", "ras", //sun raster NON-NLS
                "wmf", "emf", // windows meta file NON-NLS
                "wmz", "emz" //compressed windows meta file NON-NLS
        ));
        //add list of known video extensions (duplicate "mp4" removed)
        videoExtensions.addAll(Arrays.asList("fxm", "aaf", "3gp", "asf", "avi", //NON-NLS
                "m1v", "m2v", "m4v", "mp4", "mov", "mpeg", "mpg", "mpe", //NON-NLS
                "rm", "wmv", "mpv", "flv", "swf")); //NON-NLS
        supportedExtensions = Sets.union(imageExtensions, videoExtensions);
        //add list of mimetypes to count as videos even though they aren't prefixed by video/
        videoMimeTypes.addAll(Arrays.asList("application/x-shockwave-flash")); //NON-NLS
        supportedMimeTypes.addAll(videoMimeTypes);
        /*
         * TODO: windows .cur cursor files get misidentified as
         * application/x-123, so we claim to support application/x-123 so we
         * don't miss them: ie this is a hack to cover another bug. when this is
         * fixed, we should remove application/x-123 from the list of supported
         * mime types.
         */
        supportedMimeTypes.addAll(Arrays.asList("application/x-123")); //NON-NLS
        supportedMimeTypes.addAll(Arrays.asList("application/x-wmf")); //NON-NLS
        /*
         * We could support application/x-emf, but many files get mis-identified
         * as it and so supporting it causes many false positive( ie files that
         * are not images) to show up in Image Gallery.
         * supportedMimeTypes.addAll(Arrays.asList("application/x-emf"));
         */
        //add list of mimetypes ImageIO claims to support
        supportedMimeTypes.addAll(Stream.of(ImageIO.getReaderMIMETypes())
                .map(String::toLowerCase)
                .collect(Collectors.toList()));
        /*
         * Many non image files get misidentified as image/vnd.microsoft.icon
         * and application/x-emf, and show up in Image Gallery as 'false
         * positives'.
         */
        disabledMimeTypes.addAll(Arrays.asList("application/octet-stream",//this is rarely usefull //NON-NLS
                "image/vnd.microsoft.icon",
                "application/x-emf"));
        supportedMimeTypes.removeAll(disabledMimeTypes);
    }

    /**
     * @return an unmodifiable view of all supported mime types.
     */
    public static Set<String> getAllSupportedMimeTypes() {
        return Collections.unmodifiableSet(supportedMimeTypes);
    }

    /**
     * @return an unmodifiable view of all supported file extensions.
     */
    static Set<String> getAllSupportedExtensions() {
        return Collections.unmodifiableSet(supportedExtensions);
    }

    /**
     * Get the shared, lazily created FileTypeDetector.
     *
     * @throws FileTypeDetector.FileTypeDetectorInitException if the detector
     *         could not be constructed
     */
    static synchronized FileTypeDetector getFileTypeDetector() throws FileTypeDetector.FileTypeDetectorInitException {
        /*
         * TODO: EUR-740 recreate FileTypeDetector when the user creates new
         * user defined file types
         */
        if (isNull(FILE_TYPE_DETECTOR)) {
            FILE_TYPE_DETECTOR = new FileTypeDetector();
        }
        return FILE_TYPE_DETECTOR;
    }

    /**
     * Is the given file supported by Image Gallery, ie does it have a
     * drawable/supported mime type (image/*, video/*, or one of the
     * specifically supported types)? Delegates to hasDrawableMIMEType; files
     * with no detected mime type are not supported.
     *
     * @param file
     *
     * @return true if this file is supported or false if not
     */
    public static boolean isDrawable(AbstractFile file) throws FileTypeDetector.FileTypeDetectorInitException {
        return hasDrawableMIMEType(file);
    }

    /**
     * Is the given (lower-cased) mime type drawable: not disabled, and either
     * image/*, video/*, or in the specifically supported set?
     */
    static boolean isDrawableMimeType(String mimeType) {
        if (StringUtils.isBlank(mimeType)) {
            return false;
        }
        String mimeTypeLower = mimeType.toLowerCase();
        return (disabledMimeTypes.contains(mimeTypeLower) == false)
                && (mimeTypeLower.startsWith("image/")
                || mimeTypeLower.startsWith("video/")
                || supportedMimeTypes.contains(mimeTypeLower));
    }

    /**
     * does the given file have drawable/supported mime type
     *
     * Special case: tiff files are sometimes misidentified as audio/x-aiff,
     * so a file with that mime type and a "tiff" extension is also accepted.
     *
     * @param file
     *
     * @return true if the file has an image or video mime type, false if it
     *         has a non image/video mime type or no detected mime type.
     */
    static boolean hasDrawableMIMEType(AbstractFile file) {
        String mimeType = file.getMIMEType();
        if (mimeType == null) {
            return false;
        }
        mimeType = mimeType.toLowerCase();
        return isDrawableMimeType(mimeType) || (mimeType.equals("audio/x-aiff") && "tiff".equalsIgnoreCase(file.getNameExtension()));
    }

    /**
     * is the given file a video
     *
     * @param file
     *
     * @return true if the given file has a video mime type (video/*,
     *         application/x-shockwave-flash, etc). Returns false when no mime
     *         type has been detected for the file.
     */
    public static boolean hasVideoMIMEType(AbstractFile file) {
        String mimeType = file.getMIMEType();
        if (mimeType == null) {
            return false;
        }
        mimeType = mimeType.toLowerCase();
        return mimeType.startsWith("video/") || videoMimeTypes.contains(mimeType);
    }

    /**
     * Get the unique path for the content, or if that fails, just return the
     * name.
     *
     * @param content
     *
     * @return the unique path, or on failure the plain content name
     */
    static String getContentPathSafe(Content content) {
        try {
            return content.getUniquePath();
        } catch (TskCoreException tskCoreException) {
            String contentName = content.getName();
            LOGGER.log(Level.SEVERE, "Failed to get unique path for " + contentName, tskCoreException); //NOI18N NON-NLS
            return contentName;
        }
    }
}
| |
/**
* Copyright (C) 2015 Born Informatik AG (www.born.ch)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wte4j.impl.word;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.io.InputStream;
import javax.xml.bind.JAXBException;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.docx4j.XmlUtils;
import org.docx4j.jaxb.Context;
import org.docx4j.wml.RPr;
import org.docx4j.wml.SdtBlock;
import org.docx4j.wml.SdtElement;
import org.docx4j.wml.SdtRun;
import org.docx4j.wml.Tag;
import org.junit.Test;
/**
 * Unit tests for {@code PlainTextContent}: creation of sdt block elements,
 * run-property formatting, expression (tag), title (alias) and placeholder
 * handling. Fixes applied: the content-extraction logic that was duplicated
 * inline in testSetPlaceHolderText now reuses the getSdtContent helper, and
 * the unchecked unmarshal cast carries a scoped suppression.
 */
public class PlainTextContentTest {

    @Test
    public void testCreateSdtBlockElement() {
        SdtElement element = PlainTextContent.createSdtBlockElement();
        String text = XmlUtils.marshaltoString(element, true);
        String expectedContent = "<w:sdtPr>" //
                + "<w:tag/>"//
                + "<w:text/>"//
                + "<w:showingPlcHdr/>"//
                + "<w15:appearance w15:val=\"tags\"/>"//
                + "</w:sdtPr>"//
                + "<w:sdtContent/>";
        assertTrue(text.contains(expectedContent));
    }

    @Test
    public void testFormatTextOfBlock() throws Exception {
        SdtElement contentControl = createPlainTextContentControl(SdtBlock.class);
        RPr rPr = createRunProperties("<w:b/>");
        contentControl.getSdtPr().getRPrOrAliasOrLock().add(rPr);
        PlainTextContent plainTextContent = new PlainTextContent(contentControl);
        plainTextContent.setContent("content");
        String xmlString = XmlUtils.marshaltoString(
                plainTextContent.getXmlElement(), true);
        String content = getSdtContent(xmlString);
        String expectedContent = "<w:p>"//
                + "<w:r>" //
                + "<w:rPr><w:b/></w:rPr>" // formatting
                + "<w:t>content</w:t>" // text
                + "</w:r>"//
                + "</w:p>";
        assertEquals(expectedContent, content);
    }

    @Test
    public void testFormatTextOfRun() throws Exception {
        SdtElement contentControl = createPlainTextContentControl(SdtRun.class);
        RPr rPr = createRunProperties("<w:b/>");
        contentControl.getSdtPr().getRPrOrAliasOrLock().add(rPr);
        PlainTextContent plainTextContent = new PlainTextContent(contentControl);
        plainTextContent.setContent("content");
        String xmlString = XmlUtils.marshaltoString(
                plainTextContent.getXmlElement(), true);
        String content = getSdtContent(xmlString);
        String expectedContent = "<w:r>"//
                + "<w:rPr><w:b/></w:rPr>" // formatting
                + "<w:t>content</w:t>" // text
                + "</w:r>";
        assertEquals(expectedContent, content);
    }

    @Test
    public void testGetExpression() throws Exception {
        String expressionString = "expression";
        Tag tag = new Tag();
        tag.setVal(expressionString);
        SdtElement element = createPlainTextContentControl(SdtBlock.class);
        element.getSdtPr().setTag(tag);
        PlainTextContent plainTextContent = new PlainTextContent(element);
        assertEquals(expressionString, plainTextContent.getExpression());
    }

    @Test
    public void testSetExpression() throws Exception {
        SdtElement element = createPlainTextContentControl(SdtBlock.class);
        PlainTextContent plainTextContent = new PlainTextContent(element);
        plainTextContent.setExpression("expression");
        String xmlString = XmlUtils.marshaltoString(
                plainTextContent.getXmlElement(), true);
        assertTrue(xmlString.contains("<w:tag w:val=\"expression\"/>"));
    }

    @Test
    public void testExpressionNotSet() throws Exception {
        SdtElement element = createPlainTextContentControl(SdtBlock.class);
        element.getSdtPr().setTag(null);
        PlainTextContent plainTextContent = new PlainTextContent(element);
        assertTrue(StringUtils.isEmpty(plainTextContent.getExpression()));
    }

    @Test
    public void testhasExpression() throws Exception {
        SdtElement element = createPlainTextContentControl(SdtBlock.class);
        Tag tag = new Tag();
        tag.setVal("tag");
        element.getSdtPr().setTag(tag);
        PlainTextContent plainTextContent = new PlainTextContent(element);
        assertTrue(plainTextContent.hasExpression());
    }

    @Test
    public void testhasNoExpression() throws Exception {
        SdtElement element = createPlainTextContentControl(SdtBlock.class);
        Tag tag = new Tag(); // tag present but without a value
        element.getSdtPr().setTag(tag);
        PlainTextContent plainTextContent = new PlainTextContent(element);
        assertFalse(plainTextContent.hasExpression());
    }

    @Test
    public void testSetTitle() throws Exception {
        SdtElement element = createPlainTextContentControl(SdtBlock.class);
        PlainTextContent plainTextContent = new PlainTextContent(element);
        plainTextContent.setTitle("testLabel");
        String xmlString = XmlUtils.marshaltoString(
                plainTextContent.getXmlElement(), true);
        assertTrue(xmlString + " has no alias set",
                xmlString.contains("<w:alias w:val=\"testLabel\"/>"));
    }

    @Test
    public void testSetPlaceHolderText() throws Exception {
        SdtElement element = createPlainTextContentControl(SdtBlock.class);
        PlainTextContent plainTextContent = new PlainTextContent(element);
        plainTextContent.setPlaceHolderContent("placeHolderText");
        String xmlString = XmlUtils.marshaltoString(
                plainTextContent.getXmlElement(), true);
        // reuse the shared extraction helper instead of duplicating it inline
        String content = getSdtContent(xmlString);
        String expectedContent = "<w:p>"//
                + "<w:r>"//
                + "<w:rPr><w:rStyle w:val=\"PlaceholderText\"/></w:rPr>"//
                + "<w:t>placeHolderText</w:t>"//
                + "</w:r>"//
                + "</w:p>";
        assertEquals(expectedContent, content);
    }

    /**
     * Loads the test content control from the classpath and unmarshals it as
     * the requested SdtElement subtype.
     */
    @SuppressWarnings("unchecked") // unmarshalString is declared to return Object
    private static <E extends SdtElement> E createPlainTextContentControl(
            Class<E> type) throws JAXBException, IOException {
        InputStream in = ClassLoader
                .getSystemResourceAsStream("org/wte4j/impl/word/PlainTextContentControl.xml");
        try {
            String xml = IOUtils.toString(in);
            return (E) XmlUtils.unmarshalString(xml, Context.jc, type);
        } finally {
            in.close();
        }
    }

    /**
     * Builds an RPr element from the given inner XML fragment.
     */
    private static RPr createRunProperties(String rPrContentAsXml)
            throws JAXBException {
        String rPrXml = "<w:rPr xmlns:w=\"http://schemas.openxmlformats.org/wordprocessingml/2006/main\">"
                + rPrContentAsXml + "</w:rPr>";
        return (RPr) XmlUtils.unmarshalString(rPrXml, Context.jc, RPr.class);
    }

    /**
     * Extracts the text between the first sdtContent open/close tag pair.
     */
    private String getSdtContent(String xmlString) {
        int startContent = xmlString.indexOf("<w:sdtContent>")
                + "<w:sdtContent>".length();
        int endContent = xmlString.indexOf("</w:sdtContent>", startContent);
        return xmlString.substring(startContent, endContent);
    }
}
| |
// Copyright (c) 2007 Per M.A. Bothner.
// This is free software; for terms and warranty disclaimer see ../../COPYING.
package gnu.text;
import java.io.*;
import java.net.*;
import gnu.mapping.*;
/* #ifdef JAVA7 */
import java.nio.file.Files;
/* #endif */
/** A generalized path/location, including File and URIs. */
public abstract class Path
/* #ifdef JAVA6 */
implements javax.tools.FileObject
/* #endif */
{
  /** This is equivalent to the System {@code "user.dir"} property.
   * However, the getProperty is tracked dynamically and resolved
   * as needed. */
  public static final FilePath userDirPath =
    FilePath.valueOf(new File("."));

  /** Process-wide fallback used when no per-thread path has been set. */
  public static Path defaultPath = userDirPath;

  /* #ifdef JAVA2 */
  // Per-thread override of the current path; see currentPath/setCurrentPath.
  private static ThreadLocal<Path> pathLocation = new ThreadLocal<Path>();
  /* #endif */

  /** Subclass-only constructor; instances are created via the valueOf factories. */
  protected Path ()
  {
  }
  /** Returns the current path: the per-thread value set via
   * {@link #setCurrentPath}, if any, otherwise the global {@code defaultPath}. */
  public static Path currentPath ()
  {
    /* #ifdef JAVA2 */
    Path path = pathLocation.get();
    if (path != null)
      return path;
    /* #endif */
    return defaultPath;
  }
  /** Sets the current path for the calling thread
   * (or, in the pre-JAVA2 build variant, the global default). */
  public static void setCurrentPath (Path path)
  {
    /* #ifdef JAVA2 */
    pathLocation.set(path);
    /* #else */
    // defaultPath = path;
    /* #endif */
  }
public static Path coerceToPathOrNull (Object path)
{
if (path instanceof Path)
return (Path) path;
if (path instanceof URL)
return URLPath.valueOf((URL) path);
/* #ifdef use:java.net.URI */
if (path instanceof URI)
return URIPath.valueOf((URI) path);
/* #endif */
if (path instanceof File)
return FilePath.valueOf((File) path);
String str;
if (path instanceof gnu.lists.FString) // FIXME: || UntypedAtomic
str = path.toString();
else if (! (path instanceof String))
return null;
else
str = (String) path;
if (Path.uriSchemeSpecified(str))
return URIPath.valueOf(str);
else
return FilePath.valueOf(str);
}
public static Path valueOf (Object arg)
{
Path path = coerceToPathOrNull(arg);
if (path == null)
throw new WrongType((String) null, WrongType.ARG_CAST, arg, "path");
return path;
}
public static URL toURL (String str)
{
try
{
if (! Path.uriSchemeSpecified(str))
{
Path cur = currentPath();
Path path = cur.resolve(str);
if (path.isAbsolute())
return path.toURL();
str = path.toString();
}
return new URL(str);
}
catch (Throwable ex)
{
throw WrappedException.wrapIfNeeded(ex);
}
}
/** Helper routine to get the scheme part of a URI.
* The scheme part is "http:" or "file:" or "ftp:" most commonly.
* This functions searches for the first ':' that doesn't follow a '/'.
* @return The length of the scheme component, not counting the colon,
* (or alternatively the index of the colon), or -1 if the is no scheme.
*/
public static int uriSchemeLength (String uri)
{
int len = uri.length();
for (int i = 0; i < len; i++)
{
char ch = uri.charAt(i);
if (ch == ':')
return i;
if (i == 0 ? ! Character.isLetter(ch)
: (! Character.isLetterOrDigit(ch)
&& ch != '+' && ch != '-' && ch != '.'))
return -1;
}
return -1;
}
/** Tests if a URL has a scheme.
* For convenience, we treat a 1-character "scheme" as an
* MS-DOS-style "drive letter" - i.e. not a scheme. */
public static boolean uriSchemeSpecified (String name)
{
int ulen = uriSchemeLength(name);
if (ulen == 1 && File.separatorChar == '\\')
{
char drive = name.charAt(0);
return ! ((drive >= 'a' && drive <= 'z')
|| (drive >= 'A' && drive <= 'Z'));
}
return ulen > 0;
}
public abstract boolean isAbsolute ();
/** Does this path name a directory?
* The default implementation returns true only if the path ends
* with '/' or the separatorChar.
*/
public boolean isDirectory ()
{
String str = toString();
int len = str.length();
if (len > 0)
{
char last = str.charAt(len - 1);
if (last == '/' || last == File.separatorChar)
return true;
}
return false;
}
public boolean delete ()
{
return false;
}
public boolean exists ()
{
return getLastModified() != 0;
}
public abstract long getLastModified ();
public long getContentLength ()
{
return -1;
}
public abstract String getScheme ();
public String getAuthority ()
{
return null;
}
public String getUserInfo ()
{
return null;
}
public String getHost ()
{
return null;
}
public abstract String getPath ();
public Path getDirectory ()
{
if (isDirectory())
return this;
else
return resolve("");
}
public Path getParent ()
{
return resolve(isDirectory() ? ".." : "");
}
public String getLast ()
{
String p = getPath();
if (p == null)
return null;
int len = p.length();
int end = len;
for (int i = len; ; )
{
if (--i <= 0)
return "";
char c = p.charAt(i);
if (c == '/'
|| (this instanceof FilePath
&& c == File.separatorChar))
{
if (i+1 == len)
end = i;
else
return p.substring(i+1, end);
}
}
}
public String getExtension ()
{
String p = getPath();
if (p == null)
return null;
int len = p.length();
for (int i = len; ; )
{
if (--i <= 0)
return null;
char c = p.charAt(i);
boolean sawDot = false;
if (c == '.')
{
c = p.charAt(i-1);
sawDot = true;
}
if (c == '/'
|| (this instanceof FilePath
&& c == File.separatorChar))
return null;
if (sawDot)
return p.substring(i+1);
}
}
public int getPort ()
{
return -1;
}
public String getQuery ()
{
return null;
}
public String getFragment ()
{
return null;
}
public abstract URL toURL ();
/* #ifdef use:java.net.URI */
public abstract URI toUri ();
/* @deprecated */
public final URI toURI () { return toUri(); }
/* #else */
// public String toUri () { return toURIString(); }
// /* @deprecated */
// public final String toURI () { return toUri(); }
/* #endif */
public String toURIString () { return toUri().toString(); }
public Path resolve (Path relative)
{
if (relative.isAbsolute())
return relative;
return resolve(relative.toString());
}
public abstract Path resolve (String relative);
public static InputStream openInputStream (Object uri) throws IOException
{
return Path.valueOf(uri).openInputStream();
}
public abstract InputStream openInputStream () throws IOException;
public abstract OutputStream openOutputStream () throws IOException;
public Reader openReader (boolean ignoreEncodingErrors) throws IOException
{
throw new UnsupportedOperationException(); // FIXME
}
public Writer openWriter () throws IOException
{
return new OutputStreamWriter(openOutputStream());
}
/* #ifdef use:java.lang.CharSequence */
public CharSequence getCharContent (boolean ignoreEncodingErrors)
throws IOException
{
throw new UnsupportedOperationException(); // FIXME
}
/* #endif */
/** Convert an absolute URI to one relatve to a given base.
* This goes beyond java.net.URI.relativize in that if the arguments
* have a common prefix, it can create a relative URI using "../" steps.
*/
public static String relativize (String in, String base)
throws java.net.URISyntaxException, java.io.IOException
{
String baseStr = base;
String inStr = in;
/* #ifdef use:java.net.URI */
baseStr = new URI(baseStr).normalize().toString();
inStr = URLPath.valueOf(in).toURI().normalize().toString();
/* #endif */
int baseLen = baseStr.length();
int inLen = inStr.length();
int i = 0;
int sl = 0;
int colon = 0;
for (; i < baseLen && i < inLen; i++)
{
char cb = baseStr.charAt(i);
char ci = inStr.charAt(i);
if (cb != ci)
break;
if (cb == '/')
sl = i;
if (cb == ':')
colon = i;
}
if (colon > 0
&& (sl > colon + 2 || baseLen <= colon+2 || baseStr.charAt(colon+2) != '/')
/*
&& (colon + 2 != CLASS_RESOURCE_URI_PREFIX_LENGTH
|| ! inStr.substring(0, colon + 2).equals(CLASS_RESOURCE_URI_PREFIX)
|| getClassLoaderForURI(base) == getClassLoaderForURI(in))
*/
)
{
baseStr = baseStr.substring(sl+1);
inStr = inStr.substring(sl+1);
}
else
return in;
/* #ifdef JAVA5 */
StringBuilder sbuf = new StringBuilder();
/* #else */
// StringBuffer sbuf = new StringBuffer();
/* #endif */
sl = 0;
for (i = baseLen = baseStr.length(); --i >= 0; )
if (baseStr.charAt(i) == '/') // sep?
sbuf.append("../");
sbuf.append(inStr);
return sbuf.toString();
}
public String getName ()
{
return toString();
}
public Path getAbsolute ()
{
if (this == Path.userDirPath)
return resolve("");
else
return currentPath().resolve(this);
}
public Path getCanonical ()
{
return getAbsolute ();
}
/* #ifdef JAVA7 */
/** Convert if possible to a {@code java.nio.file.Path} instance.
* Use caution if this is a relative path and the {@code currentPath()}
* is not the default path, since {@code java.nio} assumes a relative path
* is relative to the default directory.
* Uses {@code java.nio.file.Paths#get(URI)}, and thus
* throws whatever that method throws.
* @throws FileSystemNotFoundException - no nio file system provider
* was found for the URI scheme
*/
public java.nio.file.Path toNPath()
throws java.nio.file.FileSystemNotFoundException {
return java.nio.file.Paths.get(toUri());
}
/* #endif */
public String probeContentType() {
String contentType;
/* #ifdef JAVA7 */
try {
contentType = Files.probeContentType(getAbsolute().toNPath());
} catch (Throwable ex) {
contentType = null;
}
/* #else */
// contentType = null;
/* #endif */
if (contentType == null) {
contentType = URLConnection.guessContentTypeFromName(getPath());
}
return contentType;
}
}
| |
/**********************************************************************************
* $URL$
* $Id$
**********************************************************************************
*
* Copyright (c) 2005, 2006, 2007, 2008 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.jsf.util;
import java.io.IOException;
import java.util.Enumeration;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import lombok.extern.slf4j.Slf4j;
import org.sakaiproject.jsf2.util.JsfTool;
import org.sakaiproject.tool.api.ActiveTool;
import org.sakaiproject.tool.api.Tool;
import org.sakaiproject.tool.api.ToolException;
import org.sakaiproject.tool.api.ToolSession;
import org.sakaiproject.tool.cover.ActiveToolManager;
import org.sakaiproject.tool.cover.SessionManager;
import org.sakaiproject.tool.cover.ToolManager;
import org.sakaiproject.util.Web;
import org.sakaiproject.tool.assessment.data.ifc.assessment.AssessmentIfc;
import org.sakaiproject.tool.assessment.data.ifc.assessment.ItemTextAttachmentIfc;
import org.sakaiproject.tool.assessment.facade.AssessmentFacade;
import org.sakaiproject.tool.assessment.services.assessment.AssessmentService;
import org.sakaiproject.tool.assessment.services.assessment.PublishedAssessmentService;
import org.sakaiproject.tool.assessment.ui.listener.util.ContextUtil;
import org.sakaiproject.tool.assessment.ui.bean.author.AnswerBean;
import org.sakaiproject.tool.assessment.ui.bean.author.AssessmentSettingsBean;
import org.sakaiproject.tool.assessment.ui.bean.author.AssessmentBean;
import org.sakaiproject.tool.assessment.ui.bean.author.AuthorBean;
import org.sakaiproject.tool.assessment.ui.bean.author.ItemAuthorBean;
import org.sakaiproject.tool.assessment.ui.bean.author.PublishedAssessmentSettingsBean;
import org.sakaiproject.tool.assessment.ui.bean.author.SectionBean;
import org.sakaiproject.tool.assessment.ui.bean.delivery.ItemContentsBean;
import org.sakaiproject.tool.assessment.ui.bean.evaluation.QuestionScoresBean;
import org.sakaiproject.tool.assessment.ui.bean.evaluation.TotalScoresBean;
import org.sakaiproject.tool.assessment.ui.bean.authz.AuthorizationBean;
import org.sakaiproject.tool.assessment.ui.bean.util.EmailBean;
/**
* <p>
* Customized JsfTool for Samigo - just to workaround the fact that Samigo
* has the JSF URL mapping "*.faces" hard-coded in several places. If
* all instances of "*.faces" were changed to "*.jsf", this class could be removed.
* </p>
*
*/
@Slf4j
public class SamigoJsfTool extends JsfTool {
	// Suffix identifying a helper-tool path segment, e.g. "sakai.filepicker.helper".
	private static final String HELPER_EXT = ".helper";
	// Request parameters with this prefix are copied into the tool session
	// (with the prefix stripped) before launching a helper.
	private static final String HELPER_SESSION_PREFIX = "session.";
	// Pseudo-target signalling that a helper wants to return to its caller.
	private static final String HELPER_RETURN_NOTIFICATION = "/returnToCaller";
	// Pseudo-target prefix forcing a reload of the assessmentBean state.
	private static final String RESET_ASSESSMENT_BEAN = "/resetAssessmentBean";

	/**
	 * Recognize a path that is a resource request. It must have an "extension", i.e. a dot followed by characters that do not include a slash.
	 *
	 * @param path
	 *        The path to check
	 * @return true if the path is a resource request, false if not.
	 */
	protected boolean isResourceRequest(String path)
	{
		log.debug("****0. inside isResourceRequest, path="+path);
		// we need some path
		if ((path == null) || (path.length() == 0)) return false;
		// we need a last dot
		int pos = path.lastIndexOf(".");
		if (pos == -1) return false;
		// we need that last dot to be the end of the path, not burried in the path somewhere (i.e. no more slashes after the last dot)
		String ext = path.substring(pos);
		log.debug("****1. inside isResourceRequest, ext="+ext);
		if (ext.indexOf("/") != -1) return false;
		// these are JSF pages, not resources
		// THESE LINES OF CODE IS THE ONLY REASON THIS CLASS EXISTS!
		if (ext.equals(".jsf")) return false;
		if (ext.equals(".faces")) return false;
		if (path.startsWith("/faces/")) return false;
		if (path.indexOf(".helper") > -1) return false;
		// ok, it's a resource request
		return true;
	}

	/**
	 * Dispatches the request to the proper JSF view or resource.
	 * The flow, in order: reset assessmentBean if requested; handle a helper
	 * returning to its caller; delegate to a helper; forward resource
	 * requests; redirect for default/rewritten targets; enforce direct-URL
	 * authorization for author/evaluation/template/questionpool areas; and
	 * finally, on return from the file picker, push the picked attachments
	 * into the appropriate backing bean before forwarding to the JSF view.
	 * The order of these steps is significant - do not reorder.
	 */
	protected void dispatch(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException {
		// NOTE: this is a simple path dispatching, taking the path as the view id = jsp file name for the view,
		// with default used if no path and a path prefix as configured.
		// build up the target that will be dispatched to
		String target = req.getPathInfo();
		log.debug("***0. dispatch, target ="+target);
		// see if we need to reset the assessmentBean, such as when returning
		// from a helper
		// TODO: there MUST be a cleaner way to do this!! These dependencies
		// shouldn't be here!!
		if (target != null && target.startsWith(RESET_ASSESSMENT_BEAN)) {
			AssessmentBean assessmentBean = (AssessmentBean) ContextUtil
					.lookupBeanFromExternalServlet("assessmentBean", req, res);
			if (assessmentBean != null && assessmentBean.getAssessmentId() != null) {
				AssessmentIfc assessment;
				AuthorBean author = (AuthorBean) ContextUtil.lookupBean("author");
				AssessmentService assessmentService;
				// Pending vs published assessments live in different services.
				if (author.getIsEditPendingAssessmentFlow()) {
					assessmentService = new AssessmentService();
				}
				else {
					assessmentService = new PublishedAssessmentService();
				}
				assessment = assessmentService.getAssessment(Long.valueOf(assessmentBean.getAssessmentId()));
				assessmentBean.setAssessment(assessment);
			}
			target = target.replaceFirst(RESET_ASSESSMENT_BEAN, "");
		}
		// see if this is a helper trying to return to caller
		if (HELPER_RETURN_NOTIFICATION.equals(target)) {
			ToolSession session = SessionManager.getCurrentToolSession();
			target = (String) session.getAttribute(ToolManager.getCurrentTool()
					.getId()
					+ Tool.HELPER_DONE_URL);
			if (target != null) {
				session.removeAttribute(ToolManager.getCurrentTool().getId()
						+ Tool.HELPER_DONE_URL);
				res.sendRedirect(target);
				return;
			}
		}
		boolean sendToHelper = sendToHelper(req, res);
		boolean isResourceRequest = isResourceRequest(target);
		log.debug("***1. dispatch, send to helper ="+sendToHelper);
		log.debug("***2. dispatch, isResourceRequest ="+ isResourceRequest);
		// see if we have a helper request
		if (sendToHelper) {
			// sendToHelper already performed the helper dispatch
			return;
		}
		if (isResourceRequest) {
			// get a dispatcher to the path
			RequestDispatcher resourceDispatcher = getServletContext().getRequestDispatcher(target);
			if (resourceDispatcher != null) {
				resourceDispatcher.forward(req, res);
				return;
			}
		}
		if (target == null || "/".equals(target)) {
			target = computeDefaultTarget();
			// make sure it's a valid path
			if (!target.startsWith("/")){
				target = "/" + target;
			}
			// now that we've messed with the URL, send a redirect to make it official
			res.sendRedirect(Web.returnUrl(req, target));
			return;
		}
		// see if we want to change the specifically requested view
		String newTarget = redirectRequestedTarget(target);
		// make sure it's a valid path
		if (!newTarget.startsWith("/")){
			newTarget = "/" + newTarget;
		}
		if (!newTarget.equals(target)){
			// now that we've messed with the URL, send a redirect to make it official
			res.sendRedirect(Web.returnUrl(req, newTarget));
			return;
		}
		target = newTarget;
		// store this
		ToolSession toolSession = SessionManager.getCurrentToolSession();
		// NOTE(review): toolSession is null-checked here but used without a
		// guard further below (file-picker return handling) - presumably it
		// is always non-null once a view has been visited; confirm.
		if (toolSession!=null){
			toolSession.setAttribute(LAST_VIEW_VISITED, target);
		}
		log.debug("3a. dispatch: toolSession="+toolSession);
		log.debug("3b. dispatch: target="+target);
		log.debug("3c. dispatch: lastview?"+m_defaultToLastView);
		//check direct URL permissions
		AuthorizationBean authBean = (AuthorizationBean) ContextUtil.lookupBeanFromExternalServlet("authorization", req, res);
		// Author area: any author-level privilege grants access.
		if (target.indexOf("/jsf/author/") > -1 &&
				!authBean.getAdminPrivilege() &&
				!authBean.getCreateAssessment() &&
				!authBean.getEditAnyAssessment() &&
				!authBean.getEditOwnAssessment() &&
				!authBean.getDeleteAnyAssessment() &&
				!authBean.getDeleteOwnAssessment() &&
				!authBean.getPublishAnyAssessment() &&
				!authBean.getPublishOwnAssessment()) {
			log.debug("***4a. dispatch, authorization error : path="+target);
			target = computeDefaultTarget(false);
		}
		// Evaluation area: requires grading privileges.
		if (target.indexOf("/jsf/evaluation/") > -1 &&
				!authBean.getAdminPrivilege() &&
				!authBean.getGradeAnyAssessment() &&
				!authBean.getGradeOwnAssessment()) {
			log.debug("***4b. dispatch, authorization error : path="+target);
			target = computeDefaultTarget(false);
		}
		// Template area: requires template privileges.
		if (target.indexOf("/jsf/template/") > -1 &&
				!authBean.getAdminPrivilege() &&
				!authBean.getCreateTemplate() &&
				!authBean.getEditOwnTemplate() &&
				!authBean.getDeleteOwnTemplate()) {
			log.debug("***4c. dispatch, authorization error : path="+target);
			target = computeDefaultTarget(false);
		}
		//based on assessmentHeadings.jsp, "event" and "section-activity" paths are based on questionpool permissions
		//TODO : create custom permissions for event and section-activity
		if ((target.indexOf("/jsf/questionpool/") > -1 || target.indexOf("/jsf/event/") > -1 || target.indexOf("/jsf/section-activity/") > -1) &&
				!authBean.getAdminPrivilege() &&
				!authBean.getAdminQuestionPool() &&
				!authBean.getCreateQuestionPool() &&
				!authBean.getEditOwnQuestionPool() &&
				!authBean.getDeleteOwnQuestionPool() &&
				!authBean.getCopyOwnQuestionPool()) {
			log.debug("***4d. dispatch, authorization error : path="+target);
			target = computeDefaultTarget(false);
		}
		// add the configured folder root and extension (if missing)
		target = m_path + target;
		// add the default JSF extension (if we have no extension)
		int lastSlash = target.lastIndexOf("/");
		int lastDot = target.lastIndexOf(".");
		if (lastDot < 0 || lastDot < lastSlash){
			target += JSF_EXT;
		}
		// set the information that can be removed from return URLs
		req.setAttribute(URL_PATH, m_path);
		req.setAttribute(URL_EXT, ".jsp");
		// set the sakai request object wrappers to provide the native, not Sakai set up, URL information
		// - this assures that the FacesServlet can dispatch to the proper view based on the path info
		req.setAttribute(Tool.NATIVE_URL, Tool.NATIVE_URL);
		// TODO: Should setting the HTTP headers be moved up to the portal level as well?
		res.setContentType("text/html; charset=UTF-8");
		res.addDateHeader("Expires", System.currentTimeMillis() - (1000L * 60L * 60L * 24L * 365L));
		res.addDateHeader("Last-Modified", System.currentTimeMillis());
		res.addHeader("Cache-Control", "no-store, no-cache, must-revalidate, max-age=0, post-check=0, pre-check=0");
		res.addHeader("Pragma", "no-cache");
		// dispatch to the target
		log.debug("***5. dispatch, dispatching path: " + req.getPathInfo() + " to: " + target + " context: "
				+ getServletContext().getServletContextName());
		// if this is a return from the file picker and going back to
		// case 1: item mofification, then set
		// itemAuthorbean.attachmentlist = filepicker list
		if (target.indexOf("/jsf/author/item/") > -1
				&& ("true").equals(toolSession.getAttribute("SENT_TO_FILEPICKER_HELPER"))){
			ItemAuthorBean bean = (ItemAuthorBean) ContextUtil.lookupBeanFromExternalServlet(
					"itemauthor", req, res);
			// For EMI Item Attachments
			AnswerBean emiQAComboItem = bean.getCurrentAnswer();
			if (emiQAComboItem == null) {
				bean.setItemAttachment();
			}
			else {
				emiQAComboItem.setItemTextAttachment();
			}
			toolSession.removeAttribute("SENT_TO_FILEPICKER_HELPER");
		}
		// case 2: part mofification, then set
		// sectionBean.attachmentList = filepicker list
		else if (target.indexOf("/jsf/author/editPart") > -1
				&& ("true").equals(toolSession.getAttribute("SENT_TO_FILEPICKER_HELPER"))){
			SectionBean bean = (SectionBean) ContextUtil.lookupBeanFromExternalServlet(
					"sectionBean", req, res);
			bean.setPartAttachment();
			toolSession.removeAttribute("SENT_TO_FILEPICKER_HELPER");
		}
		// case 3.1: assessment settings mofification, then set assessmentSettingsBean.attachmentList = filepicker list
		else if (target.indexOf("/jsf/author/authorSettings") > -1
				&& ("true").equals(toolSession.getAttribute("SENT_TO_FILEPICKER_HELPER"))){
			AssessmentSettingsBean bean = (AssessmentSettingsBean) ContextUtil.lookupBeanFromExternalServlet(
					"assessmentSettings", req, res);
			bean.setAssessmentAttachment();
			toolSession.removeAttribute("SENT_TO_FILEPICKER_HELPER");
		}
		// case 3.2: published assessment settings mofification, then set assessmentSettingsBean.attachmentList = filepicker list
		else if (target.indexOf("/jsf/author/publishedSettings") > -1
				&& ("true").equals(toolSession.getAttribute("SENT_TO_FILEPICKER_HELPER"))){
			PublishedAssessmentSettingsBean bean = (PublishedAssessmentSettingsBean) ContextUtil.lookupBeanFromExternalServlet(
					"publishedSettings", req, res);
			bean.setAssessmentAttachment();
			toolSession.removeAttribute("SENT_TO_FILEPICKER_HELPER");
		}
		// case 4: create new mail, then set
		// emailBean.attachmentList = filepicker list
		else if (target.indexOf("/jsf/evaluation/createNewEmail") > -1
				&& ("true").equals(toolSession.getAttribute("SENT_TO_FILEPICKER_HELPER"))) {
			EmailBean bean = (EmailBean) ContextUtil.lookupBeanFromExternalServlet("email", req, res);
			bean.prepareAttachment();
			toolSession.removeAttribute("SENT_TO_FILEPICKER_HELPER");
		}
		else if (target.indexOf("/jsf/evaluation/questionScore") > -1
				&& ("true").equals(toolSession.getAttribute("SENT_TO_FILEPICKER_HELPER"))) {
			QuestionScoresBean bean = (QuestionScoresBean) ContextUtil.lookupBeanFromExternalServlet("questionScores", req, res);
			bean.setAttachment((Long) toolSession.getAttribute("itemGradingId"));
			toolSession.removeAttribute("SENT_TO_FILEPICKER_HELPER");
		}
		else if (target.indexOf("/jsf/evaluation/gradeStudentResult") > -1
				&& ("true").equals(toolSession.getAttribute("SENT_TO_FILEPICKER_HELPER"))) {
			ItemContentsBean bean = (ItemContentsBean) ContextUtil.lookupBeanFromExternalServlet("itemContents", req, res);
			bean.setAttachment((Long) toolSession.getAttribute("itemGradingId"));
			toolSession.removeAttribute("SENT_TO_FILEPICKER_HELPER");
		}
		else if (target.indexOf("/jsf/evaluation/totalScores") > -1
				&& ("true").equals(toolSession.getAttribute("SENT_TO_FILEPICKER_HELPER"))) {
			TotalScoresBean bean = (TotalScoresBean) ContextUtil.lookupBeanFromExternalServlet("totalScores", req, res);
			bean.setAttachment((Long) toolSession.getAttribute("assessmentGradingId"));
			toolSession.removeAttribute("SENT_TO_FILEPICKER_HELPER");
		}
		RequestDispatcher dispatcher = getServletContext().getRequestDispatcher(target);
		dispatcher.forward(req, res);
		// restore the request object
		req.removeAttribute(Tool.NATIVE_URL);
		req.removeAttribute(URL_PATH);
		req.removeAttribute(URL_EXT);
	}

	/**
	 * If the request path names a helper tool (a "*.helper" segment),
	 * copies any "session."-prefixed request parameters into the tool
	 * session, records the done-URL, and delegates to the helper tool.
	 *
	 * @return true if the request was handled as a helper call
	 * @throws ToolException if the helper dispatch fails
	 */
	protected boolean sendToHelper(HttpServletRequest req, HttpServletResponse res)
	throws ToolException {
		String path = req.getPathInfo();
		if (path == null) path = "/";
		// 0 parts means the path was just "/", otherwise parts[0] = "", parts[1] = item id, parts[2]
		// if present is "edit"...
		String[] parts = path.split("/");
		log.debug("***a. sendToHelper.partLength="+parts.length);
		String helperPath =null;
		String toolPath=null;
		// e.g. helper url in Samigo can be /jsf/author/item/sakai.filepicker.helper/tool
		// or /sakai.filepicker.helper
		if (parts.length > 2){
			log.debug("***b. sendToHelper.partLength="+parts.length);
			helperPath = parts[parts.length - 2];
			toolPath = parts[parts.length - 1];
		}
		else if (parts.length == 2){
			log.debug("***c. sendToHelper.partLength="+parts.length);
			helperPath = parts[1];
		}
		else return false;
		if (!helperPath.endsWith(HELPER_EXT)) return false;
		log.debug("****d. sendToHelper, part #1="+helperPath);
		log.debug("****e. sendToHelper, part #2="+toolPath);
		ToolSession toolSession = SessionManager.getCurrentToolSession();
		// Flag checked by dispatch() on return to push picked files into beans.
		toolSession.setAttribute("SENT_TO_FILEPICKER_HELPER", "true");
		// NOTE(review): raw Enumeration; getParameterNames() returns
		// Enumeration<String> in the servlet API - could be parameterized.
		Enumeration params = req.getParameterNames();
		while (params.hasMoreElements()) {
			String paramName = (String)params.nextElement();
			if (paramName.startsWith(HELPER_SESSION_PREFIX)) {
				String attributeName = paramName.substring(HELPER_SESSION_PREFIX.length());
				toolSession.setAttribute(attributeName, req.getParameter(paramName));
			}
		}
		// calc helper id
		int posEnd = helperPath.lastIndexOf(".");
		String helperId = helperPath.substring(0, posEnd);
		log.debug("****f. sendToHelper, helperId="+helperId);
		ActiveTool helperTool = ActiveToolManager.getActiveTool(helperId);
		String url = req.getContextPath() + req.getServletPath();
		// Only set the done-URL once; a helper may itself invoke helpers.
		if (toolSession.getAttribute(helperTool.getId() + Tool.HELPER_DONE_URL) == null) {
			toolSession.setAttribute(helperTool.getId() + Tool.HELPER_DONE_URL,
					url + RESET_ASSESSMENT_BEAN + computeDefaultTarget(true));
		}
		log.debug("****g. sendToHelper, url="+url);
		String context = url + "/"+ helperPath;
		log.debug("****h. sendToHelper, context="+context);
		if (toolPath != null)
			helperTool.help(req, res, context, "/"+toolPath);
		else
			helperTool.help(req, res, context, "");
		return true; // was handled as helper call
	}

	/**
	 * Computes the default dispatch target: the configured default view, or
	 * (when lastVisited is true) the last view visited in this tool session.
	 * Always clears the stored last-visited attribute as a side effect.
	 */
	protected String computeDefaultTarget(boolean lastVisited){
		// setup for the default view as configured
		ToolSession session = SessionManager.getCurrentToolSession();
		String target = "/" + m_default;
		// if we are doing lastVisit and there's a last-visited view, for this tool placement / user, use that
		if (lastVisited) {
			String last = (String) session.getAttribute(LAST_VIEW_VISITED);
			if (last != null) {
				target = last;
			}
		}
		session.removeAttribute(LAST_VIEW_VISITED);
		log.debug("***3. computeDefaultTarget()="+target);
		return target;
	}
}
| |
/**
* Copyright (c) 2013-2020 Contributors to the Eclipse Foundation
*
* <p> See the NOTICE file distributed with this work for additional information regarding copyright
* ownership. All rights reserved. This program and the accompanying materials are made available
* under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
* available at http://www.apache.org/licenses/LICENSE-2.0.txt
*/
package org.locationtech.geowave.core.geotime.index.dimension;
import java.nio.ByteBuffer;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import java.util.TimeZone;
import org.locationtech.geowave.core.index.StringUtils;
import org.locationtech.geowave.core.index.VarintUtils;
import org.locationtech.geowave.core.index.dimension.bin.BinRange;
import org.locationtech.geowave.core.index.dimension.bin.BinValue;
import org.locationtech.geowave.core.index.dimension.bin.IndexBinningStrategy;
import org.locationtech.geowave.core.index.numeric.NumericData;
import org.locationtech.geowave.core.index.numeric.NumericRange;
import org.threeten.extra.Interval;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
/**
* This class is useful for establishing a consistent binning strategy using a unit of time. Each
* bin will then be defined by the boundaries of that unit within the timezone given in the
* constructor. So if the unit is year and the data spreads across 2011-2013, the bins will be 2011,
* 2012, and 2013. The unit chosen should represent a much more significant range than the average
* query range (at least 20x larger) for efficiency purposes. So if the average query is for a 24
* hour period, the unit should not be a day, but could be perhaps a month or a year (depending on
* the temporal extent of the dataset).
*/
public class TemporalBinningStrategy implements IndexBinningStrategy {
public static enum Unit {
MINUTE(Calendar.MINUTE),
HOUR(Calendar.HOUR_OF_DAY),
DAY(Calendar.DAY_OF_MONTH),
WEEK(Calendar.WEEK_OF_YEAR),
MONTH(Calendar.MONTH),
YEAR(Calendar.YEAR),
DECADE(-1);
// java.util.Calendar does not define a field number for decade
// use -1 since that value is unused
private final int calendarEnum;
private Unit(final int calendarEnum) {
this.calendarEnum = calendarEnum;
}
public int toCalendarEnum() {
return calendarEnum;
}
public static Unit getUnit(final int calendarEnum) {
for (final Unit u : values()) {
if (u.calendarEnum == calendarEnum) {
return u;
}
}
throw new IllegalArgumentException(
"Calendar enum '" + calendarEnum + "' not found as a valid unit ");
}
// converter that will be used later
public static Unit fromString(final String code) {
for (final Unit output : Unit.values()) {
if (output.toString().equalsIgnoreCase(code)) {
return output;
}
}
return null;
}
}
protected static final long MILLIS_PER_DAY = 86400000L;
private static final NumberFormat TWO_DIGIT_NUMBER = NumberFormat.getIntegerInstance();
{
TWO_DIGIT_NUMBER.setMinimumIntegerDigits(2);
TWO_DIGIT_NUMBER.setMaximumIntegerDigits(2);
}
private Unit unit;
private String timezone;
  // No-arg constructor - presumably required for serialization/persistence
  // machinery (see VarintUtils usage elsewhere in this class); TODO confirm.
  public TemporalBinningStrategy() {}

  /** Creates a strategy binning in the given unit, using the GMT timezone. */
  public TemporalBinningStrategy(final Unit unit) {
    this(unit, "GMT");
  }

  /**
   * Creates a strategy binning in the given unit within the given timezone.
   *
   * @param unit the granularity of each bin (e.g. YEAR, MONTH)
   * @param timezone a timezone ID understood by {@link TimeZone#getTimeZone}
   */
  public TemporalBinningStrategy(final Unit unit, final String timezone) {
    this.unit = unit;
    this.timezone = timezone;
  }
  /** Minimum offset within a bin (always 0 milliseconds). */
  @Override
  public double getBinMin() {
    return 0;
  }
  /** Maximum offset within a bin: the bin size in milliseconds, minus one. */
  @Override
  public double getBinMax() {
    return getBinSizeMillis() - 1;
  }
/** Method used to bin a raw date in milliseconds to a binned value of the Binning Strategy. */
@Override
public BinValue getBinnedValue(final double value) {
// convert to a calendar and subtract the epoch for the bin
final Calendar epochCal = Calendar.getInstance(TimeZone.getTimeZone(timezone));
epochCal.setTimeInMillis((long) value);
setToEpoch(epochCal);
// use the value to get the bin ID (although the epoch should work fine
// too)
final Calendar valueCal = Calendar.getInstance(TimeZone.getTimeZone(timezone));
valueCal.setTimeInMillis((long) value);
return new BinValue(
getBinId(valueCal),
valueCal.getTimeInMillis() - epochCal.getTimeInMillis());
}
private long getBinSizeMillis() {
long binSizeMillis = MILLIS_PER_DAY;
// use the max possible value for that unit as the bin size
switch (unit) {
case DECADE:
binSizeMillis *= 3653;
break;
case YEAR:
default:
binSizeMillis *= 366;
break;
case MONTH:
binSizeMillis *= 31;
break;
case WEEK:
binSizeMillis *= 7;
break;
case DAY:
break;
case HOUR:
binSizeMillis /= 24;
break;
case MINUTE:
binSizeMillis /= 1440;
break;
}
return binSizeMillis;
}
  /**
   * Truncates the given calendar to the start of the bin containing it, for
   * the configured unit. Relies on deliberate switch fall-through: each case
   * zeroes its own field and then falls into the finer-grained cases so all
   * smaller fields are also reset. WEEK is handled separately at the bottom
   * because its epoch is defined by day-of-week rather than day-of-month.
   * Mutates {@code value} in place.
   */
  @SuppressFBWarnings(
      value = {"SF_SWITCH_FALLTHROUGH", "SF_SWITCH_NO_DEFAULT"},
      justification = "Fallthrough intentional for time parsing; default case is provided")
  protected void setToEpoch(final Calendar value) {
    // reset appropriate values to 0 based on the unit
    switch (unit) {
      case DECADE:
        // round the year down to the start of its decade
        value.set(Calendar.YEAR, ((value.get(Calendar.YEAR) / 10) * 10));
        // don't break so that the other fields are also set to the
        // minimum
      case YEAR:
      default:
        value.set(Calendar.MONTH, value.getActualMinimum(Calendar.MONTH));
        // don't break so that the other fields are also set to the
        // minimum
      case MONTH:
        value.set(Calendar.DAY_OF_MONTH, value.getActualMinimum(Calendar.DAY_OF_MONTH));
        // don't break so that the other fields are also set to the
        // minimum
      case DAY:
        value.set(Calendar.HOUR_OF_DAY, value.getActualMinimum(Calendar.HOUR_OF_DAY));
        // don't break so that the other fields are also set to the
        // minimum
      case HOUR:
        value.set(Calendar.MINUTE, value.getActualMinimum(Calendar.MINUTE));
        // don't break so that the other fields are also set to the
        // minimum
      case MINUTE:
        value.set(Calendar.SECOND, value.getActualMinimum(Calendar.SECOND));
        value.set(Calendar.MILLISECOND, value.getActualMinimum(Calendar.MILLISECOND));
        break; // special handling for week
      case WEEK:
        value.set(Calendar.DAY_OF_WEEK, value.getActualMinimum(Calendar.DAY_OF_WEEK));
        value.set(Calendar.HOUR_OF_DAY, value.getActualMinimum(Calendar.HOUR_OF_DAY));
        value.set(Calendar.MINUTE, value.getActualMinimum(Calendar.MINUTE));
        value.set(Calendar.SECOND, value.getActualMinimum(Calendar.SECOND));
        value.set(Calendar.MILLISECOND, value.getActualMinimum(Calendar.MILLISECOND));
    }
  }
@Override
public int getFixedBinIdSize() {
switch (unit) {
case YEAR:
default:
return 4;
case MONTH:
return 7;
case WEEK:
return 7;
case DAY:
return 10;
case HOUR:
return 13;
case MINUTE:
return 16;
}
}
public byte[] getBinId(final long millis) {
final Calendar valueCal = Calendar.getInstance(TimeZone.getTimeZone(timezone));
valueCal.setTimeInMillis(millis);
return getBinId(valueCal);
}
  /**
   * Builds the human-readable, underscore-separated bin ID for the given
   * calendar instant, with granularity determined by the configured unit
   * (e.g. "2011" for YEAR, "2011_06_14" for DAY). DECADE falls through to
   * the YEAR/default case. Note: {@link Calendar#MONTH} is zero-based, so
   * the month component ranges 00-11; these IDs are persisted, so the
   * format must not be changed.
   */
  private byte[] getBinId(final Calendar value) {
    // this is assuming we want human-readable bin ID's but alternatively we
    // could consider returning a more compressed representation
    switch (unit) {
      case YEAR:
      default:
        return StringUtils.stringToBinary(Integer.toString(value.get(Calendar.YEAR)));
      case MONTH:
        return StringUtils.stringToBinary(
            (Integer.toString(value.get(Calendar.YEAR))
                + "_"
                + TWO_DIGIT_NUMBER.format(value.get(Calendar.MONTH))));
      case WEEK:
        // getWeekYear keeps the year consistent with WEEK_OF_YEAR at
        // year boundaries
        return StringUtils.stringToBinary(
            Integer.toString(value.getWeekYear())
                + "_"
                + TWO_DIGIT_NUMBER.format(value.get(Calendar.WEEK_OF_YEAR)));
      case DAY:
        return StringUtils.stringToBinary(
            (Integer.toString(value.get(Calendar.YEAR))
                + "_"
                + TWO_DIGIT_NUMBER.format(value.get(Calendar.MONTH))
                + "_"
                + TWO_DIGIT_NUMBER.format(value.get(Calendar.DAY_OF_MONTH))));
      case HOUR:
        return StringUtils.stringToBinary(
            (Integer.toString(value.get(Calendar.YEAR))
                + "_"
                + TWO_DIGIT_NUMBER.format(value.get(Calendar.MONTH))
                + "_"
                + TWO_DIGIT_NUMBER.format(value.get(Calendar.DAY_OF_MONTH))
                + "_"
                + TWO_DIGIT_NUMBER.format(value.get(Calendar.HOUR_OF_DAY))));
      case MINUTE:
        return StringUtils.stringToBinary(
            (Integer.toString(value.get(Calendar.YEAR))
                + "_"
                + TWO_DIGIT_NUMBER.format(value.get(Calendar.MONTH))
                + "_"
                + TWO_DIGIT_NUMBER.format(value.get(Calendar.DAY_OF_MONTH))
                + "_"
                + TWO_DIGIT_NUMBER.format(value.get(Calendar.HOUR_OF_DAY))
                + "_"
                + TWO_DIGIT_NUMBER.format(value.get(Calendar.MINUTE))));
    }
  }
/**
 * Parses a human-readable bin ID produced by {@code getBinId()} back into a
 * calendar positioned at the start of that bin. The switch intentionally
 * falls through so each finer unit also parses every coarser field that
 * precedes it in the ID string (MINUTE also parses hour, day, month, year).
 * DECADE and WEEK use dedicated handling and do not fall through.
 *
 * @param binId bin ID bytes, e.g. "yyyy_MM_dd_HH_mm" for MINUTE
 * @return a calendar truncated to the start of the bin's epoch
 */
@SuppressFBWarnings(
    value = {"SF_SWITCH_FALLTHROUGH", "SF_SWITCH_NO_DEFAULT"},
    justification = "Fallthrough intentional for time parsing")
private Calendar getStartEpoch(final byte[] binId) {
    final String str = StringUtils.stringFromBinary(binId);
    final Calendar cal = Calendar.getInstance(TimeZone.getTimeZone(timezone));
    switch (unit) {
        case MINUTE:
            final int minute = Integer.parseInt(str.substring(14, 16));
            cal.set(Calendar.MINUTE, minute);
        case HOUR:
            final int hour = Integer.parseInt(str.substring(11, 13));
            cal.set(Calendar.HOUR_OF_DAY, hour);
        case DAY:
            final int day = Integer.parseInt(str.substring(8, 10));
            cal.set(Calendar.DAY_OF_MONTH, day);
        case MONTH:
            // the ID stores the raw 0-based Calendar.MONTH value, so it is
            // written back without adjustment
            final int month = Integer.parseInt(str.substring(5, 7));
            cal.set(Calendar.MONTH, month);
        case YEAR:
        default:
            final int year = Integer.parseInt(str.substring(0, 4));
            cal.set(Calendar.YEAR, year);
            break; // do not automatically fall-through to decade parsing
        case DECADE:
            int decade = Integer.parseInt(str.substring(0, 4));
            decade = (decade / 10) * 10; // int division will truncate ones
            cal.set(Calendar.YEAR, decade);
            break; // special handling for week
        case WEEK:
            // week IDs are "weekYear_weekOfYear"; position the calendar on
            // the first day of that week
            final int yr = Integer.parseInt(str.substring(0, 4));
            final int weekOfYear = Integer.parseInt(str.substring(5, 7));
            cal.setWeekDate(yr, weekOfYear, cal.getActualMinimum(Calendar.DAY_OF_WEEK));
            break;
    }
    // zero out every field finer than the unit (seconds, millis, etc.)
    setToEpoch(cal);
    return cal;
}
/**
 * Returns the exclusive end of the bin starting at {@code startOfEpoch},
 * i.e. the start of the following bin, obtained by adding exactly one unit
 * (ten years for DECADE). Calendar arithmetic keeps this correct across
 * variable-length months, years and DST transitions.
 */
private Calendar getEndExclusive(final Calendar startOfEpoch) {
    final Calendar end = Calendar.getInstance(TimeZone.getTimeZone(timezone));
    end.setTime(startOfEpoch.getTime());
    final int field;
    int amount = 1;
    switch (unit) {
        case MINUTE:
            field = Calendar.MINUTE;
            break;
        case HOUR:
            field = Calendar.HOUR_OF_DAY;
            break;
        case DAY:
            field = Calendar.DAY_OF_MONTH;
            break;
        case WEEK:
            field = Calendar.WEEK_OF_YEAR;
            break;
        case MONTH:
            field = Calendar.MONTH;
            break;
        case DECADE:
            field = Calendar.YEAR;
            amount = 10;
            break;
        case YEAR:
        default:
            field = Calendar.YEAR;
            break;
    }
    end.add(field, amount);
    return end;
}
/**
 * Computes the bin ranges covering the given interval by delegating to the
 * epoch-millisecond implementation.
 */
public BinRange[] getNormalizedRanges(final Interval range) {
    final long startMillis = range.getStart().toEpochMilli();
    final long endMillis = range.getEnd().toEpochMilli();
    return getNormalizedRanges(startMillis, endMillis);
}
/**
 * Splits the absolute time range [min, max] (epoch millis, inclusive) into
 * one {@link BinRange} per bin of this strategy's unit. Each resulting
 * range's min/max are normalized relative to the start of its bin's epoch.
 * Bins that are fully covered by [min, max] are flagged as full-extent so
 * callers can scan them more efficiently.
 *
 * @param min range start in epoch millis (inclusive)
 * @param max range end in epoch millis (inclusive)
 */
private BinRange[] getNormalizedRanges(final long min, final long max) {
    final Calendar startEpoch = Calendar.getInstance(TimeZone.getTimeZone(timezone));
    final long binSizeMillis = getBinSizeMillis();
    // initialize the epoch to the range min and then reset appropriate
    // values to 0 based on the units
    startEpoch.setTimeInMillis(min);
    setToEpoch(startEpoch);
    // now make sure all bin definitions between the start and end bins
    // are covered
    final long startEpochMillis = startEpoch.getTimeInMillis();
    long epochIterator = startEpochMillis;
    final List<BinRange> bins = new ArrayList<>();
    // track this, so that we can easily declare a range to be the full
    // extent and use the information to perform a more efficient scan
    boolean firstBin = (min != startEpochMillis);
    boolean lastBin = false;
    do {
        // because not every year has 366 days, and not every month has 31
        // days we need to reset next epoch to the actual epoch
        final Calendar nextEpochCal = Calendar.getInstance(TimeZone.getTimeZone(timezone));
        // set it to a value in the middle of the bin just to be sure (for
        // example if the bin size does not get to the next epoch as is
        // the case when units are days and the timezone accounts for
        // daylight savings time)
        nextEpochCal.setTimeInMillis(epochIterator + (long) (binSizeMillis * 1.5));
        setToEpoch(nextEpochCal);
        final long nextEpoch = nextEpochCal.getTimeInMillis();
        final long maxOfBin = nextEpoch - 1;
        final Calendar cal = Calendar.getInstance(TimeZone.getTimeZone(timezone));
        cal.setTimeInMillis(epochIterator);
        long startMillis, endMillis;
        boolean fullExtent;
        if (max <= maxOfBin) {
            // the requested range ends within this bin, so this is the last
            lastBin = true;
            endMillis = max;
            // its questionable whether we use
            fullExtent = (max == maxOfBin);
        } else {
            endMillis = maxOfBin;
            fullExtent = !firstBin;
        }
        if (firstBin) {
            // only the very first bin may start mid-epoch
            startMillis = min;
            firstBin = false;
        } else {
            startMillis = epochIterator;
        }
        // we have the millis for range, but to normalize for this bin we
        // need to subtract the epoch of the bin
        bins.add(
            new BinRange(
                getBinId(cal),
                startMillis - epochIterator,
                endMillis - epochIterator,
                fullExtent));
        epochIterator = nextEpoch;
        // iterate until we reach our end epoch
    } while (!lastBin);
    return bins.toArray(new BinRange[bins.size()]);
}
/**
 * Computes the bin ranges for a numeric (epoch-millisecond) range. A missing
 * or inverted range produces no bins at all.
 */
@Override
public BinRange[] getNormalizedRanges(final NumericData range) {
    if ((range == null) || (range.getMax() < range.getMin())) {
        return new BinRange[0];
    }
    final long minMillis = range.getMin().longValue();
    final long maxMillis = range.getMax().longValue();
    return getNormalizedRanges(minMillis, maxMillis);
}
/**
 * Serializes this strategy as a varint-encoded unit constant followed by the
 * binary form of the time zone ID (the exact inverse of {@code fromBinary}).
 *
 * @return the serialized bytes
 */
@Override
public byte[] toBinary() {
    final byte[] timeZoneBytes = StringUtils.stringToBinary(timezone);
    // Size the buffer from the encoded byte array's length, not from
    // timezone.length(): the character count and the encoded byte count
    // differ for any non-ASCII time zone ID, and using the character count
    // could underallocate and throw a BufferOverflowException on put().
    final ByteBuffer binary =
        ByteBuffer.allocate(timeZoneBytes.length + VarintUtils.signedIntByteLength(unit.calendarEnum));
    VarintUtils.writeSignedInt(unit.calendarEnum, binary);
    binary.put(timeZoneBytes);
    return binary.array();
}
/**
 * Deserializes the form written by {@code toBinary()}: the varint unit
 * constant comes first, and every remaining byte is the time zone ID.
 */
@Override
public void fromBinary(final byte[] bytes) {
    final ByteBuffer buffer = ByteBuffer.wrap(bytes);
    // the varint must be consumed before remaining() is measured
    unit = Unit.getUnit(VarintUtils.readSignedInt(buffer));
    final byte[] timeZoneBytes = new byte[buffer.remaining()];
    buffer.get(timeZoneBytes);
    timezone = StringUtils.stringFromBinary(timeZoneBytes);
}
/**
 * Hashes the concrete class name, the time zone ID, and the unit's calendar
 * constant — the same fields compared by {@code equals()}.
 */
@Override
public int hashCode() {
    final int prime = 31;
    final String className = getClass().getName();
    // seeded with prime * 1, exactly as the conventional expansion
    int result = prime + ((className == null) ? 0 : className.hashCode());
    result = (prime * result) + ((timezone == null) ? 0 : timezone.hashCode());
    result = (prime * result) + ((unit == null) ? 0 : unit.calendarEnum);
    return result;
}
/**
 * Equality is based on the exact concrete class, the time zone ID, and the
 * unit's calendar constant, mirroring {@link #hashCode()}.
 */
@Override
public boolean equals(final Object obj) {
    if (this == obj) {
        return true;
    }
    if (obj == null) {
        return false;
    }
    // exact class match (not instanceof) so subclasses never compare equal
    if (getClass() != obj.getClass()) {
        return false;
    }
    final TemporalBinningStrategy other = (TemporalBinningStrategy) obj;
    if (timezone == null) {
        if (other.timezone != null) {
            return false;
        }
    } else if (!timezone.equals(other.timezone)) {
        return false;
    }
    if (unit == null) {
        return other.unit == null;
    }
    // guard against other.unit being null before reading calendarEnum; the
    // previous code dereferenced other.unit unconditionally here and would
    // throw a NullPointerException instead of returning false
    if (other.unit == null) {
        return false;
    }
    return unit.calendarEnum == other.unit.calendarEnum;
}
/**
 * Converts a bin-relative range back to absolute epoch milliseconds by
 * adding the start-of-epoch for the bin identified by the range's bin ID.
 */
@Override
public NumericRange getDenormalizedRanges(final BinRange binnedRange) {
    final long epochMillis = getStartEpoch(binnedRange.getBinId()).getTimeInMillis();
    return new NumericRange(
        epochMillis + (long) binnedRange.getNormalizedMin(),
        epochMillis + (long) binnedRange.getNormalizedMax());
}
/**
 * Returns the half-open interval [start, end) covered by the given bin ID,
 * where the end is the exclusive start of the following bin.
 */
public Interval getInterval(final byte[] binId) {
    final Calendar start = getStartEpoch(binId);
    final Calendar endExclusive = getEndExclusive(start);
    return Interval.of(start.toInstant(), endExclusive.toInstant());
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.heron.common.utils.metrics;
import java.util.List;
import org.apache.heron.api.generated.TopologyAPI;
import org.apache.heron.api.metric.CountMetric;
import org.apache.heron.api.metric.MeanReducer;
import org.apache.heron.api.metric.MeanReducerState;
import org.apache.heron.api.metric.MultiCountMetric;
import org.apache.heron.api.metric.MultiReducedMetric;
import org.apache.heron.common.basics.SingletonRegistry;
import org.apache.heron.common.config.SystemConfig;
import org.apache.heron.common.utils.misc.PhysicalPlanHelper;
import org.apache.heron.common.utils.topology.TopologyContextImpl;
/**
* Bolt's metrics to be collect
* We need to:
* 1. Define the metrics to be collected
* 2. New them in the constructor
* 3. Register them in registerMetrics(...) by using MetricsCollector's registerMetric(...)
* 4. Expose methods which could be called externally to change the value of metrics
*/
public class FullBoltMetrics extends BoltMetrics {
  // Per-stream acked tuple counts (scoped by streamId and "component/streamId")
  private final MultiCountMetric ackCount;
  // Mean ack latency per stream
  private final MultiReducedMetric<MeanReducerState, Number, Double> processLatency;
  // Mean fail latency per stream
  private final MultiReducedMetric<MeanReducerState, Number, Double> failLatency;
  private final MultiCountMetric failCount;
  private final MultiCountMetric executeCount;
  private final MultiReducedMetric<MeanReducerState, Number, Double> executeLatency;
  // Time in nano-seconds spending in execute() at every interval
  private final MultiCountMetric executeTimeNs;
  private final MultiCountMetric emitCount;
  private final CountMetric tupleAddedToQueue;
  private final MultiCountMetric totalDeserializationTimeNs;
  private final MultiCountMetric totalSerializationTimeNs;
  private final MultiReducedMetric<MeanReducerState, Number, Double> averageSerializationTimeNs;
  private final MultiReducedMetric<MeanReducerState, Number, Double> averageDeserializationTimeNs;
  // The # of times back-pressure happens on outStreamQueue
  // so instance could not produce more tuples
  private final CountMetric outQueueFullCount;

  public FullBoltMetrics() {
    ackCount = new MultiCountMetric();
    processLatency = new MultiReducedMetric<>(new MeanReducer());
    failLatency = new MultiReducedMetric<>(new MeanReducer());
    failCount = new MultiCountMetric();
    executeCount = new MultiCountMetric();
    executeLatency = new MultiReducedMetric<>(new MeanReducer());
    executeTimeNs = new MultiCountMetric();
    emitCount = new MultiCountMetric();
    outQueueFullCount = new CountMetric();
    tupleAddedToQueue = new CountMetric();
    totalDeserializationTimeNs = new MultiCountMetric();
    totalSerializationTimeNs = new MultiCountMetric();
    averageSerializationTimeNs = new MultiReducedMetric<>(new MeanReducer());
    averageDeserializationTimeNs = new MultiReducedMetric<>(new MeanReducer());
  }

  /**
   * Registers every metric with the topology context using the configured
   * system-wide export interval.
   */
  public void registerMetrics(TopologyContextImpl topologyContext) {
    SystemConfig systemConfig =
        (SystemConfig) SingletonRegistry.INSTANCE.getSingleton(SystemConfig.HERON_SYSTEM_CONFIG);
    int interval = (int) systemConfig.getHeronMetricsExportInterval().getSeconds();
    topologyContext.registerMetric("__ack-count", ackCount, interval);
    topologyContext.registerMetric("__process-latency", processLatency, interval);
    topologyContext.registerMetric("__fail-latency", failLatency, interval);
    topologyContext.registerMetric("__fail-count", failCount, interval);
    topologyContext.registerMetric("__execute-count", executeCount, interval);
    topologyContext.registerMetric("__execute-latency", executeLatency, interval);
    topologyContext.registerMetric("__execute-time-ns", executeTimeNs, interval);
    topologyContext.registerMetric("__emit-count", emitCount, interval);
    topologyContext.registerMetric("__out-queue-full-count", outQueueFullCount, interval);
    topologyContext.registerMetric(
        "__tuple-deserialization-time-ns", totalDeserializationTimeNs, interval);
    topologyContext.registerMetric(
        "__tuple-serialization-time-ns", totalSerializationTimeNs, interval);
    // BUGFIX: the "__av-*" names previously registered the TOTAL counters a
    // second time, so the average metrics were updated but never exported
    // and the "__av-*" names reported totals instead of means.
    topologyContext.registerMetric(
        "__av-tuple-deserialization-time-ns", averageDeserializationTimeNs, interval);
    topologyContext.registerMetric(
        "__av-tuple-serialization-time-ns", averageSerializationTimeNs, interval);
    topologyContext.registerMetric("__data-tuple-added-to-outgoing-queue/default",
        tupleAddedToQueue, interval);
  }

  // For MultiCountMetrics, we need to set the default value for all streams.
  // Otherwise, it is possible one metric for a particular stream is null.
  // For instance, the fail-count on a particular stream could be undefined
  // causing metrics not be exported.
  // However, it will not set the Multi Reduced/Assignable Metrics,
  // since we could not have default values for them
  public void initMultiCountMetrics(PhysicalPlanHelper helper) {
    // For bolt, we would consider both input stream and output stream
    List<TopologyAPI.InputStream> inputs = helper.getMyBolt().getInputsList();
    for (TopologyAPI.InputStream inputStream : inputs) {
      String streamId = inputStream.getStream().getId();
      String globalStreamId =
          new StringBuilder(inputStream.getStream().getComponentName()).
              append("/").append(streamId).toString();
      ackCount.scope(streamId);
      failCount.scope(streamId);
      executeCount.scope(streamId);
      executeTimeNs.scope(streamId);
      ackCount.scope(globalStreamId);
      failCount.scope(globalStreamId);
      executeCount.scope(globalStreamId);
      executeTimeNs.scope(globalStreamId);
    }
    List<TopologyAPI.OutputStream> outputs = helper.getMyBolt().getOutputsList();
    for (TopologyAPI.OutputStream outputStream : outputs) {
      String streamId = outputStream.getStream().getId();
      emitCount.scope(streamId);
    }
  }

  public void ackedTuple(String streamId, String sourceComponent, long latency) {
    ackCount.scope(streamId).incr();
    processLatency.scope(streamId).update(latency);
    // Consider there are cases that different streams with the same streamId,
    // but with different source component. We need to distinguish them too.
    String globalStreamId =
        new StringBuilder(sourceComponent).append("/").append(streamId).toString();
    ackCount.scope(globalStreamId).incr();
    processLatency.scope(globalStreamId).update(latency);
  }

  public void failedTuple(String streamId, String sourceComponent, long latency) {
    failCount.scope(streamId).incr();
    failLatency.scope(streamId).update(latency);
    // Consider there are cases that different streams with the same streamId,
    // but with different source component. We need to distinguish them too.
    String globalStreamId =
        new StringBuilder(sourceComponent).append("/").append(streamId).toString();
    failCount.scope(globalStreamId).incr();
    failLatency.scope(globalStreamId).update(latency);
  }

  public void executeTuple(String streamId, String sourceComponent, long latency) {
    executeCount.scope(streamId).incr();
    executeLatency.scope(streamId).update(latency);
    executeTimeNs.scope(streamId).incrBy(latency);
    // Consider there are cases that different streams with the same streamId,
    // but with different source component. We need to distinguish them too.
    String globalStreamId =
        new StringBuilder(sourceComponent).append("/").append(streamId).toString();
    executeCount.scope(globalStreamId).incr();
    executeLatency.scope(globalStreamId).update(latency);
    executeTimeNs.scope(globalStreamId).incrBy(latency);
  }

  public void emittedTuple(String streamId) {
    emitCount.scope(streamId).incr();
  }

  // NOTE(review): increments by one per call and ignores the size argument;
  // presumably the metric counts tuples, not bytes — confirm against callers
  public void addTupleToQueue(int size) {
    tupleAddedToQueue.incr();
  }

  public void updateOutQueueFullCount() {
    outQueueFullCount.incr();
  }

  public void deserializeDataTuple(String streamId, String sourceComponent, long latency) {
    totalDeserializationTimeNs.scope(streamId).incrBy(latency);
    averageDeserializationTimeNs.scope(streamId).update(latency);
    // Consider there are cases that different streams with the same streamId,
    // but with different source component. We need to distinguish them too.
    String globalStreamId =
        new StringBuilder(sourceComponent).append("/").append(streamId).toString();
    totalDeserializationTimeNs.scope(globalStreamId).incrBy(latency);
    averageDeserializationTimeNs.scope(globalStreamId).update(latency);
  }

  public void serializeDataTuple(String streamId, long latency) {
    totalSerializationTimeNs.scope(streamId).incrBy(latency);
    averageSerializationTimeNs.scope(streamId).update(latency);
  }
}
| |
package de.fhac.ti.yagi.vm.memory.models.action;
import de.fhac.ti.yagi.vm.exceptions.IncompatibleOperationException;
import de.fhac.ti.yagi.vm.exceptions.VarNotInScopeException;
import de.fhac.ti.yagi.vm.interfaces.ConditionObject;
import de.fhac.ti.yagi.vm.memory.SetItem;
import de.fhac.ti.yagi.vm.memory.SetType;
import de.fhac.ti.yagi.vm.memory.models.Var;
import java.util.List;
import java.util.Map;
/**
 * An atomic boolean condition evaluated against the interpreter's variable
 * scope. Depending on the grammar rule that produced it ({@link AtomRule}),
 * it compares two variables (FIRST), two sets (SECOND), a variable against a
 * set (THIRD), or holds a pre-computed static boolean (FOURTH/FIFTH).
 */
public class Atom implements ConditionObject {
    // which grammar production this atom came from; selects the evaluate() path
    private AtomRule mRuleNumber;
    private Var mFirstVar;
    private Var mSecondVar;
    private List<SetItem> mFirstSet;
    private SetType mFirstSetType;
    private List<SetItem> mSecondSet;
    private SetType mSecondSetType;
    private CompOperator mCompOp;
    // pre-computed result used for the FOURTH/FIFTH rules
    private boolean mStaticExpression;
    // current variable scope, refreshed via updateScope()
    private Map<String, Var> mScope;

    public Atom(AtomRule rule, Map<String, Var> scope) {
        mRuleNumber = rule;
        mScope = scope;
    }

    /**
     * Evaluates this atom against the current scope.
     *
     * @return the boolean value of the condition (false if no branch applies)
     * @throws IncompatibleOperationException if the operand types differ
     * @throws VarNotInScopeException if an UNDEFINED variable cannot be
     *     resolved in the current scope (FIRST rule only)
     */
    @Override
    public boolean evaluate() throws IncompatibleOperationException, VarNotInScopeException {
        boolean result = false;
        if (mRuleNumber == AtomRule.FIRST) {
            // variable-vs-variable comparison
            if (mFirstVar.getSetType() != mSecondVar.getSetType()) {
                throw new IncompatibleOperationException("Types are incompatible.");
            }
            // INT comp
            if (mFirstVar.getSetType() == SetType.INT) {
                result = evaluateInt();
            }
            // STRING comp
            else if (mFirstVar.getSetType() == SetType.STRING) {
                result = evaluateString();
            }
            // lookup in the current scope required
            else if (mFirstVar.getSetType() == SetType.UNDEFINED) {
                if (mScope.containsKey(mFirstVar.getName()) && mScope.containsKey(mSecondVar.getName())) {
                    // NOTE: resolving replaces the member references with the
                    // scope's Var instances, so later evaluations see them
                    mFirstVar = mScope.get(mFirstVar.getName());
                    mSecondVar = mScope.get(mSecondVar.getName());
                    if (mFirstVar.getSetType() != mSecondVar.getSetType()) {
                        throw new IncompatibleOperationException("Types are incompatible.");
                    }
                    // INT comp
                    if (mFirstVar.getSetType() == SetType.INT) {
                        result = evaluateInt();
                    }
                    // STRING comp
                    else if (mFirstVar.getSetType() == SetType.STRING) {
                        result = evaluateString();
                    }
                } else {
                    throw new VarNotInScopeException("Var [" + mFirstVar.getName() + "] or [" + mSecondVar.getName()
                            + "] is not defined in the current scope.");
                }
            }
        } else if (mRuleNumber == AtomRule.SECOND) {
            // set-vs-set comparison
            if (mFirstSetType != mSecondSetType) {
                throw new IncompatibleOperationException("Types are incompatible.");
            }
            result = false;
            // NOTE(review): only INT sets are compared here; STRING sets fall
            // through to false — confirm whether that is intended
            if (mFirstSetType == SetType.INT) {
                result = evaluateSetComp();
            }
        } else if (mRuleNumber == AtomRule.THIRD) {
            // membership test: is mFirstVar's value contained in mFirstSet?
            if (mFirstVar.getSetType() != mFirstSetType) {
                throw new IncompatibleOperationException("Types are incompatible.");
            }
            result = false;
            // INT comp
            if (mFirstVar.getSetType() == SetType.INT) {
                for (SetItem item : mFirstSet) {
                    int value = Integer.parseInt(mFirstVar.getmValue());
                    int itemVal = Integer.parseInt(item.getValue());
                    if (value == itemVal) {
                        result = true;
                        break;
                    }
                }
            }
            // STRING comp
            else if (mFirstVar.getSetType() == SetType.STRING) {
                for (SetItem item : mFirstSet) {
                    String value = mFirstVar.getmValue();
                    String itemVal = item.getValue();
                    if (value.equals(itemVal)) {
                        result = true;
                        break;
                    }
                }
            }
        } else if (mRuleNumber == AtomRule.FOURTH || mRuleNumber == AtomRule.FIFTH) {
            // static boolean expression, decided at construction time
            result = mStaticExpression;
        }
        return result;
    }

    // Compares the two variables' values as parsed integers using mCompOp.
    private boolean evaluateInt() {
        boolean intEval = false;
        int first = Integer.parseInt(mFirstVar.getmValue());
        int second = Integer.parseInt(mSecondVar.getmValue());
        if (mCompOp == CompOperator.EQUALS) { intEval = first == second; }
        else if (mCompOp == CompOperator.NOT_EQUALS) { intEval = first != second; }
        else if (mCompOp == CompOperator.LESS_OR_EQUAL) { intEval = first <= second; }
        else if (mCompOp == CompOperator.GREATER_OR_EQUAL) { intEval = first >= second; }
        else if (mCompOp == CompOperator.LESS) { intEval = first < second; }
        else if (mCompOp == CompOperator.GREATER) { intEval = first > second; }
        return intEval;
    }

    // Compares the two variables' string values BY LENGTH for every operator.
    // NOTE(review): EQUALS/NOT_EQUALS compare lengths, not contents, so
    // "abc" EQUALS "xyz" is true — presumably a deliberate DSL semantic,
    // but verify against the language specification.
    private boolean evaluateString() {
        boolean stringEval = false;
        String first = mFirstVar.getmValue();
        String second = mSecondVar.getmValue();
        if (mCompOp == CompOperator.EQUALS) { stringEval = first.length() == second.length(); }
        else if (mCompOp == CompOperator.NOT_EQUALS) { stringEval = first.length() != second.length(); }
        else if (mCompOp == CompOperator.LESS_OR_EQUAL) { stringEval = first.length() <= second.length(); }
        else if (mCompOp == CompOperator.GREATER_OR_EQUAL) { stringEval = first.length() >= second.length(); }
        else if (mCompOp == CompOperator.LESS) { stringEval = first.length() < second.length(); }
        else if (mCompOp == CompOperator.GREATER) { stringEval = first.length() > second.length(); }
        return stringEval;
    }

    // Compares the two sets BY SIZE for every operator.
    // NOTE(review): EQUALS compares cardinality only, not element equality —
    // confirm this matches the intended set semantics.
    private boolean evaluateSetComp() {
        boolean setCompResult = false;
        if (mCompOp == CompOperator.EQUALS) { setCompResult = mFirstSet.size() == mSecondSet.size(); }
        else if (mCompOp == CompOperator.NOT_EQUALS) { setCompResult = mFirstSet.size() != mSecondSet.size(); }
        else if (mCompOp == CompOperator.LESS_OR_EQUAL) { setCompResult = mFirstSet.size() <= mSecondSet.size(); }
        else if (mCompOp == CompOperator.GREATER_OR_EQUAL) { setCompResult = mFirstSet.size() >= mSecondSet.size(); }
        else if (mCompOp == CompOperator.LESS) { setCompResult = mFirstSet.size() < mSecondSet.size(); }
        else if (mCompOp == CompOperator.GREATER) { setCompResult = mFirstSet.size() > mSecondSet.size(); }
        return setCompResult;
    }

    public void setFirstVar(Var firstVar) {
        mFirstVar = firstVar;
    }

    public void setSecondVar(Var secondVar) {
        mSecondVar = secondVar;
    }

    public void setFirstSet(List<SetItem> firstSet) {
        mFirstSet = firstSet;
    }

    public void setFirstSetType(SetType firstSetType) {
        mFirstSetType = firstSetType;
    }

    public void setSecondSet(List<SetItem> secondSet) {
        mSecondSet = secondSet;
    }

    public void setSecondSetType(SetType secondSetType) {
        mSecondSetType = secondSetType;
    }

    public void setCompOp(CompOperator compOp) {
        mCompOp = compOp;
    }

    public void setStaticExpression(boolean staticExpression) {
        mStaticExpression = staticExpression;
    }

    /**
     * Replaces this atom's scope and copies the scope's current values into
     * the referenced variables. Only variable-based rules (FIRST/THIRD) are
     * refreshed; static expressions are left untouched.
     */
    public void updateScope(Map<String, Var> scope) {
        mScope = scope;
        for (Map.Entry<String, Var> entry : mScope.entrySet()) {
            String key = entry.getKey();
            Var value = entry.getValue();
            // the atom's scope should only be updated if it's not a static boolean expression
            if (!mStaticExpression && (mRuleNumber == AtomRule.FIRST || mRuleNumber == AtomRule.THIRD)) {
                // NOTE(review): for THIRD-rule atoms mSecondVar may be unset;
                // this relies on mFirstVar matching first — verify
                if (mFirstVar.getName().equals(key)) {
                    mFirstVar.setmValue(value.getmValue());
                    mFirstVar.setmSetType(value.getSetType());
                } else if (mSecondVar.getName().equals(key)) {
                    mSecondVar.setmValue(value.getmValue());
                    mSecondVar.setmSetType(value.getSetType());
                }
            }
        }
    }

    // Which grammar production created this atom.
    public enum AtomRule {
        FIRST,
        SECOND,
        THIRD,
        FOURTH,
        FIFTH
    }
}
| |
package com.example.gpslogger;
import android.Manifest;
import android.app.AlertDialog;
import android.app.PendingIntent;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.pm.PackageManager;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.os.Build;
import android.os.Bundle;
import android.os.SystemClock;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.support.v4.content.LocalBroadcastManager;
import android.support.v7.app.AppCompatActivity;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.Button;
import android.widget.Chronometer;
import android.widget.TextView;
import android.widget.Toast;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.location.ActivityRecognition;
import com.google.android.gms.common.api.Status;
import com.google.android.gms.common.api.ResultCallback;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
public class MainActivity extends AppCompatActivity implements GoogleApiClient.ConnectionCallbacks, GoogleApiClient.OnConnectionFailedListener, ResultCallback<Status>
{
private Button startStopButton;
private Button resetButton;
private Chronometer chrono;
private long timePassed;
private GPSHandler gpsHandler;
private LocationManager locationManager;
private SessionIDManager sessionIDManager;
private NotificationHandler notificationHandler;
private ActivityRecognitionBroadcastReceiver activityRecognitionBroadcastReceiver;
private GoogleApiClient googleApiClient;
private boolean gpsEnabled;
private int sessionID;
private int clicked;
private String activity;
private int confidence;
private String time;
private DateFormat dateFormat;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
dateFormat = new SimpleDateFormat("HH:mm:ss");
activity = "nothing";
confidence = -1;
clicked = 0;
gpsHandler = new GPSHandler();
sessionIDManager = new SessionIDManager();
activityRecognitionBroadcastReceiver = new ActivityRecognitionBroadcastReceiver();
try {
sessionID = sessionIDManager.generateSessionID();
} catch (Exception e) {
e.printStackTrace();
}
gpsHandler.setClicked(clicked);
startStopButton = (Button) findViewById(R.id.start_stop_button);
startStopButton.setOnClickListener(handler);
resetButton = (Button) findViewById(R.id.resetButton);
resetButton.setEnabled(false);
chrono = (Chronometer) findViewById(R.id.chronometer);
timePassed = 0;
chrono.setText("00:00:00");
notificationHandler = new NotificationHandler(getApplicationContext(), 1);
notificationHandler.createNotification();
locationManager = (LocationManager) this.getSystemService(Context.LOCATION_SERVICE);
//noinspection ResourceType
locationManager.requestLocationUpdates(LocationManager.GPS_PROVIDER, 3000, 0, new MyLocationListener(gpsHandler));
gpsEnabled = locationManager.isProviderEnabled(LocationManager.GPS_PROVIDER);
if(gpsEnabled){
startStopButton.setEnabled(true);
} else {
startStopButton.setEnabled(false);
enableGPS(false);
}
googleApiClient = new GoogleApiClient.Builder(this)
.addApi(ActivityRecognition.API)
.addConnectionCallbacks(this)
.addOnConnectionFailedListener(this)
.build();
googleApiClient.connect();
}
public void enableGPS(final boolean gpsEnabled){
if(!gpsEnabled){
AlertDialog.Builder dialog = new AlertDialog.Builder(this);
dialog.setMessage("Activate GPS in settings");
dialog.setNegativeButton("Cancel", new DialogInterface.OnClickListener()
{
@Override
public void onClick(DialogInterface paramDialogInterface, int paramInt) {
enableGPS(false);
}
});
dialog.setPositiveButton("Settings", new DialogInterface.OnClickListener()
{
@Override
public void onClick(DialogInterface paramDialogInterface, int paramInt) {
startActivityForResult(new Intent(android.provider.Settings.ACTION_LOCATION_SOURCE_SETTINGS), 100);
}
});
dialog.show();
}
}
private View.OnClickListener handler = new View.OnClickListener(){
public void onClick(View v){
TextView loggingStatusText = (TextView) findViewById(R.id.loggingStatusTextView);
//Sets the format of the chronometer to HH:MM:SS
chrono.setOnChronometerTickListener(new Chronometer.OnChronometerTickListener()
{
@Override
public void onChronometerTick(Chronometer chronoArg) {
long time = SystemClock.elapsedRealtime() - chronoArg.getBase();
int h = (int) (time / 3600000);
int m = (int) (time - h * 3600000) / 60000;
int s = (int) (time - h * 3600000 - m * 60000) / 1000;
String hh = h < 10 ? "0" + h : h + "";
String mm = m < 10 ? "0" + m : m + "";
String ss = s < 10 ? "0" + s : s + "";
chronoArg.setText(hh + ":" + mm + ":" + ss);
}
});
switch(clicked){
case 0:
loggingStatusText.setText("Logging On");
startStopButton.setText("Stop");
resetButton.setEnabled(false);
notificationHandler = new NotificationHandler(getApplicationContext(), clicked);
notificationHandler.createNotification();
gpsHandler.setClicked(clicked);
clicked++;
SystemClock.setCurrentTimeMillis(timePassed);
chrono.setBase(SystemClock.elapsedRealtime() - timePassed);
chrono.start();
break;
case 1:
loggingStatusText.setText("Logging Off");
startStopButton.setText("Start");
resetButton.setEnabled(true);
notificationHandler = new NotificationHandler(getApplicationContext(), clicked);
notificationHandler.createNotification();
gpsHandler.setClicked(clicked);
clicked--;
chrono.stop();
timePassed = SystemClock.elapsedRealtime() - chrono.getBase();
break;
}
}
};
public void resetApp(View v) throws Exception {
TextView loggingStatusText = (TextView) findViewById(R.id.loggingStatusTextView);
loggingStatusText.setText("Logging Off");
startStopButton.setText("Start");
resetButton.setEnabled(false);
chrono.stop();
clicked = 0;
timePassed = 0;
SystemClock.setCurrentTimeMillis(timePassed);
chrono.setBase(SystemClock.elapsedRealtime() - timePassed);
chrono.setText("00:00:00");
sessionID = sessionIDManager.generateSessionID();
}
@Override
public void onBackPressed() {
moveTaskToBack(true);
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if(keyCode == KeyEvent.KEYCODE_BACK){
moveTaskToBack(true);
return true;
}
return super.onKeyDown(keyCode, event);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_main, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//noinspection SimplifiableIfStatement
if (id == R.id.action_exit) {
notificationHandler = new NotificationHandler(getApplicationContext(), 2);
notificationHandler.createNotification();
if(googleApiClient.isConnected()) {
removeActivityUpdates(getCurrentFocus());
}
finish();
System.exit(0);
return true;
} else if(id == R.id.action_settings) {
startActivityForResult(new Intent(android.provider.Settings.ACTION_LOCATION_SOURCE_SETTINGS), 100);
}
return super.onOptionsItemSelected(item);
}
protected void onStart() {
super.onStart();
LocalBroadcastManager.getInstance(this).registerReceiver(activityRecognitionBroadcastReceiver, new IntentFilter("com.example.gpslogger.ACTIVITY_ACTION"));
}
@Override
public void onConnected(Bundle bundle) {
}
@Override
public void onConnectionSuspended(int i) {
googleApiClient.connect();
}
@Override
public void onConnectionFailed(ConnectionResult connectionResult) {
}
public void requestActivityUpdates(View view) {
if (!googleApiClient.isConnected()) {
Toast.makeText(this, "GoogleApiClient not yet connected", Toast.LENGTH_SHORT).show();
} else {
ActivityRecognition.ActivityRecognitionApi.requestActivityUpdates(googleApiClient, 0, getActivityDetectionPendingIntent()).setResultCallback(this);
}
}
public void removeActivityUpdates(View view) {
ActivityRecognition.ActivityRecognitionApi.removeActivityUpdates(googleApiClient, getActivityDetectionPendingIntent()).setResultCallback(this);
}
/**
 * Builds the PendingIntent that Play Services fires with activity-detection
 * results, targeting {@code ActivityRecognitionIntentService}.
 *
 * @return a service PendingIntent suitable for both requesting and removing updates
 */
private PendingIntent getActivityDetectionPendingIntent() {
    Intent intent = new Intent(this, ActivityRecognitionIntentService.class);
    // FIX: do NOT set a unique (time-based) action on the intent. PendingIntent
    // matching uses Intent.filterEquals, which includes the action, so a fresh
    // action on every call made each PendingIntent distinct: FLAG_UPDATE_CURRENT
    // never matched and removeActivityUpdates could not cancel the updates that
    // requestActivityUpdates registered.
    // NOTE(review): on API 31+ this flag must be combined with FLAG_IMMUTABLE or
    // FLAG_MUTABLE -- confirm the app's targetSdkVersion.
    return PendingIntent.getService(this, 0, intent, PendingIntent.FLAG_UPDATE_CURRENT);
}
/**
 * Result callback for the activity-recognition request/remove calls;
 * the status is intentionally ignored.
 */
@Override
public void onResult(Status status) {
}
/**
 * Receives local broadcasts from the activity-recognition service and caches
 * the latest detected activity name and its confidence in the enclosing
 * activity's {@code activity}/{@code confidence} fields.
 */
public class ActivityRecognitionBroadcastReceiver extends BroadcastReceiver {
    @Override
    public void onReceive(Context context, Intent intent) {
        // FIX: constant-first equals -- an intent delivered without an action
        // would previously NPE on action.equals(...).
        if (!"com.example.gpslogger.ACTIVITY_ACTION".equals(intent.getAction())) {
            return;
        }
        Bundle extras = intent.getExtras();
        if (extras != null) {
            // Keys must match what ActivityRecognitionIntentService puts in
            // the broadcast (not visible in this chunk).
            activity = extras.getString("Activity");
            confidence = extras.getInt("Confidence");
        }
    }
}
/**
 * LocationListener that records each GPS fix, together with the most recent
 * activity-recognition result, into the GPSHandler buffer and then triggers
 * an upload attempt.
 */
private class MyLocationListener implements LocationListener
{
// Buffer/DB helper that accumulates location rows until they are sent.
private GPSHandler gpsHandler;
public MyLocationListener(GPSHandler gpsHandler){
this.gpsHandler = gpsHandler;
}
// Called by the LocationManager for every new fix.
@Override
public void onLocationChanged(Location location) {
// Refresh activity-recognition updates so `activity`/`confidence` stay current.
requestActivityUpdates(getCurrentFocus());
// NOTE(review): `dateFormat`, `activity`, `confidence` and `sessionID` are
// outer-class state mutated elsewhere -- this assumes callbacks arrive on
// the main thread; confirm.
time = dateFormat.format(new Date());
// getClicked() == 0 presumably means "logging active" -- verify against GPSHandler.
if(gpsHandler.getClicked() == 0) {
gpsHandler.addData(location.getLongitude(), location.getLatitude(), location.getAltitude(),
location.getAccuracy(), location.getSpeed(), activity, confidence, sessionID, time);
}
// Attempt to flush any buffered rows to the server.
if(!gpsHandler.listIsEmpty()) {
new NetworkConnection(getApplicationContext(), gpsHandler).execute();
}
}
@Override
public void onStatusChanged(String provider, int status, Bundle extras) {
Toast.makeText(getApplicationContext(),"GPS Status Changed", Toast.LENGTH_SHORT).show();
}
// Re-enable the start/stop control once the GPS provider is available again.
@Override
public void onProviderEnabled(String provider) {
startStopButton.setEnabled(true);
Toast.makeText(getApplicationContext(),"GPS Enabled", Toast.LENGTH_SHORT).show();
}
// Disable the logging control while the GPS provider is unavailable.
@Override
public void onProviderDisabled(String provider) {
startStopButton.setEnabled(false);
Toast.makeText(getApplicationContext(),"GPS Disabled", Toast.LENGTH_SHORT).show();
}
}
}
| |
package com.perforce.p4java.tests.dev.unit.endtoend;
import com.perforce.p4java.client.IClient;
import com.perforce.p4java.core.IChangelist;
import com.perforce.p4java.core.file.FileSpecBuilder;
import com.perforce.p4java.core.file.IFileSpec;
import com.perforce.p4java.impl.generic.client.ClientOptions;
import com.perforce.p4java.impl.mapbased.rpc.sys.helper.SysFileHelperBridge;
import com.perforce.p4java.tests.SimpleServerRule;
import com.perforce.p4java.tests.dev.annotations.TestId;
import com.perforce.p4java.tests.dev.unit.P4JavaRshTestCase;
import org.apache.commons.io.FileUtils;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Ignore;
import org.junit.Test;
import java.io.File;
import java.util.List;
import java.util.Properties;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
 * The ClientOptionsE2ETest class exercises the ClientOptions class.
 * The test verifies the ClientOptions() as it affects files.
 */
@TestId("ClientOptionsE2ETest01")
public class ClientOptionsE2ETest extends P4JavaRshTestCase {

    @ClassRule
    public static SimpleServerRule p4d = new SimpleServerRule("r16.1", ChangelistE2ETest.class.getSimpleName());

    // Shared fixture state, initialized once per class in before().
    private static IClient client = null;
    private static String clientDir;
    private static String sourceFile;

    /**
     * Connects to the RSH test server, selects the default client and creates
     * the base source file that the individual tests copy from.
     */
    @BeforeClass
    public static void before() throws Exception {
        Properties properties = new Properties();
        setupServer(p4d.getRSHURL(), "p4jtestuser", "p4jtestuser", true, properties);
        client = getDefaultClient(server);
        clientDir = defaultTestClientName + File.separator + testId;
        server.setCurrentClient(client);
        sourceFile = client.getRoot() + File.separator + textBaseFile;
        createTestSourceFile(sourceFile, false);
    }

    /**
     * allwrite noallwrite Leaves all files writable on the client;
     * else only checked out files are writable. If set, files may be clobbered
     * ignoring the clobber option below.
     */
    @Test
    public void testSetAllWrite() {
        try {
            debugPrintTestName();
            //get the default options
            ClientOptions clientOpts = new ClientOptions();
            assertFalse("Default setting for isAllWrite should be false.", clientOpts.isAllWrite());
            String clientRoot = client.getRoot();
            assertNotNull("clientRoot should not be Null.", clientRoot);
            server.setCurrentClient(client);
            //create the files
            String newFile = clientRoot + File.separator + clientDir + File.separator + "testfileCO.txt";
            String newFilePath = clientRoot + File.separator + clientDir;
            File file1 = new File(newFilePath + File.separator + prepareTestFile(sourceFile, newFile, true));
            File file2 = new File(newFilePath + File.separator + prepareTestFile(sourceFile, newFile, true));
            File file3 = new File(newFilePath + File.separator + prepareTestFile(sourceFile, newFile, true));
            File file4 = new File(newFilePath + File.separator + prepareTestFile(sourceFile, newFile, true));
            File file5 = new File(newFilePath + File.separator + prepareTestFile(sourceFile, newFile, true));
            //now set the allwrite option to true
            clientOpts.setAllWrite(true);
            assertTrue("isAllWrite should be true.", clientOpts.isAllWrite());
            //create the filelist
            final String[] filePaths = {
                    file1.getAbsolutePath(),
                    file2.getAbsolutePath(),
                    file3.getAbsolutePath(),
                    file4.getAbsolutePath(),
                    file5.getAbsolutePath()
            };
            //add the files, submit them, reopen them
            IChangelist changelistImpl = getNewChangelist(server, client,
                    "Changelist to submit files for " + getName());
            IChangelist changelist = client.createChangelist(changelistImpl);
            List<IFileSpec> fileSpecs = FileSpecBuilder.makeFileSpecList(filePaths);
            assertNotNull("FileSpecs should not be Null.", fileSpecs);
            List<IFileSpec> addedFileSpecs = client.addFiles(fileSpecs, false, 0, P4JTEST_FILETYPE_TEXT, false);
            assertEquals("Number built & added fileSpecs should be equal.", fileSpecs.size(), addedFileSpecs.size());
            //submit files. Check if added files are in the correct changelist.
            List<IFileSpec> reopenedFileSpecs = client.reopenFiles(fileSpecs, changelist.getId(), P4JTEST_FILETYPE_TEXT);
            // submit(true) reopens the files after submit so they stay checked out.
            List<IFileSpec> submittedFileSpecs = changelist.submit(true);
            int numSubmitted = FileSpecBuilder.getValidFileSpecs(submittedFileSpecs).size();
            assertEquals("numSubmitted should equal number of files created.", filePaths.length, numSubmitted);
            //verify the file permission
            assertTrue("File1 should be writeable.", file1.canWrite());
            assertTrue("File2 should be writeable.", file2.canWrite());
            assertTrue("File3 should be writeable.", file3.canWrite());
            assertTrue("File4 should be writeable.", file4.canWrite());
            assertTrue("File5 should be writeable.", file5.canWrite());
            //reset option
            clientOpts.setAllWrite(false);
            assertFalse("Setting for isAllWrite should be false.", clientOpts.isAllWrite());
            //Create a new changelist & open files
            changelistImpl = getNewChangelist(server, client,
                    "Changelist to submit files for " + getName());
            changelist = client.createChangelist(changelistImpl);
            List<IFileSpec> editedFileSpecs = client.editFiles(fileSpecs, false, false, 0, P4JTEST_FILETYPE_TEXT);
            assertEquals("Number built & edit fileSpecs should be equal.", fileSpecs.size(), editedFileSpecs.size());
            reopenedFileSpecs.clear();
            reopenedFileSpecs = client.reopenFiles(fileSpecs, changelist.getId(), P4JTEST_FILETYPE_TEXT);
            //submit files and check if they are writable
            submittedFileSpecs.clear();
            submittedFileSpecs = changelist.submit(false);
            numSubmitted = FileSpecBuilder.getValidFileSpecs(submittedFileSpecs).size();
            assertEquals("numSubmitted should equal number of files created.", filePaths.length, numSubmitted);
            //verify the file permissions
            assertFalse("File1 should not be writeable.", file1.canWrite());
            assertFalse("File2 should not be writeable.", file2.canWrite());
            assertFalse("File3 should not be writeable.", file3.canWrite());
            assertFalse("File4 should not be writeable.", file4.canWrite());
            assertFalse("File5 should not be writeable.", file5.canWrite());
        } catch (Exception exc) {
            fail("Unexpected Exception: " + exc + " - " + exc.getLocalizedMessage());
        }
    }

    /**
     * clobber noclobber Allows 'p4 sync' to overwrite writable files on the client.
     * noclobber is ignored if allwrite is set.
     * This functionality was tested at the setter and getter level, but
     * now we need to verify at the server level if this setting is heeded.
     * Once that's done, this test can (hopefully) pass.
     */
    @Test
    @Ignore("Functionality not yet verified")
    public void testSetClobber() {
        try {
            debugPrintTestName();
            String verOptions = getVerificationString(false, true, false, false, false, false);
            ClientOptions clientOpts = new ClientOptions(verOptions);
            clientOpts.setClobber(true);
            fail("Functionality not yet verified");
        } catch (Exception exc) {
            fail("Unexpected Exception: " + exc + " - " + exc.getLocalizedMessage());
        }
    }

    /**
     * compress nocompress Compresses data sent between the client
     * and server to speed up slow connections.
     * This functionality was tested at the setter and getter level, but
     * now we need to verify at the server level if this setting is heeded.
     * Once that's done, this test can (hopefully) pass.
     */
    @Test
    @Ignore("Functionality not yet verified")
    public void testSetCompress() {
        try {
            debugPrintTestName();
            String verOptions = getVerificationString(false, false, true, false, false, false);
            ClientOptions clientOpts = new ClientOptions(verOptions);
            clientOpts.setCompress(true);
            fail("Functionality not yet verified");
        } catch (Exception exc) {
            fail("Unexpected Exception: " + exc + " - " + exc.getLocalizedMessage());
        }
    }

    /**
     * locked unlocked Allows only the client owner to use the
     * client or change its specification. Prevents the client from being deleted.
     * This functionality was tested at the setter and getter level, but
     * now we need to verify at the server level if this setting is heeded.
     * Once that's done, this test can (hopefully) pass.
     */
    @Test
    @Ignore("Functionality not yet verified")
    public void testSetLocked() {
        try {
            debugPrintTestName();
            String verOptions = getVerificationString(false, false, false, true, false, false);
            ClientOptions clientOpts = new ClientOptions(verOptions);
            clientOpts.setLocked(true);
            fail("Functionality not yet verified");
        } catch (Exception exc) {
            fail("Unexpected Exception: " + exc + " - " + exc.getLocalizedMessage());
        }
    }

    /**
     * modtime nomodtime Causes 'p4 sync' to preserve modification time from submitting client,
     * as with files with +m type modifier. Otherwise modification time is left as
     * when the file was fetched.
     * This functionality was tested at the setter and getter level, but
     * now we need to verify at the server level if this setting is heeded.
     * Once that's done, this test can (hopefully) pass.
     */
    @Test
    public void testSetModtime() {
        try {
            debugPrintTestName();
            ClientOptions clientOpts = new ClientOptions();
            // FIX: this test previously called setRmdir(true) -- a copy-paste
            // from testRmdir -- and never touched the modtime option it is
            // documented to exercise.
            assertFalse("Default setting for isModtime should be false.", clientOpts.isModtime());
            clientOpts.setModtime(true);
            assertTrue("Setting for isModtime should be true.", clientOpts.isModtime());
        } catch (Exception exc) {
            fail("Unexpected Exception: " + exc + " - " + exc.getLocalizedMessage());
        }
    }

    /**
     * rmdir normdir Makes 'p4 sync' attempt to delete a client
     * directory when all files are removed.
     */
    @Test
    public void testRmdir() {
        try {
            debugPrintTestName();
            //get the default options
            ClientOptions tClient = new ClientOptions();
            assertFalse("Default setting for isRmdir() should be false.", tClient.isRmdir());
            tClient.setRmdir(true);
            assertTrue("Setting for isRmdir() should be true.", tClient.isRmdir());
            String clientRoot = client.getRoot();
            assertNotNull("clientRoot should not be Null.", clientRoot);
            //create the files
            String newFile = clientRoot + File.separator + clientDir + File.separator + "RMDIRTEST" + File.separator + "testfileCO.txt";
            String newFilePath = clientRoot + File.separator + clientDir + File.separator + "RMDIRTEST";
            File parentDir = new File(newFilePath);
            // Make sure the directory is writable and empty before generating files.
            SysFileHelperBridge.getSysFileCommands().setWritable(parentDir.getCanonicalPath(), true);
            if (parentDir.exists() && parentDir.isDirectory()) {
                FileUtils.cleanDirectory(parentDir);
            }
            File file1 = new File(newFilePath + File.separator + prepareTestFile(sourceFile, newFile, true));
            File file2 = new File(newFilePath + File.separator + prepareTestFile(sourceFile, newFile, true));
            File file3 = new File(newFilePath + File.separator + prepareTestFile(sourceFile, newFile, true));
            //create the filelist
            final String[] filePaths = {
                    file1.getAbsolutePath(),
                    file2.getAbsolutePath(),
                    file3.getAbsolutePath(),
            };
            //add the files, submit them, reopen them
            IChangelist changelistImpl = getNewChangelist(server, client,
                    "Changelist to submit files for " + getName());
            IChangelist changelist = client.createChangelist(changelistImpl);
            List<IFileSpec> fileSpecs = FileSpecBuilder.makeFileSpecList(filePaths);
            assertNotNull("FileSpecs should not be Null.", fileSpecs);
            List<IFileSpec> addedFileSpecs = client.addFiles(fileSpecs, false, 0, P4JTEST_FILETYPE_TEXT, false);
            assertEquals("Number built & added fileSpecs should be equal.", fileSpecs.size(), addedFileSpecs.size());
            //submit files. Check if added files are in the correct changelist.
            List<IFileSpec> reopenedFileSpecs = client.reopenFiles(fileSpecs, changelist.getId(), P4JTEST_FILETYPE_TEXT);
            List<IFileSpec> submittedFileSpecs = changelist.submit(false);
            int numSubmitted = FileSpecBuilder.getValidFileSpecs(submittedFileSpecs).size();
            assertEquals("numSubmitted should equal number of files created.", filePaths.length, numSubmitted);
            List<IFileSpec> deletedFileSpecs = client.deleteFiles(fileSpecs, 0, false);
            int numDeleted = FileSpecBuilder.getValidFileSpecs(deletedFileSpecs).size();
            // FIX: message previously said "numSubmitted" while checking numDeleted.
            assertEquals("numDeleted should equal number of files created.", filePaths.length, numDeleted);
            //submit the deleted files.
            changelistImpl = getNewChangelist(server, client,
                    "Changelist to submit files for " + getName());
            changelist = client.createChangelist(changelistImpl);
            reopenedFileSpecs.clear();
            reopenedFileSpecs = client.reopenFiles(fileSpecs, changelist.getId(), P4JTEST_FILETYPE_TEXT);
            assertEquals("Number built & reopened fileSpecs should be equal.", fileSpecs.size(), reopenedFileSpecs.size());
            submittedFileSpecs.clear();
            submittedFileSpecs = changelist.submit(false);
            // Sync and verify the now-empty directory was removed (or left empty).
            List<IFileSpec> syncFileSpecs = client.sync(fileSpecs, true, false, false, false);
            dumpFileSpecInfo(syncFileSpecs, "Sync'ed files:");
            assertTrue(parentDir.listFiles() == null || parentDir.listFiles().length == 0);
            tClient.setRmdir(false);
            assertFalse("Setting for isRmdir() should be false.", tClient.isRmdir());
        } catch (Exception exc) {
            fail("Unexpected Exception: " + exc + " - " + exc.getLocalizedMessage());
        }
    }

    /**
     * This helper function takes the boolean values passed in and converts them to Perforce-standard
     * representation of these options for ClientOptions. The string that is returned is useful for
     * comparison against the return value of the toString() method of the ClientOptions class.
     */
    private String getVerificationString(boolean allWriteVal, boolean clobberVal,
            boolean compressVal, boolean lockedVal, boolean modtimeVal, boolean rmdirVal) {
        String vString = null;
        vString = allWriteVal ? "allwrite" : "noallwrite";
        vString += clobberVal ? " clobber" : " noclobber";
        vString += compressVal ? " compress" : " nocompress";
        // NOTE(review): the Perforce client spec spells the negative form
        // "unlocked", not "nolocked" -- verify against ClientOptions.toString()
        // before relying on this string for comparisons.
        vString += lockedVal ? " locked" : " nolocked";
        vString += modtimeVal ? " modtime" : " nomodtime";
        vString += rmdirVal ? " rmdir" : " normdir";
        return vString;
    }

    /** Disconnects from the test server after all tests have run. */
    @AfterClass
    public static void afterAll() throws Exception {
        afterEach(server);
    }
}
| |
package io.agrest.runtime.cayenne.processor.update;
import io.agrest.AgObjectId;
import io.agrest.CompoundObjectId;
import io.agrest.EntityParent;
import io.agrest.EntityUpdate;
import io.agrest.AgException;
import io.agrest.ResourceEntity;
import io.agrest.SimpleObjectId;
import io.agrest.meta.AgEntity;
import io.agrest.meta.AgRelationship;
import io.agrest.processor.Processor;
import io.agrest.processor.ProcessorOutcome;
import io.agrest.runtime.cayenne.processor.Util;
import io.agrest.runtime.meta.IMetadataService;
import io.agrest.runtime.processor.update.UpdateContext;
import org.apache.cayenne.Cayenne;
import org.apache.cayenne.DataObject;
import org.apache.cayenne.Fault;
import org.apache.cayenne.ObjectContext;
import org.apache.cayenne.map.DbAttribute;
import org.apache.cayenne.map.DbEntity;
import org.apache.cayenne.map.ObjAttribute;
import org.apache.cayenne.map.ObjEntity;
import org.apache.cayenne.map.ObjRelationship;
import org.apache.cayenne.reflect.ClassDescriptor;
import javax.ws.rs.core.Response;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Update-pipeline stage that pushes the accumulated {@link EntityUpdate}s to
 * the data store: runs the concrete {@link #sync} strategy, commits the
 * Cayenne context, and records the resulting objects (and their prefetched
 * children) on the request's {@link ResourceEntity}.
 *
 * @since 2.7
 */
public abstract class CayenneUpdateDataStoreStage implements Processor<UpdateContext<?>> {

    private IMetadataService metadataService;

    public CayenneUpdateDataStoreStage(IMetadataService metadataService) {
        this.metadataService = metadataService;
    }

    /**
     * Syncs the updates, commits, and stores the parent/child result objects.
     */
    @Override
    @SuppressWarnings("unchecked")
    public ProcessorOutcome execute(UpdateContext<?> context) {
        sync((UpdateContext<DataObject>) context);
        CayenneUpdateStartStage.cayenneContext(context).commitChanges();
        // Stores parent-child result list in ResourceEntity
        // TODO Replace this by dedicated select child stage during of update stages refactoring
        ResourceEntity entity = context.getEntity();
        Map<String, ResourceEntity<?>> children = entity.getChildren();
        List<DataObject> rootResult = new ArrayList<>();
        for (EntityUpdate<?> u : context.getUpdates()) {
            DataObject o = (DataObject) u.getMergedTo();
            //saves root elements
            rootResult.add(o);
            //assigns children
            assignChildrenToParent(o, entity, children);
        }
        entity.setResult(rootResult);
        return ProcessorOutcome.CONTINUE;
    }

    /**
     * Recursively attaches objects already resolved on {@code root} (via
     * readPropertyDirectly) to the child ResourceEntities, keyed by the root's
     * object id. Unresolved relationships (Faults) and nulls are skipped.
     */
    protected void assignChildrenToParent(DataObject root, ResourceEntity<?> parent, Map<String, ResourceEntity<?>> children) {
        if (!children.isEmpty()) {
            for (Map.Entry<String, ResourceEntity<?>> e : children.entrySet()) {
                ResourceEntity childEntity = e.getValue();
                Object result = root.readPropertyDirectly(e.getKey());
                if (result == null || result instanceof Fault) {
                    continue;
                }
                // Compound PK -> CompoundObjectId; single PK -> SimpleObjectId.
                AgObjectId id = root.getObjectId().getIdSnapshot().size() > 1
                        ? new CompoundObjectId(root.getObjectId().getIdSnapshot())
                        : new SimpleObjectId(root.getObjectId().getIdSnapshot().values().iterator().next());
                AgRelationship rel = parent.getAgEntity().getRelationship(e.getKey());
                if (rel.isToMany() && result instanceof List) {
                    List r = (List) result;
                    childEntity.setToManyResult(id, r);
                    for (Object ro : r) {
                        assignChildrenToParent((DataObject) ro, childEntity, childEntity.getChildren());
                    }
                } else {
                    childEntity.setToOneResult(id, result);
                    assignChildrenToParent((DataObject) result, childEntity, childEntity.getChildren());
                }
            }
        }
    }

    /** Pushes the context's updates into the Cayenne context; strategy-specific. */
    protected abstract <T extends DataObject> void sync(UpdateContext<T> context);

    /** Creates one new object per pending EntityUpdate. */
    protected <T extends DataObject> void create(UpdateContext<T> context) {
        ObjectRelator relator = createRelator(context);
        for (EntityUpdate<T> u : context.getUpdates()) {
            createSingle(context, relator, u);
        }
    }

    /** Applies all updates targeting {@code o}, then links it to its parent. */
    protected <T extends DataObject> void updateSingle(UpdateContext<T> context, T o, Collection<EntityUpdate<T>> updates) {
        ObjectRelator relator = createRelator(context);
        for (EntityUpdate<T> u : updates) {
            mergeChanges(u, o, relator);
        }
        relator.relateToParent(o);
    }

    /**
     * Creates a single new object from {@code u}, writing an explicit id when
     * one was supplied (PK attributes, meaningful-PK object attributes, or
     * propagated ids), then merges attribute/relationship changes.
     *
     * @throws AgException if explicit ids are disallowed, the id already
     *         exists, or an id part maps to no known attribute
     */
    protected <T extends DataObject> void createSingle(UpdateContext<T> context, ObjectRelator relator, EntityUpdate<T> u) {
        ObjectContext objectContext = CayenneUpdateStartStage.cayenneContext(context);
        DataObject o = objectContext.newObject(context.getType());
        Map<String, Object> idMap = u.getId();
        // set explicit ID
        if (idMap != null) {
            if (context.isIdUpdatesDisallowed() && u.isExplicitId()) {
                throw new AgException(Response.Status.BAD_REQUEST, "Setting ID explicitly is not allowed: " + idMap);
            }
            ObjEntity entity = objectContext.getEntityResolver().getObjEntity(context.getType());
            if (isPrimaryKey(entity.getDbEntity(), idMap.keySet())) {
                // AgId is the same as the PK,
                // no need to make an additional check that the AGId is unique
                for (DbAttribute pk : entity.getDbEntity().getPrimaryKeys()) {
                    Object id = idMap.get(pk.getName());
                    if (id == null) {
                        continue;
                    }
                    setPrimaryKey(o, entity, pk, id);
                }
            } else {
                // need to make an additional check that the AgId is unique
                // TODO: I guess this should be done in a separate new context
                T existing = Util.findById(objectContext, context.getType(), context.getEntity().getAgEntity(), idMap);
                if (existing != null) {
                    throw new AgException(Response.Status.BAD_REQUEST, "Can't create '" + entity.getName()
                            + "' with id " + CompoundObjectId.mapToString(idMap) + " -- object already exists");
                }
                for (Map.Entry<String, Object> idPart : idMap.entrySet()) {
                    // Find the PK attribute matching this id part, if any.
                    DbAttribute pk = null;
                    for (DbAttribute _pk : entity.getDbEntity().getPrimaryKeys()) {
                        if (_pk.getName().equals(idPart.getKey())) {
                            pk = _pk;
                            break;
                        }
                    }
                    if (pk == null) {
                        // Not a PK column: must map to a plain object attribute.
                        DbAttribute dbAttribute = entity.getDbEntity().getAttribute(idPart.getKey());
                        if (dbAttribute == null) {
                            throw new AgException(Response.Status.BAD_REQUEST, "Can't create '" + entity.getName()
                                    + "' with id " + CompoundObjectId.mapToString(idMap) + " -- unknown db attribute: " + idPart.getKey());
                        }
                        ObjAttribute objAttribute = entity.getAttributeForDbAttribute(dbAttribute);
                        if (objAttribute == null) {
                            throw new AgException(Response.Status.BAD_REQUEST, "Can't create '" + entity.getName()
                                    + "' with id " + CompoundObjectId.mapToString(idMap) + " -- unknown object attribute: " + idPart.getKey());
                        }
                        o.writeProperty(objAttribute.getName(), idPart.getValue());
                    } else {
                        setPrimaryKey(o, entity, pk, idPart.getValue());
                    }
                }
            }
        }
        mergeChanges(u, o, relator);
        relator.relateToParent(o);
    }

    /**
     * Writes a PK value onto {@code o}: as a meaningful object attribute when
     * one is mapped, otherwise as a propagated replacement id.
     *
     * @throws AgException when the PK is database-generated and therefore must
     *         not be supplied by the client
     */
    private void setPrimaryKey(DataObject o, ObjEntity entity, DbAttribute pk, Object id) {
        // 1. meaningful ID
        // TODO: must compile all this... figuring this on the fly is
        // slow
        ObjAttribute opk = entity.getAttributeForDbAttribute(pk);
        if (opk != null) {
            o.writeProperty(opk.getName(), id);
        }
        // 2. PK is auto-generated ... I guess this is sorta
        // expected to fail - generated meaningless PK should not be
        // pushed from the client
        else if (pk.isGenerated()) {
            throw new AgException(Response.Status.BAD_REQUEST, "Can't create '" + entity.getName()
                    + "' with fixed id");
        }
        // 3. probably a propagated ID.
        else {
            // TODO: hopefully this works..
            o.getObjectId().getReplacementIdMap().put(pk.getName(), id);
        }
    }

    /**
     * @return true if all PK columns are represented in {@code keys}
     */
    private boolean isPrimaryKey(DbEntity entity, Collection<String> keys) {
        Collection<DbAttribute> pks = entity.getPrimaryKeys();
        for (DbAttribute pk : pks) {
            if (!keys.contains(pk.getName())) {
                return false;
            }
        }
        return true;
    }

    /**
     * Applies one EntityUpdate to {@code o}: writes plain attribute values,
     * then reconciles each relationship against the supplied related ids
     * (unrelating absent targets, relating newly referenced ones).
     *
     * @throws AgException on a to-one relationship receiving multiple ids, or
     *         when a referenced related object does not exist
     */
    private <T extends DataObject> void mergeChanges(EntityUpdate<T> entityUpdate, DataObject o, ObjectRelator relator) {
        // attributes
        for (Map.Entry<String, Object> e : entityUpdate.getValues().entrySet()) {
            o.writeProperty(e.getKey(), e.getValue());
        }
        // relationships
        ObjectContext context = o.getObjectContext();
        ObjEntity entity = context.getEntityResolver().getObjEntity(o);
        for (Map.Entry<String, Set<Object>> e : entityUpdate.getRelatedIds().entrySet()) {
            ObjRelationship relationship = entity.getRelationship(e.getKey());
            AgRelationship agRelationship = entityUpdate.getEntity().getRelationship(e.getKey());
            // sanity check
            if (agRelationship == null) {
                continue;
            }
            final Set<Object> relatedIds = e.getValue();
            // No ids (or all null) means "clear the relationship".
            if (relatedIds == null || relatedIds.isEmpty() || allElementsNull(relatedIds)) {
                relator.unrelateAll(agRelationship, o);
                continue;
            }
            if (!agRelationship.isToMany() && relatedIds.size() > 1) {
                throw new AgException(Response.Status.BAD_REQUEST,
                        "Relationship is to-one, but received update with multiple objects: " +
                                agRelationship.getName());
            }
            ClassDescriptor relatedDescriptor = context.getEntityResolver().getClassDescriptor(
                    relationship.getTargetEntityName());
            // Unrelate targets absent from relatedIds; targets already related
            // are removed from relatedIds so they are not re-related below.
            relator.unrelateAll(agRelationship, o, new RelationshipUpdate() {
                @Override
                public boolean containsRelatedObject(DataObject relatedObject) {
                    return relatedIds.contains(Cayenne.pkForObject(relatedObject));
                }

                @Override
                public void removeUpdateForRelatedObject(DataObject relatedObject) {
                    relatedIds.remove(Cayenne.pkForObject(relatedObject));
                }
            });
            for (Object relatedId : relatedIds) {
                if (relatedId == null) {
                    continue;
                }
                DataObject related = (DataObject) Cayenne.objectForPK(context, relatedDescriptor.getObjectClass(),
                        relatedId);
                if (related == null) {
                    // FIX: report the specific missing id, not the whole id set
                    // (was e.getValue()).
                    throw new AgException(Response.Status.NOT_FOUND, "Related object '"
                            + relationship.getTargetEntityName() + "' with ID '" + relatedId + "' is not found");
                }
                relator.relate(agRelationship, o, related);
            }
        }
        // record this for the benefit of the downstream code that may want to
        // order the results, etc...
        entityUpdate.setMergedTo(o);
    }

    /** @return true when every element of {@code elements} is null */
    private boolean allElementsNull(Collection<?> elements) {
        for (Object element : elements) {
            if (element != null) {
                return false;
            }
        }
        return true;
    }

    /**
     * Builds an ObjectRelator for the context: a no-op relator when there is no
     * parent, otherwise one that attaches created objects to the resolved
     * parent via its to-one or to-many relationship.
     *
     * @throws AgException when the parent object cannot be found
     */
    protected <T extends DataObject> ObjectRelator createRelator(UpdateContext<T> context) {
        final EntityParent<?> parent = context.getParent();
        if (parent == null) {
            return new ObjectRelator();
        }
        ObjectContext objectContext = CayenneUpdateStartStage.cayenneContext(context);
        ObjEntity parentEntity = objectContext.getEntityResolver().getObjEntity(parent.getType());
        AgEntity<?> parentAgEntity = metadataService.getAgEntity(context.getParent().getType());
        final DataObject parentObject = (DataObject) Util.findById(objectContext, parent.getType(),
                parentAgEntity, parent.getId().get());
        if (parentObject == null) {
            throw new AgException(Response.Status.NOT_FOUND, "No parent object for ID '" + parent.getId()
                    + "' and entity '" + parentEntity.getName() + "'");
        }
        // TODO: check that relationship target is the same as <T> ??
        if (parentEntity.getRelationship(parent.getRelationship()).isToMany()) {
            return new ObjectRelator() {
                @Override
                public void relateToParent(DataObject object) {
                    parentObject.addToManyTarget(parent.getRelationship(), object, true);
                }
            };
        } else {
            return new ObjectRelator() {
                @Override
                public void relateToParent(DataObject object) {
                    parentObject.setToOneTarget(parent.getRelationship(), object, true);
                }
            };
        }
    }

    /** Links objects into relationships; base implementation has no parent. */
    class ObjectRelator {

        void relateToParent(DataObject object) {
            // do nothing
        }

        void relate(AgRelationship agRelationship, DataObject object, DataObject relatedObject) {
            if (agRelationship.isToMany()) {
                object.addToManyTarget(agRelationship.getName(), relatedObject, true);
            } else {
                object.setToOneTarget(agRelationship.getName(), relatedObject, true);
            }
        }

        void unrelateAll(AgRelationship agRelationship, DataObject object) {
            unrelateAll(agRelationship, object, null);
        }

        /**
         * Unrelates every target not kept by {@code relationshipUpdate}; kept
         * targets have their pending update consumed instead.
         */
        void unrelateAll(AgRelationship agRelationship, DataObject object, RelationshipUpdate relationshipUpdate) {
            if (agRelationship.isToMany()) {
                @SuppressWarnings("unchecked")
                List<? extends DataObject> relatedObjects =
                        (List<? extends DataObject>) object.readProperty(agRelationship.getName());
                for (int i = 0; i < relatedObjects.size(); i++) {
                    DataObject relatedObject = relatedObjects.get(i);
                    if (relationshipUpdate == null || !relationshipUpdate.containsRelatedObject(relatedObject)) {
                        object.removeToManyTarget(agRelationship.getName(), relatedObject, true);
                        // removeToManyTarget mutates the live list; step back so
                        // the next element is not skipped.
                        i--;
                    } else {
                        relationshipUpdate.removeUpdateForRelatedObject(relatedObject);
                    }
                }
            } else {
                object.setToOneTarget(agRelationship.getName(), null, true);
            }
        }
    }

    /** Callback describing which related objects an update wants to keep. */
    interface RelationshipUpdate {

        boolean containsRelatedObject(DataObject o);

        void removeUpdateForRelatedObject(DataObject o);
    }
}
| |
package net.java.nboglpack.visualdesigner.shader.variables;
import java.awt.Component;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.Serializable;
import javax.swing.JFormattedTextField;
import javax.swing.JTextField;
import javax.swing.text.Document;
import javax.swing.text.NumberFormatter;
import javax.swing.text.PlainDocument;
public class DataType
{
// <editor-fold defaultstate="collapsed" desc=" static content ">
/* General data type indices */
public static final int DATA_TYPE_BOOL_INDEX = 0;
public static final int DATA_TYPE_INT_INDEX = 1;
public static final int DATA_TYPE_FLOAT_INDEX = 2;
public static final int DATA_TYPE_MAT_INDEX = 3;
public static final int DATA_TYPE_SAMPLER_INDEX = 4;
public static final int DATA_TYPE_SAMPLER_SHADOW_INDEX = 5;
public static final int DATA_TYPE_SAMPLERCUBE_INDEX = 6;
/* General data types */
public static final int DATA_TYPE_VOID = 0;
public static final int DATA_TYPE_BOOL = 1 << DATA_TYPE_BOOL_INDEX;
public static final int DATA_TYPE_INT = 1 << DATA_TYPE_INT_INDEX;
public static final int DATA_TYPE_FLOAT = 1 << DATA_TYPE_FLOAT_INDEX;
public static final int DATA_TYPE_MAT = 1 << DATA_TYPE_MAT_INDEX;
public static final int DATA_TYPE_SAMPLER = 1 << DATA_TYPE_SAMPLER_INDEX;
public static final int DATA_TYPE_SAMPLER_SHADOW = 1 << DATA_TYPE_SAMPLER_SHADOW_INDEX;
public static final int DATA_TYPE_SAMPLERCUBE = 1 << DATA_TYPE_SAMPLERCUBE_INDEX;
public static final int DATA_TYPE_LOWEST_EXPLICIT_BIT = DATA_TYPE_BOOL_INDEX;
public static final int DATA_TYPE_HIGHEST_EXPLICIT_BIT = DATA_TYPE_SAMPLERCUBE_INDEX;
/* Data types Dimensions */
public static final int DATA_TYPE_DIMENSION1 = 1 << 10;
public static final int DATA_TYPE_DIMENSION2 = 1 << 11;
public static final int DATA_TYPE_DIMENSION3 = 1 << 12;
public static final int DATA_TYPE_DIMENSION4 = 1 << 13;
public static final int DATA_TYPE_LOWEST_DIMENSION_BIT = 10;
public static final int DATA_TYPE_HIGHEST_DIMENSION_BIT = 13;
/* Existing generic data types */
public static final int DATA_TYPE_BVEC1 = DATA_TYPE_BOOL | DATA_TYPE_DIMENSION1;
public static final int DATA_TYPE_BVEC2 = DATA_TYPE_BOOL | DATA_TYPE_DIMENSION2;
public static final int DATA_TYPE_BVEC3 = DATA_TYPE_BOOL | DATA_TYPE_DIMENSION3;
public static final int DATA_TYPE_BVEC4 = DATA_TYPE_BOOL | DATA_TYPE_DIMENSION4;
public static final int DATA_TYPE_IVEC1 = DATA_TYPE_INT | DATA_TYPE_DIMENSION1;
public static final int DATA_TYPE_IVEC2 = DATA_TYPE_INT | DATA_TYPE_DIMENSION2;
public static final int DATA_TYPE_IVEC3 = DATA_TYPE_INT | DATA_TYPE_DIMENSION3;
public static final int DATA_TYPE_IVEC4 = DATA_TYPE_INT | DATA_TYPE_DIMENSION4;
public static final int DATA_TYPE_VEC1 = DATA_TYPE_FLOAT | DATA_TYPE_DIMENSION1;
public static final int DATA_TYPE_VEC2 = DATA_TYPE_FLOAT | DATA_TYPE_DIMENSION2;
public static final int DATA_TYPE_VEC3 = DATA_TYPE_FLOAT | DATA_TYPE_DIMENSION3;
public static final int DATA_TYPE_VEC4 = DATA_TYPE_FLOAT | DATA_TYPE_DIMENSION4;
public static final int DATA_TYPE_MAT1 = DATA_TYPE_MAT | DATA_TYPE_DIMENSION1;
public static final int DATA_TYPE_MAT2 = DATA_TYPE_MAT | DATA_TYPE_DIMENSION2;
public static final int DATA_TYPE_MAT3 = DATA_TYPE_MAT | DATA_TYPE_DIMENSION3;
public static final int DATA_TYPE_MAT4 = DATA_TYPE_MAT | DATA_TYPE_DIMENSION4;
public static final int DATA_TYPE_SAMPLER1D = DATA_TYPE_SAMPLER | DATA_TYPE_DIMENSION1;
public static final int DATA_TYPE_SAMPLER2D = DATA_TYPE_SAMPLER | DATA_TYPE_DIMENSION2;
public static final int DATA_TYPE_SAMPLER3D = DATA_TYPE_SAMPLER | DATA_TYPE_DIMENSION3;
public static final int DATA_TYPE_SAMPLER1D_SHADOW = DATA_TYPE_SAMPLER_SHADOW | DATA_TYPE_DIMENSION1;
public static final int DATA_TYPE_SAMPLER2D_SHADOW = DATA_TYPE_SAMPLER_SHADOW | DATA_TYPE_DIMENSION2;
/* Common datatype variations for inbuild functions */
public static final int DATA_TYPE_VEC = DATA_TYPE_FLOAT | DATA_TYPE_DIMENSION2 | DATA_TYPE_DIMENSION3 | DATA_TYPE_DIMENSION4;
public static final int DATA_TYPE_IVEC = DATA_TYPE_INT | DATA_TYPE_DIMENSION2 | DATA_TYPE_DIMENSION3 | DATA_TYPE_DIMENSION4;
public static final int DATA_TYPE_BVEC = DATA_TYPE_BOOL | DATA_TYPE_DIMENSION2 | DATA_TYPE_DIMENSION3 | DATA_TYPE_DIMENSION4;
public static final int DATA_TYPE_GENTYPE = DATA_TYPE_VEC | DATA_TYPE_DIMENSION1;
/* Dimension Descriptions */
public static final String[] DATA_TYPE_DIMENSION_DESC_COORD = new String[] {"x", "y", "z", "w"};
public static final String[] DATA_TYPE_DIMENSION_DESC_COLOR = new String[] {"r", "g", "b", "a"};
public static final String[] DATA_TYPE_DIMENSION_DESC_TEXTURE = new String[] {"s", "t", "p", "q"};
/**
 * Returns a human-readable English description of the given data type mask.
 *
 * Fix: four descriptions used the misspelling "Dimentional" while all the
 * sibling cases spell it "Dimensional"; spelling is now consistent.
 *
 * @param dataType bit mask combining a base-type bit and dimension bit(s)
 * @return descriptive name, or {@code "Undefined"} when the mask is not recognized
 */
public static String getDataTypeName(int dataType) {
    switch(dataType) {
        case DATA_TYPE_VOID:
            return "Void";
        case DATA_TYPE_BOOL:
            return "Boolean";
        case DATA_TYPE_INT:
            return "Integer";
        case DATA_TYPE_FLOAT:
            return "Float";
        case DATA_TYPE_MAT:
            return "Matrix";
        case DATA_TYPE_SAMPLER:
            return "Sampler";
        case DATA_TYPE_SAMPLER_SHADOW:
            return "Shadow Sampler";
        case DATA_TYPE_DIMENSION1:
            return "One Dimensional";
        case DATA_TYPE_DIMENSION2:
            return "Two Dimensional";
        case DATA_TYPE_DIMENSION3:
            return "Three Dimensional";
        case DATA_TYPE_DIMENSION4:
            return "Four Dimensional";
        case DATA_TYPE_SAMPLERCUBE:
            return "Cube Sampler";
        case DATA_TYPE_BVEC1:
            return "One Dimensional Boolean Vector";
        case DATA_TYPE_BVEC2:
            return "Two Dimensional Boolean Vector";
        case DATA_TYPE_BVEC3:
            return "Three Dimensional Boolean Vector";
        case DATA_TYPE_BVEC4:
            return "Four Dimensional Boolean Vector";
        case DATA_TYPE_IVEC1:
            return "One Dimensional Integer Vector";
        case DATA_TYPE_IVEC2:
            return "Two Dimensional Integer Vector";
        case DATA_TYPE_IVEC3:
            return "Three Dimensional Integer Vector";
        case DATA_TYPE_IVEC4:
            return "Four Dimensional Integer Vector";
        case DATA_TYPE_VEC1:
            return "One Dimensional Float Vector";
        case DATA_TYPE_VEC2:
            return "Two Dimensional Float Vector";
        case DATA_TYPE_VEC3:
            return "Three Dimensional Float Vector";
        case DATA_TYPE_VEC4:
            return "Four Dimensional Float Vector";
        case DATA_TYPE_MAT1:
            return "One Dimensional Float Matrix";
        case DATA_TYPE_MAT2:
            return "Two Dimensional Float Matrix";
        case DATA_TYPE_MAT3:
            return "Three Dimensional Float Matrix";
        case DATA_TYPE_MAT4:
            return "Four Dimensional Float Matrix";
        case DATA_TYPE_SAMPLER1D:
            return "One Dimensional Sampler";
        case DATA_TYPE_SAMPLER2D:
            return "Two Dimensional Sampler";
        case DATA_TYPE_SAMPLER3D:
            return "Three Dimensional Sampler";
        case DATA_TYPE_SAMPLER1D_SHADOW:
            return "One Dimensional Shadow Sampler";
        case DATA_TYPE_SAMPLER2D_SHADOW:
            return "Two Dimensional Shadow Sampler";
        case DATA_TYPE_VEC:
            return "Two to Four Dimensional Float Vector";
        case DATA_TYPE_IVEC:
            return "Two to Four Dimensional Integer Vector";
        case DATA_TYPE_BVEC:
            return "Two to Four Dimensional Boolean Vector";
        case DATA_TYPE_GENTYPE:
            return "One to Four Dimensional Float Vector";
        // Generic masks used by built-in function signatures:
        case DATA_TYPE_FLOAT | DATA_TYPE_INT | DATA_TYPE_BOOL:
            return "Float, Integer or Boolean";
        case DATA_TYPE_VEC | DATA_TYPE_IVEC | DATA_TYPE_BVEC:
            return "One to Four Dimensional Float, Integer or Boolean Vector";
        case DATA_TYPE_FLOAT | DATA_TYPE_INT | DATA_TYPE_BOOL | DATA_TYPE_DIMENSION2:
            return "Two Dimensional Float, Integer or Boolean Vector";
        case DATA_TYPE_FLOAT | DATA_TYPE_INT | DATA_TYPE_BOOL | DATA_TYPE_DIMENSION3:
            return "Three Dimensional Float, Integer or Boolean Vector";
        case DATA_TYPE_FLOAT | DATA_TYPE_INT | DATA_TYPE_BOOL | DATA_TYPE_DIMENSION4:
            return "Four Dimensional Float, Integer or Boolean Vector";
        case DATA_TYPE_SAMPLER | DATA_TYPE_SAMPLER_SHADOW | DATA_TYPE_SAMPLERCUBE | DATA_TYPE_DIMENSION1 | DATA_TYPE_DIMENSION2 | DATA_TYPE_DIMENSION3:
            return "One to Three dimensional Sampler of any Type";
    }
    return "Undefined";
}
/**
 * Returns the GLSL keyword for the given data type mask
 * (e.g. {@code "vec3"}, {@code "sampler2D"}).
 *
 * @param dataType bit mask combining a base-type bit and dimension bit(s)
 * @return the GLSL keyword, or {@code "Undefined"} when the mask is not recognized
 */
public static String getDataTypeShortName(int dataType) {
    // Equality chain over the known masks; first match wins, exactly as the
    // corresponding switch would behave (the constants are pairwise distinct).
    if (dataType == DATA_TYPE_VOID) return "void";
    if (dataType == DATA_TYPE_BOOL) return "bool";
    if (dataType == DATA_TYPE_INT) return "int";
    if (dataType == DATA_TYPE_FLOAT) return "float";
    if (dataType == DATA_TYPE_MAT) return "mat";
    if (dataType == DATA_TYPE_SAMPLER) return "sampler";
    if (dataType == DATA_TYPE_SAMPLER_SHADOW) return "samplerShadow";
    if (dataType == DATA_TYPE_DIMENSION1) return "dim1";
    if (dataType == DATA_TYPE_DIMENSION2) return "dim2";
    if (dataType == DATA_TYPE_DIMENSION3) return "dim3";
    if (dataType == DATA_TYPE_DIMENSION4) return "dim4";
    if (dataType == DATA_TYPE_SAMPLERCUBE) return "samplerCube";
    // One-dimensional vectors collapse to their scalar keyword.
    if (dataType == DATA_TYPE_BVEC1) return "bool";
    if (dataType == DATA_TYPE_BVEC2) return "bvec2";
    if (dataType == DATA_TYPE_BVEC3) return "bvec3";
    if (dataType == DATA_TYPE_BVEC4) return "bvec4";
    if (dataType == DATA_TYPE_IVEC1) return "int";
    if (dataType == DATA_TYPE_IVEC2) return "ivec2";
    if (dataType == DATA_TYPE_IVEC3) return "ivec3";
    if (dataType == DATA_TYPE_IVEC4) return "ivec4";
    if (dataType == DATA_TYPE_VEC1) return "float";
    if (dataType == DATA_TYPE_VEC2) return "vec2";
    if (dataType == DATA_TYPE_VEC3) return "vec3";
    if (dataType == DATA_TYPE_VEC4) return "vec4";
    if (dataType == DATA_TYPE_MAT1) return "float";
    if (dataType == DATA_TYPE_MAT2) return "mat2";
    if (dataType == DATA_TYPE_MAT3) return "mat3";
    if (dataType == DATA_TYPE_MAT4) return "mat4";
    if (dataType == DATA_TYPE_SAMPLER1D) return "sampler1D";
    if (dataType == DATA_TYPE_SAMPLER2D) return "sampler2D";
    if (dataType == DATA_TYPE_SAMPLER3D) return "sampler3D";
    if (dataType == DATA_TYPE_SAMPLER1D_SHADOW) return "sampler1DShadow";
    if (dataType == DATA_TYPE_SAMPLER2D_SHADOW) return "sampler2DShadow";
    // Generic (multi-dimension) masks used by built-in function signatures.
    if (dataType == DATA_TYPE_VEC) return "vec";
    if (dataType == DATA_TYPE_IVEC) return "ivec";
    if (dataType == DATA_TYPE_BVEC) return "bvec";
    if (dataType == DATA_TYPE_GENTYPE) return "gentype";
    return "Undefined";
}
/**
 * Determines whether the given dataType mask is fully explicit, i.e. both its
 * base-type part and its dimension part identify exactly one alternative.
 *
 * @param dataType mask to analyze
 * @return {@code true} if the mask is explicit, {@code false} if any part is generic
 */
public static boolean isDataTypeExplicit(int dataType) {
    // Explicit == neither the type part nor the dimension part is generic
    // (De Morgan form of the original !(a || b)).
    return !isDataTypeGeneric(dataType) && !isDimensionGeneric(dataType);
}
/**
 * Determines whether the base-type part of the given mask is generic,
 * i.e. allows more than one base type (int, bool, float, ...).
 *
 * @param dataType mask to analyze
 * @return {@code true} if more than one base-type bit is set
 */
public static boolean isDataTypeGeneric(int dataType) {
    // Count the bits in the base-type region [0 .. HIGHEST_EXPLICIT_BIT];
    // a single set bit means exactly one concrete type.
    int typeBits = countSetBits(dataType, 0, DATA_TYPE_HIGHEST_EXPLICIT_BIT);
    return typeBits > 1;
}
/**
 * Determines whether the dimension part of the given mask is generic,
 * i.e. allows more than one dimension.
 *
 * @param dataType mask to analyze
 * @return {@code true} if more than one dimension bit is set
 */
public static boolean isDimensionGeneric(int dataType) {
    // Count the bits in the dimension region just above the base-type bits;
    // a single set bit means exactly one concrete dimension.
    int dimensionBits = countSetBits(dataType,
            DATA_TYPE_HIGHEST_EXPLICIT_BIT + 1, DATA_TYPE_HIGHEST_DIMENSION_BIT);
    return dimensionBits > 1;
}
/**
 * Counts the number of set bits of {@code value} within the inclusive bit
 * range [{@code bitStart}, {@code bitEnd}].
 * e.g. countSetBits(0b0011101, 0, 31) returns 4,
 *      countSetBits(0b1000001, 0, 31) returns 2.
 *
 * Fix: the original test {@code (value & (1 << i)) > 0} never matched bit 31,
 * because a set sign bit makes the AND result negative; {@code != 0} counts
 * the sign bit correctly.
 *
 * @param value number whose set bits are counted
 * @param bitStart first bit position (inclusive) where counting starts
 * @param bitEnd last bit position (inclusive) where counting stops
 * @return the number of set bits in the range
 */
public static byte countSetBits(int value, int bitStart, int bitEnd) {
    byte count = 0;
    for (int i = bitStart; i <= bitEnd; i++) {
        if ((value & (1 << i)) != 0) {
            count++;
        }
    }
    return count;
}
/**
 * Extracts the base-type describing part of the mask, stripping all
 * dimension bits.
 *
 * @param dataType mask to strip
 * @return only the base-type bits of {@code dataType}
 */
public static int getDataTypeOnly(int dataType) {
    // All-ones mask over bits 0 .. HIGHEST_EXPLICIT_BIT (the base-type region).
    int typeMask = (1 << (DataType.DATA_TYPE_HIGHEST_EXPLICIT_BIT + 1)) - 1;
    return dataType & typeMask;
}
/**
 * Extracts the dimension describing part of the mask, stripping all
 * base-type bits.
 *
 * @param dataType mask to strip
 * @return only the dimension bits of {@code dataType}
 */
public static int getDimensionOnly(int dataType) {
    // Complement of the base-type mask keeps everything above the
    // base-type region (0xFFFFFFFF ^ m is exactly ~m).
    int typeMask = (1 << (DataType.DATA_TYPE_HIGHEST_EXPLICIT_BIT + 1)) - 1;
    return dataType & ~typeMask;
}
/**
 * Returns the Swing component used to edit a value of the given data type.
 * Currently every data type is edited through a plain text field; the
 * {@code dataType} argument is accepted for future specialization.
 *
 * @param dataType mask of the value being edited (currently unused)
 * @return a fresh {@link JTextField}
 */
public static Component getEditingComponent(int dataType) {
    return new JTextField();
}
/**
 * Returns the number of dimensions encoded in the given dataType mask.
 *
 * @param dataType mask to analyze
 * @return the number of dimensions (1-4), or 0 if the dimension part is
 *         generic or absent
 */
public static int getDimensions(int dataType) {
    int dimension = getDimensionOnly(dataType);
    if (dimension == DataType.DATA_TYPE_DIMENSION1) return 1;
    if (dimension == DataType.DATA_TYPE_DIMENSION2) return 2;
    if (dimension == DataType.DATA_TYPE_DIMENSION3) return 3;
    if (dimension == DataType.DATA_TYPE_DIMENSION4) return 4;
    return 0;
}
/**
 * Returns the dimension mask representing the given dimension count
 * (the inverse of {@link #getDimensions}).
 *
 * @param i dimension count, expected 1-4
 * @return the corresponding DATA_TYPE_DIMENSION* constant, or 0 when
 *         {@code i} is out of range
 */
public static int getDataTypeFromDimensions(int i) {
    if (i == 1) return DataType.DATA_TYPE_DIMENSION1;
    if (i == 2) return DataType.DATA_TYPE_DIMENSION2;
    if (i == 3) return DataType.DATA_TYPE_DIMENSION3;
    if (i == 4) return DataType.DATA_TYPE_DIMENSION4;
    return 0;
}
/**
 * Maps the base-type part of the mask to its table index
 * (DATA_TYPE_*_INDEX constant).
 *
 * @param dataType mask whose base type is looked up
 * @return the matching index constant, or 0 when the base type is unknown
 */
public static int getDataTypeIndex(int dataType) {
    int baseType = getDataTypeOnly(dataType);
    if (baseType == DataType.DATA_TYPE_BOOL) return DATA_TYPE_BOOL_INDEX;
    if (baseType == DataType.DATA_TYPE_INT) return DATA_TYPE_INT_INDEX;
    if (baseType == DataType.DATA_TYPE_FLOAT) return DATA_TYPE_FLOAT_INDEX;
    if (baseType == DataType.DATA_TYPE_MAT) return DATA_TYPE_MAT_INDEX;
    if (baseType == DataType.DATA_TYPE_SAMPLER) return DATA_TYPE_SAMPLER_INDEX;
    if (baseType == DataType.DATA_TYPE_SAMPLER_SHADOW) return DATA_TYPE_SAMPLER_SHADOW_INDEX;
    if (baseType == DataType.DATA_TYPE_SAMPLERCUBE) return DATA_TYPE_SAMPLERCUBE_INDEX;
    return 0;
}
/**
 * Checks whether a generic dataType mask supports a specific dataType.
 *
 * @param genericDataType generic dataType mask
 * @param specificDataType dataType whose compatibility with the generic
 *        mask is checked
 * @return {@code true} if {@code specificDataType} is compatible with
 *         {@code genericDataType}
 */
public static boolean isCompatible(int genericDataType, int specificDataType) {
    // Compatible when the specific type sets no bit outside the generic mask
    // (~g is identical to 0xFFFFFFFF ^ g for ints).
    return (specificDataType & ~genericDataType) == 0;
}
// </editor-fold>
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.rocketmq.client;
import com.alibaba.rocketmq.common.MixAll;
import com.alibaba.rocketmq.common.UtilAll;
import com.alibaba.rocketmq.remoting.common.RemotingUtil;
/**
 * Common client configuration shared by RocketMQ producers and consumers.
 *
 * Fixes: removed a stray empty declaration ({@code ;}) after the field list;
 * corrected the heartbeat interval comment (the value is milliseconds, not
 * microseconds, consistent with the other {@code 1000 * n} intervals).
 *
 * @author shijia.wxr
 * @author vongosling
 */
public class ClientConfig {
    /** System property that toggles sending messages over the VIP channel. */
    public static final String SendMessageWithVIPChannelProperty = "com.rocketmq.sendMessageWithVIPChannel";
    // Name server addresses: system property first, environment variable as fallback.
    private String namesrvAddr = System.getProperty(MixAll.NAMESRV_ADDR_PROPERTY, System.getenv(MixAll.NAMESRV_ADDR_ENV));
    private String clientIP = RemotingUtil.getLocalAddress();
    private String instanceName = System.getProperty("rocketmq.client.name", "DEFAULT");
    private int clientCallbackExecutorThreads = Runtime.getRuntime().availableProcessors();
    /**
     * Interval, in milliseconds, for pulling topic routing information from
     * the name server.
     */
    private int pollNameServerInteval = 1000 * 30;
    /**
     * Heartbeat interval, in milliseconds, with the message broker.
     */
    private int heartbeatBrokerInterval = 1000 * 30;
    /**
     * Offset persistence interval, in milliseconds, for consumers.
     */
    private int persistConsumerOffsetInterval = 1000 * 5;
    private boolean unitMode = false;
    private String unitName;
    private boolean vipChannelEnabled = Boolean.parseBoolean(System.getProperty(SendMessageWithVIPChannelProperty, "true"));

    /**
     * Builds the unique client id in the form
     * {@code clientIP@instanceName[@unitName]}.
     *
     * @return the client id string
     */
    public String buildMQClientId() {
        StringBuilder sb = new StringBuilder();
        sb.append(this.getClientIP());
        sb.append("@");
        sb.append(this.getInstanceName());
        if (!UtilAll.isBlank(this.unitName)) {
            sb.append("@");
            sb.append(this.unitName);
        }
        return sb.toString();
    }

    public String getClientIP() {
        return clientIP;
    }

    public void setClientIP(String clientIP) {
        this.clientIP = clientIP;
    }

    public String getInstanceName() {
        return instanceName;
    }

    public void setInstanceName(String instanceName) {
        this.instanceName = instanceName;
    }

    /**
     * Replaces the default instance name with the current process id, so
     * several clients on one host get distinct ids. No-op when the instance
     * name was set explicitly.
     */
    public void changeInstanceNameToPID() {
        if (this.instanceName.equals("DEFAULT")) {
            this.instanceName = String.valueOf(UtilAll.getPid());
        }
    }

    /**
     * Copies every configuration value from {@code cc} into this instance.
     *
     * @param cc configuration to copy from
     */
    public void resetClientConfig(final ClientConfig cc) {
        this.namesrvAddr = cc.namesrvAddr;
        this.clientIP = cc.clientIP;
        this.instanceName = cc.instanceName;
        this.clientCallbackExecutorThreads = cc.clientCallbackExecutorThreads;
        this.pollNameServerInteval = cc.pollNameServerInteval;
        this.heartbeatBrokerInterval = cc.heartbeatBrokerInterval;
        this.persistConsumerOffsetInterval = cc.persistConsumerOffsetInterval;
        this.unitMode = cc.unitMode;
        this.unitName = cc.unitName;
        this.vipChannelEnabled = cc.vipChannelEnabled;
    }

    /**
     * Returns a new {@link ClientConfig} holding a copy of every value of
     * this instance.
     *
     * @return the cloned configuration
     */
    public ClientConfig cloneClientConfig() {
        ClientConfig cc = new ClientConfig();
        cc.namesrvAddr = namesrvAddr;
        cc.clientIP = clientIP;
        cc.instanceName = instanceName;
        cc.clientCallbackExecutorThreads = clientCallbackExecutorThreads;
        cc.pollNameServerInteval = pollNameServerInteval;
        cc.heartbeatBrokerInterval = heartbeatBrokerInterval;
        cc.persistConsumerOffsetInterval = persistConsumerOffsetInterval;
        cc.unitMode = unitMode;
        cc.unitName = unitName;
        cc.vipChannelEnabled = vipChannelEnabled;
        return cc;
    }

    public String getNamesrvAddr() {
        return namesrvAddr;
    }

    public void setNamesrvAddr(String namesrvAddr) {
        this.namesrvAddr = namesrvAddr;
    }

    public int getClientCallbackExecutorThreads() {
        return clientCallbackExecutorThreads;
    }

    public void setClientCallbackExecutorThreads(int clientCallbackExecutorThreads) {
        this.clientCallbackExecutorThreads = clientCallbackExecutorThreads;
    }

    // NOTE: "Inteval" is a historic misspelling kept for API compatibility.
    public int getPollNameServerInteval() {
        return pollNameServerInteval;
    }

    public void setPollNameServerInteval(int pollNameServerInteval) {
        this.pollNameServerInteval = pollNameServerInteval;
    }

    public int getHeartbeatBrokerInterval() {
        return heartbeatBrokerInterval;
    }

    public void setHeartbeatBrokerInterval(int heartbeatBrokerInterval) {
        this.heartbeatBrokerInterval = heartbeatBrokerInterval;
    }

    public int getPersistConsumerOffsetInterval() {
        return persistConsumerOffsetInterval;
    }

    public void setPersistConsumerOffsetInterval(int persistConsumerOffsetInterval) {
        this.persistConsumerOffsetInterval = persistConsumerOffsetInterval;
    }

    public String getUnitName() {
        return unitName;
    }

    public void setUnitName(String unitName) {
        this.unitName = unitName;
    }

    public boolean isUnitMode() {
        return unitMode;
    }

    public void setUnitMode(boolean unitMode) {
        this.unitMode = unitMode;
    }

    public boolean isVipChannelEnabled() {
        return vipChannelEnabled;
    }

    public void setVipChannelEnabled(final boolean vipChannelEnabled) {
        this.vipChannelEnabled = vipChannelEnabled;
    }

    @Override
    public String toString() {
        return "ClientConfig [namesrvAddr=" + namesrvAddr + ", clientIP=" + clientIP + ", instanceName=" + instanceName
            + ", clientCallbackExecutorThreads=" + clientCallbackExecutorThreads + ", pollNameServerInteval=" + pollNameServerInteval
            + ", heartbeatBrokerInterval=" + heartbeatBrokerInterval + ", persistConsumerOffsetInterval="
            + persistConsumerOffsetInterval + ", unitMode=" + unitMode + ", unitName=" + unitName + ", vipChannelEnabled="
            + vipChannelEnabled + "]";
    }
}
| |
/*
* Copyright 2022 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.postgres.dao;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.flywaydb.core.Flyway;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.flyway.FlywayAutoConfiguration;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
import com.netflix.conductor.common.config.TestObjectMapperConfiguration;
import com.netflix.conductor.common.metadata.events.EventHandler;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.core.exception.ApplicationException;
import com.netflix.conductor.postgres.config.PostgresConfiguration;
import static com.netflix.conductor.core.exception.ApplicationException.Code.CONFLICT;
import static com.netflix.conductor.core.exception.ApplicationException.Code.NOT_FOUND;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThrows;
import static org.junit.Assert.assertTrue;
// Integration tests for PostgresMetadataDAO covering workflow definitions,
// task definitions and event handlers against a Flyway-migrated schema.
@ContextConfiguration(
classes = {
TestObjectMapperConfiguration.class,
PostgresConfiguration.class,
FlywayAutoConfiguration.class
})
@RunWith(SpringRunner.class)
@SpringBootTest
public class PostgresMetadataDAOTest {
@Autowired private PostgresMetadataDAO metadataDAO;
@Rule public TestName name = new TestName();
@Autowired Flyway flyway;
// clean the database between tests.
@Before
public void before() {
flyway.clean();
flyway.migrate();
}
// Creating the same name+version twice must fail with CONFLICT.
@Test
public void testDuplicateWorkflowDef() {
WorkflowDef def = new WorkflowDef();
def.setName("testDuplicate");
def.setVersion(1);
metadataDAO.createWorkflowDef(def);
ApplicationException applicationException =
assertThrows(ApplicationException.class, () -> metadataDAO.createWorkflowDef(def));
assertEquals(
"Workflow with testDuplicate.1 already exists!", applicationException.getMessage());
assertEquals(CONFLICT, applicationException.getCode());
}
// Removing a definition that was never created must fail with NOT_FOUND.
@Test
public void testRemoveNotExistingWorkflowDef() {
ApplicationException applicationException =
assertThrows(
ApplicationException.class, () -> metadataDAO.removeWorkflowDef("test", 1));
assertEquals(
"No such workflow definition: test version: 1", applicationException.getMessage());
assertEquals(NOT_FOUND, applicationException.getCode());
}
// End-to-end CRUD walkthrough: create versions 1 and 3, update, then create
// version 2 and verify that getLatestWorkflowDef always tracks the highest
// remaining version as versions are removed.
@Test
public void testWorkflowDefOperations() {
WorkflowDef def = new WorkflowDef();
def.setName("test");
def.setVersion(1);
def.setDescription("description");
def.setCreatedBy("unit_test");
def.setCreateTime(1L);
def.setOwnerApp("ownerApp");
def.setUpdatedBy("unit_test2");
def.setUpdateTime(2L);
metadataDAO.createWorkflowDef(def);
List<WorkflowDef> all = metadataDAO.getAllWorkflowDefs();
assertNotNull(all);
assertEquals(1, all.size());
assertEquals("test", all.get(0).getName());
assertEquals(1, all.get(0).getVersion());
WorkflowDef found = metadataDAO.getWorkflowDef("test", 1).get();
assertTrue(EqualsBuilder.reflectionEquals(def, found));
def.setVersion(3);
metadataDAO.createWorkflowDef(def);
all = metadataDAO.getAllWorkflowDefs();
assertNotNull(all);
assertEquals(2, all.size());
assertEquals("test", all.get(0).getName());
assertEquals(1, all.get(0).getVersion());
found = metadataDAO.getLatestWorkflowDef(def.getName()).get();
assertEquals(def.getName(), found.getName());
assertEquals(def.getVersion(), found.getVersion());
assertEquals(3, found.getVersion());
all = metadataDAO.getAllLatest();
assertNotNull(all);
assertEquals(1, all.size());
assertEquals("test", all.get(0).getName());
assertEquals(3, all.get(0).getVersion());
all = metadataDAO.getAllVersions(def.getName());
assertNotNull(all);
assertEquals(2, all.size());
assertEquals("test", all.get(0).getName());
assertEquals("test", all.get(1).getName());
assertEquals(1, all.get(0).getVersion());
assertEquals(3, all.get(1).getVersion());
def.setDescription("updated");
metadataDAO.updateWorkflowDef(def);
found = metadataDAO.getWorkflowDef(def.getName(), def.getVersion()).get();
assertEquals(def.getDescription(), found.getDescription());
List<String> allnames = metadataDAO.findAll();
assertNotNull(allnames);
assertEquals(1, allnames.size());
assertEquals(def.getName(), allnames.get(0));
def.setVersion(2);
metadataDAO.createWorkflowDef(def);
found = metadataDAO.getLatestWorkflowDef(def.getName()).get();
assertEquals(def.getName(), found.getName());
assertEquals(3, found.getVersion());
metadataDAO.removeWorkflowDef("test", 3);
Optional<WorkflowDef> deleted = metadataDAO.getWorkflowDef("test", 3);
assertFalse(deleted.isPresent());
found = metadataDAO.getLatestWorkflowDef(def.getName()).get();
assertEquals(def.getName(), found.getName());
assertEquals(2, found.getVersion());
metadataDAO.removeWorkflowDef("test", 1);
deleted = metadataDAO.getWorkflowDef("test", 1);
assertFalse(deleted.isPresent());
found = metadataDAO.getLatestWorkflowDef(def.getName()).get();
assertEquals(def.getName(), found.getName());
assertEquals(2, found.getVersion());
}
// CRUD walkthrough for task definitions: create, update, bulk-create nine
// more, list, then remove the extras and verify only the original remains.
@Test
public void testTaskDefOperations() {
TaskDef def = new TaskDef("taskA");
def.setDescription("description");
def.setCreatedBy("unit_test");
def.setCreateTime(1L);
def.setInputKeys(Arrays.asList("a", "b", "c"));
def.setOutputKeys(Arrays.asList("01", "o2"));
def.setOwnerApp("ownerApp");
def.setRetryCount(3);
def.setRetryDelaySeconds(100);
def.setRetryLogic(TaskDef.RetryLogic.FIXED);
def.setTimeoutPolicy(TaskDef.TimeoutPolicy.ALERT_ONLY);
def.setUpdatedBy("unit_test2");
def.setUpdateTime(2L);
metadataDAO.createTaskDef(def);
TaskDef found = metadataDAO.getTaskDef(def.getName());
assertTrue(EqualsBuilder.reflectionEquals(def, found));
def.setDescription("updated description");
metadataDAO.updateTaskDef(def);
found = metadataDAO.getTaskDef(def.getName());
assertTrue(EqualsBuilder.reflectionEquals(def, found));
assertEquals("updated description", found.getDescription());
for (int i = 0; i < 9; i++) {
TaskDef tdf = new TaskDef("taskA" + i);
metadataDAO.createTaskDef(tdf);
}
List<TaskDef> all = metadataDAO.getAllTaskDefs();
assertNotNull(all);
assertEquals(10, all.size());
Set<String> allnames = all.stream().map(TaskDef::getName).collect(Collectors.toSet());
assertEquals(10, allnames.size());
List<String> sorted = allnames.stream().sorted().collect(Collectors.toList());
assertEquals(def.getName(), sorted.get(0));
for (int i = 0; i < 9; i++) {
assertEquals(def.getName() + i, sorted.get(i + 1));
}
for (int i = 0; i < 9; i++) {
metadataDAO.removeTaskDef(def.getName() + i);
}
all = metadataDAO.getAllTaskDefs();
assertNotNull(all);
assertEquals(1, all.size());
assertEquals(def.getName(), all.get(0).getName());
}
@Test
public void testRemoveNotExistingTaskDef() {
ApplicationException applicationException =
assertThrows(
ApplicationException.class,
() -> metadataDAO.removeTaskDef("test" + UUID.randomUUID().toString()));
assertEquals("No such task definition", applicationException.getMessage());
assertEquals(NOT_FOUND, applicationException.getCode());
}
// Event handlers: inactive handlers are stored but not returned for their
// event; updating the handler moves it from event1 to event2.
@Test
public void testEventHandlers() {
String event1 = "SQS::arn:account090:sqstest1";
String event2 = "SQS::arn:account090:sqstest2";
EventHandler eventHandler = new EventHandler();
eventHandler.setName(UUID.randomUUID().toString());
eventHandler.setActive(false);
EventHandler.Action action = new EventHandler.Action();
action.setAction(EventHandler.Action.Type.start_workflow);
action.setStart_workflow(new EventHandler.StartWorkflow());
action.getStart_workflow().setName("workflow_x");
eventHandler.getActions().add(action);
eventHandler.setEvent(event1);
metadataDAO.addEventHandler(eventHandler);
List<EventHandler> all = metadataDAO.getAllEventHandlers();
assertNotNull(all);
assertEquals(1, all.size());
assertEquals(eventHandler.getName(), all.get(0).getName());
assertEquals(eventHandler.getEvent(), all.get(0).getEvent());
List<EventHandler> byEvents = metadataDAO.getEventHandlersForEvent(event1, true);
assertNotNull(byEvents);
assertEquals(0, byEvents.size()); // event is marked as in-active
eventHandler.setActive(true);
eventHandler.setEvent(event2);
metadataDAO.updateEventHandler(eventHandler);
all = metadataDAO.getAllEventHandlers();
assertNotNull(all);
assertEquals(1, all.size());
byEvents = metadataDAO.getEventHandlersForEvent(event1, true);
assertNotNull(byEvents);
assertEquals(0, byEvents.size());
byEvents = metadataDAO.getEventHandlersForEvent(event2, true);
assertNotNull(byEvents);
assertEquals(1, byEvents.size());
}
}
| |
/*
AODV Overlay v0.5.3 Copyright 2007-2010 Lancaster University
Rajiv Ramdhany
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package net;
import interfaces.IAODVMsgProcessing.IAODVMsgProcessing;
import interfaces.IConfigInfo.IConfigInfo;
import interfaces.IControl.IControl;
import interfaces.ILog.ILog;
import interfaces.IOSOperations.IOSOperations;
import interfaces.IRouteDiscovery.IRouteDiscovery;
import interfaces.IState.IAodvState;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Proxy;
import java.net.InetAddress;
import java.util.LinkedList;
import jpcap.JpcapCaptor;
import jpcap.NetworkInterface;
import jpcap.PacketReceiver;
import jpcap.packet.EthernetPacket;
import jpcap.packet.IPPacket;
import jpcap.packet.Packet;
import jpcap.packet.UDPPacket;
import log.Logging;
import msg.AODVMessage;
import msg.IPPkt;
import msg.RERR;
import msg.RREP;
import msg.RREPACK;
import msg.RREQ;
import OpenCOM.Delegator;
import OpenCOM.IConnections;
import OpenCOM.ILifeCycle;
import OpenCOM.IMetaInterface;
import OpenCOM.IUnknown;
import OpenCOM.OCM_SingleReceptacle;
import OpenCOM.OpenCOMComponent;
import aodvstate.ConfigInfo;
import exceptions.RoutingCFException;
/**
* This class handles the threads related to listening for packets and processing
* the received packets. There are 3 threads,
* 1. thread to capture packets on the LO interface
* 2. thread to capture packets that traverse the AODV capable
* network interface (eg. eth1, eth2, etc.)
* 3. thread to process that packets that are placed in a queue by
* the previous threads
*
* There can be 3 types of actions that the processing thread can request
* 1. A packet that was received on network interface and is a UDP packet
* with destination port 654 - this means an AODV message that needs
to be processed
* 2. A packet that was picked up by the LO interface that does not
* contain the "127.." address or the destination MAC address is 00:00:00:00:00:00
* - this means this destination IP Address of this packet has no route made,
 * therefore requires a route discovery to be initiated
* 3. Any other packet on the network interface - this means it is a packet
* that uses a route, so update the route lifetimes
*
* @author : Rajiv Ramdhany
* @date : 28-jul-2007
* @email : r.ramdhany@lancaster.ac.uk
*
* @modification-history-ver-0.11
* @author : Rajiv Ramdhany
* @date : 30-nov-2007
* @email : r.ramdhany@lancaster.ac.uk
* @modification - Changed the class to process packets of LO interface, for the
* purpose of getting packets that require routes from the LO
* interface
*
*/
public class PacketListenerLinux extends OpenCOMComponent implements Runnable, IUnknown,
IMetaInterface, ILifeCycle, IConnections {
// Receptacles (OpenCOM connection points) through which this component
// reaches its collaborators: configuration, AODV control, state, logging
// and OS-level operations.
public OCM_SingleReceptacle<IConfigInfo> m_PSR_IConfigInfo;
public OCM_SingleReceptacle<IControl> m_PSR_IAodvControl;
public OCM_SingleReceptacle<IAodvState> m_PSR_IAodvState;
public OCM_SingleReceptacle<ILog> m_PSR_ILog;
public OCM_SingleReceptacle<IOSOperations> m_PSR_IOSOperations;
// Queue of captured packets, filled by the two capture threads and drained
// by the processing loop in run(); always accessed under synchronized(pktQueue).
private LinkedList pktQueue;
// Scratch fields reused while processing the packet currently at the head
// of the queue (the processing loop is single-threaded).
private IPPacket ipPacket;
private UDPPacket udpPkt;
private EthernetPacket ethPkt;
private IPPkt ipPkt;
private RREQ rreqMsg;
private RREP rrepMsg;
private RERR rerrMsg;
private RREPACK rrepackMsg;
private InetAddress aodvMsgSrcIPAddr;
private byte ipAddr[];
// Set false when the current packet is a HELLO so that route-lifetime
// updating is skipped for it (see processNetIfcPacket).
private boolean updateForRouteUse;
// Active AODV configuration; NOTE(review): appears to be assigned externally
// (public, not set in the constructor) — confirm wiring before relying on it.
public ConfigInfo cfgInfo;
/**
 * Creates the packet listener component: initialises the shared packet
 * queue and the receptacles used to connect this component to the
 * configuration, control, state, logging and OS-operations components.
 *
 * @param pRuntime the OpenCOM runtime this component is created in
 */
public PacketListenerLinux(IUnknown pRuntime) {
super(pRuntime);
// queue shared between the capture threads and the processing loop
pktQueue = new LinkedList();
m_PSR_IConfigInfo = new OCM_SingleReceptacle<IConfigInfo>(IConfigInfo.class);
m_PSR_IAodvControl = new OCM_SingleReceptacle<IControl>(IControl.class);
m_PSR_IAodvState = new OCM_SingleReceptacle<IAodvState>(IAodvState.class);
m_PSR_ILog = new OCM_SingleReceptacle<ILog>(ILog.class);
m_PSR_IOSOperations = new OCM_SingleReceptacle<IOSOperations>(IOSOperations.class);
}
/**
 * Main processing loop. Clears the shared queue, starts the two capture
 * threads (network interface and LO interface), then repeatedly drains the
 * queue and dispatches each IP packet either to route discovery or to
 * normal network-interface processing. Exits when the router is marked
 * inactive or when an unexpected error is logged.
 */
public void run() {
PacketInfo pktInfo;
synchronized(pktQueue) {
pktQueue.clear();
}
(new NetIfcPacketQueueBuilder()).start();
(new LoIfcPacketQueueBuilder()).start();
while(true) {
try {
// if route inactive, stop the thread
if(!(cfgInfo.RouterActive)) {
return;
}
// removeFirst() throws on an empty queue; the catch turns that
// into a null "no packet" marker rather than an error.
try {
synchronized(pktQueue) {
pktInfo = (PacketInfo) pktQueue.removeFirst();
}
} catch(Exception e) {
pktInfo = null;
}
// stay a little while if no packets in the queue
// NOTE(review): with sleep(500) commented out this branch busy-waits
// on an empty queue — confirm that is intended.
if(pktInfo == null) {
//sleep(500);
continue;
}
//if packet is not IP, don't do anything
if(!(pktInfo.packet instanceof IPPacket)) {
continue;
}
ipPacket = (IPPacket) pktInfo.packet;
// Only packets of the given IP version used
if(ipPacket.version != cfgInfo.ipVersionVal) {
continue;
}
// process packet according to type: LO-interface packets need a
// route discovery, everything else is normal traffic
if(pktInfo.ifcType == PacketInfo.PACKET_REQUIRES_ROUTE) {
processRouteRequiredPacket();
} else {
processNetIfcPacket();
}
} catch(Exception e) {
// any unexpected failure terminates the processing loop after logging
m_PSR_ILog.m_pIntf.write(Logging.CRITICAL_LOGGING,
"Packet Handler - Problem in loop - " + e);
return;
}
}
}
/**
 * Processes a packet (picked up on the LO interface) that needs a route:
 * filters out loopback ("127.*") traffic and packets not originated by this
 * node, then hands the packet to the control component to start a route
 * discovery.
 *
 * @exception Exception - thrown when errors occur, including
 *            RoutingCFException when no configuration component is connected
 */
void processRouteRequiredPacket() throws Exception {
if (cfgInfo==null)
throw new RoutingCFException(RoutingCFException.NO_CONFIGINFO_CONNECTED);
// if the packet contains src or dest IP addr as "127..." then
// do not start a route discovery
if(ipPacket.dst_ip.getHostAddress().startsWith("127.")
|| ipPacket.src_ip.getHostAddress().startsWith("127.")) {
return;
}
// if the originator of packet is not me, then start local repair
if(!(InetAddress.getByName(ipPacket.src_ip.getHostAddress()).equals(cfgInfo.ipAddressVal))) {
// no local repair implemented
return;
}
// else, this is a packet the requires a route: wrap it and ask the
// control component to run route discovery for its destination
IRouteDiscovery controlComp = (IRouteDiscovery) m_PSR_IAodvControl.m_pIntf;
ipPkt = new IPPkt(cfgInfo, m_PSR_IAodvState.m_pIntf,
ipPacket, cfgInfo.ifaceNameVal);
controlComp.processRouteDiscovery(ipPkt);
}
/**
 * Processes a packet that arrived on the AODV network interface. UDP
 * packets addressed to the AODV port from another node are decoded by
 * their first payload byte into RREQ/RREP/RERR/RREP-ACK messages and
 * dispatched to the control component (a RREP whose from/orig/dest
 * addresses all match is treated as a HELLO). All other packets — and
 * AODV messages other than HELLO — additionally refresh the lifetime of
 * the route they travelled on.
 *
 * @exception Exception - thrown when errors occur, including
 *            RoutingCFException when no configuration component is connected
 */
void processNetIfcPacket() throws Exception {
updateForRouteUse = true;
if (cfgInfo==null)
throw new RoutingCFException(RoutingCFException.NO_CONFIGINFO_CONNECTED);
IAodvState pStateComp = m_PSR_IAodvState.m_pIntf;
IAODVMsgProcessing controlComp = (IAODVMsgProcessing) m_PSR_IAodvControl.m_pIntf;
// if a packet is UDP, has dest port 654 and is not originating
// from your own machine, then this packet is an AODV message
// that was received by your own machine
if(ipPacket instanceof UDPPacket) {
udpPkt = (UDPPacket) ipPacket;
if(udpPkt.dst_port == AODVMessage.AODV_PORT) {
aodvMsgSrcIPAddr = InetAddress.getByName(udpPkt.src_ip.getHostAddress());
if(!aodvMsgSrcIPAddr.equals(cfgInfo.ipAddressVal)) {
// create msg based on type and call process function
// (first payload byte carries the AODV message type code)
switch(udpPkt.data[0]) {
case AODVMessage.AODV_RREQ_MSG_CODE:
rreqMsg = new RREQ(cfgInfo,
pStateComp,
udpPkt,
cfgInfo.ifaceNameVal);
controlComp.processAODVMsgRREQ(rreqMsg);
break;
case AODVMessage.AODV_RREP_MSG_CODE:
rrepMsg = new RREP(cfgInfo,
pStateComp,
udpPkt,
cfgInfo.ifaceNameVal);
// if the RREP is a HELLO, the process differently
if(rrepMsg.fromIPAddr.equals(rrepMsg.origIPAddr)
&& rrepMsg.origIPAddr.equals(rrepMsg.destIPAddr)) {
controlComp.processAODVMsgHELLO(rrepMsg);
// HELLOs must not count as route usage
updateForRouteUse = false;
} else {
controlComp.processAODVMsgRREP(rrepMsg);
}
break;
case AODVMessage.AODV_RERR_MSG_CODE:
rerrMsg = new RERR(cfgInfo,
pStateComp,
udpPkt,
cfgInfo.ifaceNameVal);
controlComp.processAODVMsgRERR(rerrMsg);
break;
case AODVMessage.AODV_RREPACK_MSG_CODE:
rrepackMsg = new RREPACK(cfgInfo,
pStateComp,
udpPkt,
cfgInfo.ifaceNameVal);
controlComp.processAODVMsgRREPACK(rrepackMsg);
break;
}
}
}
}
// for certain AODV messages, this update is not done
if(updateForRouteUse) {
// if it is not a AODV msg, then it means a packet which
// is using an existing route
ipPkt = new IPPkt(cfgInfo, pStateComp,
ipPacket, cfgInfo.ifaceNameVal);
controlComp.processExistingRouteUse(ipPkt);
}
}
/**
 * Queue entry pairing a captured packet with a flag that tells the
 * consumer how the packet must be handled when it is dequeued.
 */
public class PacketInfo {
    /** The packet needs route discovery before it can be forwarded. */
    public static final int PACKET_REQUIRES_ROUTE = 1;
    /** The packet can be processed as ordinary traffic. */
    public static final int PACKET_IS_NORMAL = 2;

    /** One of the PACKET_* flags above. */
    public int ifcType;
    /** The captured packet itself. */
    public Packet packet;

    public PacketInfo(int type, Packet capturedPacket) {
        this.ifcType = type;
        this.packet = capturedPacket;
    }
}
/**
 * Capture thread for the AODV network interface: opens the jpcap device
 * whose name matches cfgInfo.ifaceName and appends every captured packet
 * to the shared packet queue as normal traffic.
 */
public class NetIfcPacketQueueBuilder extends Thread implements PacketReceiver {
public void run() {
try {
// choose the capture device whose name matches the configured interface
NetworkInterface[] devList=JpcapCaptor.getDeviceList();
NetworkInterface prefDev = null; // preferred network interface
for(int i=0; i< devList.length; i++)
{
if (devList[i].name.equalsIgnoreCase(cfgInfo.ifaceName))
{
prefDev = devList[i];
break;
}
}
// NOTE(review): if no device name matched, prefDev is still null here
// and openDevice(null, ...) will presumably fail - confirm intended
JpcapCaptor jpcaptor = JpcapCaptor.openDevice(prefDev, 4096, false, 20);
// capture forever; each packet is delivered to receivePacket() below
jpcaptor.loopPacket(-1, this);
} catch(Exception e) {
// NOTE(review): the exception is swallowed and the capture thread dies
// silently, because the logging call below is commented out
//m_PSR_ILog.m_pIntf.write(ILog.CRITICAL_LOGGING,
//"Net Ifc Packet Queue Builder - Problem in run - " + e);
}
}
// jpcap callback: enqueue the captured packet as ordinary traffic
public void receivePacket(Packet pkt) {
synchronized(pktQueue) {
pktQueue.addLast(new PacketInfo(PacketInfo.PACKET_IS_NORMAL,
pkt));
}
}
}
/**
 * Capture thread for the loopback (LO) interface: opens the jpcap device
 * whose name matches cfgInfo.loIfaceName and appends every captured packet
 * to the shared packet queue flagged as requiring a route.
 */
public class LoIfcPacketQueueBuilder extends Thread implements PacketReceiver {
public void run() {
try {
// choose the capture device whose name matches the configured LO interface
NetworkInterface[] devList=JpcapCaptor.getDeviceList();
NetworkInterface prefDev = null; // preferred network interface
for(int i=0; i< devList.length; i++)
{
if (devList[i].name.equalsIgnoreCase(cfgInfo.loIfaceName))
{
prefDev = devList[i];
break;
}
}
// NOTE(review): if no device name matched, prefDev is still null here
// and openDevice(null, ...) will presumably fail - confirm intended
JpcapCaptor jpcaptor = JpcapCaptor.openDevice(prefDev, 4096, false, 20);
// capture forever; each packet is delivered to receivePacket() below
jpcaptor.loopPacket(-1, this);
} catch(Exception e) {
// NOTE(review): the exception is swallowed and the capture thread dies
// silently, because the logging call below is commented out
// (message fixed from the copy-pasted "Net Ifc" wording)
//m_PSR_ILog.m_pIntf.write(ILog.CRITICAL_LOGGING,
//"Lo Ifc Packet Queue Builder - Problem in run - " + e);
}
}
// jpcap callback: enqueue the packet as one that needs route discovery
public void receivePacket(Packet pkt) {
synchronized(pktQueue) {
pktQueue.addLast(new PacketInfo(PacketInfo.PACKET_REQUIRES_ROUTE, pkt));
}
}
}
// -------------------- ILifecycle interface ---------------------
// Lifecycle startup hook; this component performs no startup work and
// always reports false to the framework.
public boolean startup(Object data) {
return false;
}
// Lifecycle shutdown hook; nothing to release, always reports false.
public boolean shutdown() {
return false;
}
// ---------------------- IConnections Interface -------------------
/**
 * Connects a sink component to the receptacle identified by the given
 * interface id.
 *
 * @param pSinkIntf  the sink component's interface to connect
 * @param riid       textual id of the receptacle interface
 * @param provConnID connection id supplied by the framework
 * @return true when a matching receptacle accepted the connection,
 *         false for an unknown riid or a rejected connection
 */
public boolean connect(IUnknown pSinkIntf, String riid, long provConnID) {
    // riid is already a String; the original's riid.toString() calls were redundant
    if (riid.equalsIgnoreCase("interfaces.IConfigInfo.IConfigInfo")) {
        return m_PSR_IConfigInfo.connectToRecp(pSinkIntf, riid, provConnID);
    }
    if (riid.equalsIgnoreCase("interfaces.ILog.ILog")) {
        return m_PSR_ILog.connectToRecp(pSinkIntf, riid, provConnID);
    }
    if (riid.equalsIgnoreCase("interfaces.IState.IAodvState")) {
        return m_PSR_IAodvState.connectToRecp(pSinkIntf, riid, provConnID);
    }
    if (riid.equalsIgnoreCase("interfaces.IControl.IControl")) {
        return m_PSR_IAodvControl.connectToRecp(pSinkIntf, riid, provConnID);
    }
    if (riid.equalsIgnoreCase("interfaces.IOSOperations.IOSOperations")) {
        return m_PSR_IOSOperations.connectToRecp(pSinkIntf, riid, provConnID);
    }
    // unknown interface id
    return false;
}
/**
 * Disconnects a previously established connection from the receptacle
 * identified by the given interface id.
 *
 * @param riid   textual id of the receptacle interface
 * @param connID id of the connection to remove
 * @return true when a matching receptacle released the connection,
 *         false for an unknown riid or a failed disconnect
 */
public boolean disconnect(String riid, long connID) {
    // riid is already a String; the original's riid.toString() calls were redundant
    if (riid.equalsIgnoreCase("interfaces.IConfigInfo.IConfigInfo")) {
        return m_PSR_IConfigInfo.disconnectFromRecp(connID);
    }
    if (riid.equalsIgnoreCase("interfaces.ILog.ILog")) {
        return m_PSR_ILog.disconnectFromRecp(connID);
    }
    if (riid.equalsIgnoreCase("interfaces.IState.IAodvState")) {
        return m_PSR_IAodvState.disconnectFromRecp(connID);
    }
    if (riid.equalsIgnoreCase("interfaces.IControl.IControl")) {
        return m_PSR_IAodvControl.disconnectFromRecp(connID);
    }
    if (riid.equalsIgnoreCase("interfaces.IOSOperations.IOSOperations")) {
        return m_PSR_IOSOperations.disconnectFromRecp(connID);
    }
    // unknown interface id
    return false;
}
// --------------------- additional OpenCOM methods -----------------
/**
 * Returns the component object a receptacle is connected to, by
 * unwrapping the OpenCOM delegation proxy, or null when the receptacle's
 * interface is not a dynamic proxy backed by a Delegator.
 */
public Object getConnectedSinkComp(OCM_SingleReceptacle pSR)
{
    if (!(pSR.m_pIntf instanceof Proxy)) {
        return null;
    }
    InvocationHandler handler = Proxy.getInvocationHandler((Proxy) pSR.m_pIntf);
    return (handler instanceof Delegator) ? ((Delegator) handler).obj : null;
}
}
| |
package cz.metacentrum.perun.core.impl;
import cz.metacentrum.perun.core.api.ExtSource;
import cz.metacentrum.perun.core.api.GroupsManager;
import cz.metacentrum.perun.core.api.exceptions.ExtSourceUnsupportedOperationException;
import cz.metacentrum.perun.core.api.exceptions.InternalErrorException;
import cz.metacentrum.perun.core.api.exceptions.SubjectNotExistsException;
import cz.metacentrum.perun.core.blImpl.PerunBlImpl;
import cz.metacentrum.perun.core.implApi.ExtSourceApi;
import com.fasterxml.jackson.annotation.JsonIgnore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLSocketFactory;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpression;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* @author Michal Stava stavamichal@gmail.com
*/
public class ExtSourceXML extends ExtSource implements ExtSourceApi {
private final static Logger log = LoggerFactory.getLogger(ExtSourceXML.class);
private String query = null;
private String loginQuery = null;
private String file = null;
private String uri = null;
private static PerunBlImpl perunBl;
// filled by spring (perun-core.xml)
public static PerunBlImpl setPerunBlImpl(PerunBlImpl perun) {
perunBl = perun;
return perun;
}
//URL connection
private HttpURLConnection con = null;
//Pattern for looking replacement in regex string
private final Pattern pattern = Pattern.compile("([^\\\\]|^)(\\\\\\\\)*/([^\\\\]|$)");
@Override
public List<Map<String,String>> findSubjectsLogins(String searchString) throws ExtSourceUnsupportedOperationException {
return findSubjectsLogins(searchString, 0);
}
@Override
public List<Map<String,String>> findSubjectsLogins(String searchString, int maxResulsts) throws ExtSourceUnsupportedOperationException {
throw new ExtSourceUnsupportedOperationException("For XML is using this method not optimized, use findSubjects instead.");
}
@Override
public List<Map<String,String>> findSubjects(String searchString) throws InternalErrorException {
return findSubjects(searchString, 0);
}
@Override
public List<Map<String,String>> findSubjects(String searchString, int maxResults) throws InternalErrorException {
//prepare string for xpath (use concat for chars ' and ")
searchString = convertToXpathSearchString(searchString);
//Get Query attribute from extSources.xml config file
query = getAttributes().get("xpath");
if (query == null || query.isEmpty()) {
throw new InternalErrorException("query attributes is required");
}
//Replace '?' by searchString
if(searchString == null) {
throw new InternalErrorException("search string can't be null");
}
query = query.replaceAll("\\?", searchString);
//Get file or uri of xml
prepareEnvironment();
return xpathParsing(query, maxResults);
}
@Override
public Map<String, String> getSubjectByLogin(String login) throws InternalErrorException, SubjectNotExistsException {
//prepare string for xpath (use concat for chars ' and ")
login = convertToXpathSearchString(login);
//Get Query attribute from extSources.xml config file
query = getAttributes().get("loginXpath");
if (query == null || query.isEmpty()) {
throw new InternalErrorException("query attributes is required");
}
//Replace '?' by searchString
if(login == null || login.isEmpty()) {
throw new InternalErrorException("login string can't be null or empty");
}
query = query.replaceAll("\\?", login);
//Get file or uri of xml
prepareEnvironment();
List<Map<String, String>> subjects = this.xpathParsing(query, 0);
if (subjects.size() > 1) {
throw new SubjectNotExistsException("There are more than one results for the login: " + login);
}
if (subjects.size() == 0) {
throw new SubjectNotExistsException(login);
}
return subjects.get(0);
}
@Override
public List<Map<String, String>> getGroupSubjects(Map<String, String> attributes) throws InternalErrorException {
// Get the query for the group subjects
String queryForGroup = attributes.get(GroupsManager.GROUPMEMBERSQUERY_ATTRNAME);
//If there is no query for group, throw exception
if(queryForGroup == null) throw new InternalErrorException("Attribute " + GroupsManager.GROUPMEMBERSQUERY_ATTRNAME + " can't be null.");
//Get file or uri of xml
prepareEnvironment();
return xpathParsing(queryForGroup, 0);
}
@Override
public List<Map<String, String>> getUsersSubjects() throws ExtSourceUnsupportedOperationException {
throw new ExtSourceUnsupportedOperationException();
}
protected void prepareEnvironment() throws InternalErrorException {
//Get file or uri of xml
file = getAttributes().get("file");
if(file == null || file.isEmpty()) {
file = null;
uri = getAttributes().get("uri");
if(uri == null || uri.isEmpty()) {
throw new InternalErrorException("File and uri are both empty, one must exists!.");
}
}
}
/**
* Get query and maxResults.
* Prepare document and xpathExpression by query.
* Get all nodes by xpath from document and parse them one by one.
*
* The way of xml take from "file" or "uri" (configuration file)
*
* @param query xpath query from config file
* @param maxResults never get more than maxResults results (0 mean unlimited)
*
* @return List of results, where result is Map<String,String> like <name, value>
* @throws InternalErrorException
*/
protected List<Map<String,String>> xpathParsing(String query, int maxResults) throws InternalErrorException {
//Prepare result list
List<Map<String, String>> subjects = new ArrayList<>();
//Create new document factory builder
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
DocumentBuilder builder;
try {
builder = factory.newDocumentBuilder();
} catch (ParserConfigurationException ex) {
throw new InternalErrorException("Error when creating newDocumentBuilder.", ex);
}
Document doc;
try {
if(file != null && !file.isEmpty()) {
doc = builder.parse(file);
} else if(uri != null && !uri.isEmpty()) {
doc = builder.parse(this.createTwoWaySSLConnection(uri));
} else {
throw new InternalErrorException("Document can't be parsed, because there is no way (file or uri) to this document in xpathParser.");
}
} catch (SAXParseException ex) {
throw new InternalErrorException("Error when parsing uri by document builder.", ex);
} catch (SAXException ex) {
throw new InternalErrorException("Problem with parsing is more complex, not only invalid characters.", ex);
} catch (IOException ex) {
throw new InternalErrorException("Error when parsing uri by document builder. Problem with input or output.", ex);
}
//Prepare xpath expression
XPathFactory xPathfactory = XPathFactory.newInstance();
XPath xpath = xPathfactory.newXPath();
XPathExpression queryExpr;
try {
queryExpr = xpath.compile(query);
} catch (XPathExpressionException ex) {
throw new InternalErrorException("Error when compiling xpath query.", ex);
}
//Call query on document node and get back nodesets
NodeList nodeList;
try {
nodeList = (NodeList) queryExpr.evaluate(doc, XPathConstants.NODESET);
} catch (XPathExpressionException ex) {
throw new InternalErrorException("Error when evaluate xpath query on document.", ex);
}
//Test if there is any nodeset in result
if(nodeList.getLength() == 0) {
//There is no results, return empty subjects
return subjects;
}
//Iterate through nodes and convert them to Map<String,String>
for(int i=0; i<nodeList.getLength(); i++) {
Node singleNode = nodeList.item(i);
// remove node from original structure in order to keep access time constant (otherwise is exp.)
singleNode.getParentNode().removeChild(singleNode);
Map<String,String> map = convertNodeToMap(singleNode);
if(map != null) subjects.add(map);
//Reducing results by maxResults
if(maxResults > 0) {
if(subjects.size() >= maxResults) break;
}
}
this.close();
return subjects;
}
/**
* Get XML node and convert all values by "xmlMapping" attribute to Map<String,String>
* In map there are "name=value" data.
*
* Attribute xmlMapping is from file perun-extSource.xml
*
* @param node node for converting
* @return Map<String,String> like <name,value>
* @throws InternalErrorException
*/
protected Map<String, String> convertNodeToMap(Node node) throws InternalErrorException {
Map<String,String> nodeInMap = new HashMap<>();
//If node is empty, return null
if(node == null) return null;
String mapping = getAttributes().get("xmlMapping");
String[] mappingArray = mapping.split(",\n");
for (String s : mappingArray) {
String attr = s.trim();
int index = attr.indexOf("=");
if (index <= 0)
throw new InternalErrorException("There is no text in xmlMapping attribute or there is no '=' character.");
String name = attr.substring(0, index);
String value = attr.substring(index + 1);
if (value.startsWith("#")) {
value = value.substring(1);
String[] regexAndXpath = value.split("#");
if (regexAndXpath.length != 2)
throw new InternalErrorException("There is not only 2 parts (regex and XpathExpression). There are " + regexAndXpath.length + " parts.");
value = extractValueByRegex(getValueFromXpath(node, regexAndXpath[1]), regexAndXpath[0]);
} else {
value = getValueFromXpath(node, value);
}
nodeInMap.put(name.trim(), value.trim());
}
return nodeInMap;
}
/**
* Get xml Node and xpath expression to get value from node by this xpath.
*
* @param node node for getting value from
* @param xpathExpression expression for xpath to looking for value in node
* @return string extracted from node by xpath
* @throws InternalErrorException
*/
protected String getValueFromXpath(Node node, String xpathExpression) throws InternalErrorException {
//Prepare xpath expression
XPathFactory xPathfactory = XPathFactory.newInstance();
XPath xpath = xPathfactory.newXPath();
XPathExpression expr;
try {
expr = xpath.compile(xpathExpression);
} catch (XPathExpressionException ex) {
throw new InternalErrorException("Error when compiling xpath query.", ex);
}
String text;
try {
text = (String) expr.evaluate(node, XPathConstants.STRING);
} catch (XPathExpressionException ex) {
throw new InternalErrorException("Error when evaluate xpath query on node.", ex);
}
return text;
}
/**
* Get regex in format 'regex/replacement' and value to get data from.
* Use regex and replacement to get data from value.
*
* IMPORTANT: Regex must be always in format 'regex/replacement' and must have
* exactly 1 existence of character '/' ex. '[abc](a)[b]/$1'
*
* @param value some string
* @param regex regex in format 'regex/replacement'
* @return extracted string from value by regex
*
* @throws InternalErrorException
*/
protected String extractValueByRegex(String value, String regex) throws InternalErrorException {
//trim value to erase newlines and spaces before and after value
value = value.trim();
//regex need to be separate to 2 parts (regex) and (replacement) separated by backslash - ex 'regex/replacement'
Matcher match = pattern.matcher(regex);
//need to separate regex to regexPart and replacementPart
String regexPart;
String replacementPart;
if(match.find()) {
int i = match.end();
if(match.find()) throw new InternalErrorException("There is more then one separating forward slash in regex without escaping.");
while(regex.charAt(i) != '/') {
i--;
if(i < 0) throw new InternalErrorException("Index of forward slash not found.");
}
regexPart = regex.substring(0, i);
replacementPart = regex.substring(i+1);
} else {
throw new InternalErrorException("There is no replacement in regex.");
}
//use regex and replacement to get string from value
value = value.replaceAll(regexPart, replacementPart);
return value;
}
/**
* Get https uri of xml document and create two way ssl connection using truststore and keystore.
*
* @param uri https uri to xml document
* @return input stream with xml document
*
* @throws IOException if there is some input/output error
* @throws InternalErrorException if some variables are not correctly filled
*/
protected InputStream createTwoWaySSLConnection(String uri) throws IOException, InternalErrorException {
if(uri == null || uri.isEmpty()) throw new InternalErrorException("Uri must be filled, can't be null or empty.");
/*//KeyStore data
String keyStore = getAttributes().get("keyStore");
String keyStorePass = getAttributes().get("keyStorePass");
String keyStoreType = getAttributes().get("keyStoreType");
if(keyStore == null || keyStorePass == null || keyStoreType == null) {
throw new InternalErrorException("KeystorePath, KeystorePass and KeystoreType must be filled. Please look into configuration file.");
}
//TrustStore data
String trustStore = getAttributes().get("trustStore");
String trustStorePass = getAttributes().get("trustStorePass");
if(trustStore == null || trustStorePass == null) {
throw new InternalErrorException("TrustStorePath and TrustStorePass must be filled. Please look into configuration file.");
}
//set necessary keystore properties - using a p12 file
System.setProperty("javax.net.ssl.keyStore", keyStore);
System.setProperty("javax.net.ssl.keyStorePassword", keyStorePass);
System.setProperty("javax.net.ssl.keyStoreType", keyStoreType);
//set necessary truststore properties - using JKS
System.setProperty("javax.net.ssl.trustStore", trustStore);
System.setProperty("javax.net.ssl.trustStorePassword", trustStorePass);
// register a https protocol handler - this may be required for previous JDK versions
System.setProperty("java.protocol.handler.pkgs","com.sun.net.ssl.internal.www.protocol");*/
//prepare sslFactory
SSLSocketFactory factory = (SSLSocketFactory) SSLSocketFactory.getDefault();
HttpsURLConnection.setDefaultSSLSocketFactory(factory);
URL myurl = new URL(uri);
con = (HttpURLConnection) myurl.openConnection();
//set request header if is required (set in extSource xml)
String reqHeaderKey = getAttributes().get("requestHeaderKey");
String reqHeaderValue = getAttributes().get("requestHeaderValue");
if(reqHeaderKey != null) {
if(reqHeaderValue == null) reqHeaderValue = "";
con.setRequestProperty(reqHeaderKey, reqHeaderValue);
}
int responseCode = con.getResponseCode();
if(responseCode == 200) {
InputStream is = con.getInputStream();
return is;
}
throw new InternalErrorException("Wrong response code while opening connection on uri '" + uri + "'. Response code: " + responseCode);
}
/**
* Take plaintext query and create xpath query with concat function if needed.
* IMPORTANT: especially if there are these characters: ' (single quote) and " (double quotes)
*
* @param query string for xpath query in plain text format
*
* @return string for xpath, if there is needed, concat is used, if not, string without concet in quotes is returned, empty string if nothing in query
*/
protected String convertToXpathSearchString(String query) {
//if query is empty or null, return empty string
if(query == null || query.isEmpty()) {
return "";
}
//prepare array with parts of query for concating
List<String> parts = new ArrayList<>();
//prepare variables for behavior in for cycles through all characters in query
String part = "";
//if part contains double quote, doubleQuote = true, if single quote then doubleQuote = false
boolean doubleQuotes = false;
//create parts where single quotes are in double quotes and vice versa
for(char ch: query.toCharArray()) {
if(ch == '\'') {
if(!doubleQuotes) {
part+= ch;
} else {
parts.add("'" + part + "',");
part = "" + ch;
doubleQuotes = false;
}
} else if (ch == '"') {
if(doubleQuotes) {
part+= ch;
} else {
parts.add("\"" + part + "\",");
part = "" + ch;
doubleQuotes = true;
}
} else {
part+= ch;
}
}
//add the last part to the array
if(doubleQuotes) {
parts.add("'" + part + "'");
} else {
parts.add("\"" + part + "\"");
}
//prepare string with concat if needed
String result = "concat(";
if(parts.size() > 1 ) {
for(String str: parts) {
result+=str;
}
result+= ")";
} else {
//return only string if not need concat
return parts.get(0);
}
//return xpath query
return result;
}
@Override
public void close() {
if(con != null) con.disconnect();
}
@Override
public List<Map<String, String>> getSubjectGroups(Map<String, String> attributes) throws ExtSourceUnsupportedOperationException {
throw new ExtSourceUnsupportedOperationException();
}
@JsonIgnore
public HttpURLConnection getCon() {
return con;
}
@JsonIgnore
public void setCon(HttpURLConnection con) {
this.con = con;
}
protected Map<String,String> getAttributes() throws InternalErrorException {
return perunBl.getExtSourcesManagerBl().getAttributes(this);
}
}
| |
package com.enow.storm.TriggerTopology;
import java.net.UnknownHostException;
import java.util.Map;
import com.enow.persistence.redis.IRedisDB;
import com.enow.persistence.redis.RedisDB;
import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichBolt;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Tuple;
import org.apache.storm.tuple.Values;
import org.bson.Document;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;
import com.enow.daos.mongoDAO.MongoDAO;
import com.esotericsoftware.minlog.Log;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Storm bolt that validates JSON tuples arriving from the "event-spout",
 * "order-spout" and "proceed-spout" Kafka spouts. Valid tuples get a
 * "spoutName" key and are emitted together with their road-map id; invalid
 * input is replaced by {"error":"true"} and tuples belonging to a stopping
 * road map by {"stop":"true"}.
 */
public class IndexingBolt extends BaseRichBolt {
// logger shared with potential subclasses (hence protected)
protected static final Logger _LOG = LoggerFactory.getLogger(IndexingBolt.class);
private OutputCollector collector;
// MongoDB access; (re)created for every tuple inside execute()
private MongoDAO mongoDao;
private String mongoIp;
private int mongoPort;
// Redis access; shared singleton obtained once in prepare()
private IRedisDB _redis;
private String redisIp;
private int redisPort;
/**
 * Reads MongoDB/Redis endpoints from the topology configuration
 * (ports arrive as Long and are narrowed to int) and obtains the
 * shared Redis instance.
 */
@Override
public void prepare(Map conf, TopologyContext context, OutputCollector collector) {
this.collector = collector;
this.mongoIp = (String) conf.get("mongodb.ip");
Long lmongoPort = (Long) conf.get("mongodb.port");
this.mongoPort = lmongoPort.intValue();
this.redisIp = (String) conf.get("redis.ip");
Long lredisPort = (Long) conf.get("redis.port");
this.redisPort = lredisPort.intValue();
_redis = RedisDB.getInstance(redisIp, redisPort);
}
/**
 * Validates one tuple, tags it with its spout of origin (or replaces it
 * with an error/stop marker) and emits it keyed by "roadMapId".
 * Error codes 1-10 in the log identify which validation step failed.
 */
@Override
public void execute(Tuple input) {
JSONParser parser = new JSONParser();
JSONObject _jsonObject;
JSONObject _jsonError = new JSONObject();
JSONObject _jsonStop = new JSONObject();
boolean corporationNameCheck = false;
boolean roadMapIdCheck = false;
boolean brokerIdCheck = false;
boolean serverIdCheck = false;
StoppingRoadMap stoppingRoadMap;
if (input.toString().length() == 0) {
//if input tuple has no value log error : 1
_LOG.warn("error:1");
_jsonError.put("error", "true");
_jsonObject = _jsonError;
} else {
//input tuple has a value
// strip the surrounding '[' and ']' of the tuple's value list
String msg = input.getValues().toString().substring(1, input.getValues().toString().length() - 1);
try {
// connecting to MongoDB with docker ip address 192.168.99.100 and port 27017
// mongoDao = new MongoDAO("192.168.99.100",27017);
// connecting to MongoDB with ip address 127.0.0.1 and port 27017
mongoDao = new MongoDAO(mongoIp, mongoPort);
if (input.getSourceComponent().equals("event-spout")) {
//input from event kafka
try {
_jsonObject = (JSONObject) parser.parse(msg);
if (_jsonObject.containsKey("roadMapId") && _jsonObject.containsKey("status")) {
//check whether _jsonObject from event kafka has all necessary keys
if(_jsonObject.get("status").equals("start")){
String stoppingRoadMapID = (String)_jsonObject.get("roadMapId");
if(_redis.isTerminate(stoppingRoadMapID)){
// road map is currently being stopped: drop with a stop marker
_jsonStop.put("stop", "true");
_jsonObject = _jsonStop;
_LOG.warn("stop:1");
}else{
_jsonObject.put("spoutName", "event");
}
}else{
// any non-"start" status: mark the road map terminated in Redis
// and schedule delayed cleanup via StoppingRoadMap
String stoppingRoadMapID = (String)_jsonObject.get("roadMapId");
_redis.addTerminate(stoppingRoadMapID);
stoppingRoadMap = new StoppingRoadMap(stoppingRoadMapID, _redis);
stoppingRoadMap.start();
_jsonStop.put("stop", "true");
_jsonObject = _jsonStop;
_LOG.warn("stop:1");
}
} else {
//if _jsonObject from event kafka doesn't have all necessary keys log error : 2
_LOG.warn("error:2");
_jsonError.put("error", "true");
_jsonObject = _jsonError;
}
} catch (ParseException e) {
//if input tuple from event kafka is not a json type log error : 3
_LOG.warn("error:3");
_jsonError.put("error", "true");
_jsonObject = _jsonError;
}
} else if (input.getSourceComponent().equals("order-spout")) {
//input from order kafka
try {
_jsonObject = (JSONObject) parser.parse(msg);
if (_jsonObject.containsKey("corporationName") && _jsonObject.containsKey("serverId")
&& _jsonObject.containsKey("brokerId") && _jsonObject.containsKey("roadMapId")
&& _jsonObject.containsKey("deviceId") && _jsonObject.containsKey("payload")) {
//check whether _jsonObject from order kafka has all necessary keys
String stoppingRoadMapID = (String)_jsonObject.get("roadMapId");
if (_redis.isTerminate(stoppingRoadMapID)) {
_jsonStop.put("stop", "true");
_jsonObject = _jsonStop;
_LOG.warn("stop:2");
} else {
if (_jsonObject.get("corporationName").equals("enow")) {
// check whether corporationName is enow
// for now corporationName must be enow
corporationNameCheck = true;
} else {
corporationNameCheck = false;
}
if (_jsonObject.get("serverId").equals("server0")) {
// check whether serverId is server0
// for now serverId must be server0
serverIdCheck = true;
} else {
serverIdCheck = false;
}
// connecting to brokerList collection in
// connectionData db
mongoDao.setDBCollection("connectionData", "brokerList");
if (mongoDao.collectionCount(
new Document("brokerId", (String) _jsonObject.get("brokerId"))) == 0) {
// if MongoDB collection broker has no same
// value as _jsonObject.get(broker)
brokerIdCheck = false;
} else if (mongoDao.collectionCount(
new Document("brokerId", (String) _jsonObject.get("brokerId"))) == 1) {
// if MongoDB collection broker has one same
// value as _jsonObject.get(broker)
brokerIdCheck = true;
} else {
// if MongoDB collection broker has more
// than two same values as
// _jsonObject.get(broker)
brokerIdCheck = false;
_LOG.debug("There are more than two broker ID on MongoDB");
}
// connecting to execute collection in enow db
mongoDao.setDBCollection("enow", "execute");
if (mongoDao.collectionCount(
new Document("roadMapId", (String) _jsonObject.get("roadMapId"))) == 0) {
// if MongoDB collection execute in enow db
// has no same value as
// _jsonObject.get(device)
roadMapIdCheck = false;
} else if (mongoDao.collectionCount(
new Document("roadMapId", (String) _jsonObject.get("roadMapId"))) == 1) {
// if MongoDB collection execute in enow db
// has one same value as
// _jsonObject.get(device)
roadMapIdCheck = true;
} else {
// if MongoDB collection execute in enow db
// has more than two same values as
// _jsonObject.get(device)
roadMapIdCheck = false;
_LOG.debug("There are more than two Phase Road-map Id on MongoDB");
}
if (corporationNameCheck && serverIdCheck && brokerIdCheck && roadMapIdCheck) {
_jsonObject.put("spoutName", "order");
} else {
// if more than one out of
// corporationNameCheck,serverIdCheck,
// brokerIdCheck, deviceIdCheck, and
// roadMapIdCheck is false log error : 4
_LOG.warn("error:4");
_jsonError.put("error", "true");
_jsonObject = _jsonError;
}
}
} else {
//if _jsonObject from order kafka doesn't have all necessary keys log error : 5
_LOG.warn("error:5");
_jsonError.put("error", "true");
_jsonObject = _jsonError;
}
} catch (ParseException e) {
//if input tuple from order kafka is not a json type log error : 6
_LOG.warn("error:6");
_jsonError.put("error", "true");
_jsonObject = _jsonError;
}
} else if (input.getSourceComponent().equals("proceed-spout")) {
//input from proceed kafka
try {
_jsonObject = (JSONObject) parser.parse(msg);
if (_jsonObject.containsKey("order") && _jsonObject.containsKey("roadMapId")
&& _jsonObject.containsKey("nodeId") && _jsonObject.containsKey("payload")
&& _jsonObject.containsKey("incomingNode") && _jsonObject.containsKey("outingNode")
&& _jsonObject.containsKey("previousData") && _jsonObject.containsKey("topic")
&& _jsonObject.containsKey("lastNode") && _jsonObject.containsKey("verified")
&& _jsonObject.containsKey("lambda")) {
String stoppingRoadMapID = (String)_jsonObject.get("roadMapId");
if(_redis.isTerminate(stoppingRoadMapID)){
_jsonStop.put("stop", "true");
_jsonObject = _jsonStop;
_LOG.warn("stop:3");
}else{
_jsonObject.put("spoutName", "proceed");
}
} else {
//if _jsonObject from proceed kafka doesn't have all necessary keys log error : 7
_LOG.warn("error:7");
_jsonError.put("error", "true");
_jsonObject = _jsonError;
}
} catch (ParseException e) {
//if input tuple from proceed kafka is not a json type log error : 8
_LOG.warn("error:8");
_jsonError.put("error", "true");
_jsonObject = _jsonError;
}
} else {
//if input tuple is not from event kafka or order kafka or proceed kafka log error : 9
_LOG.warn("error:9");
_jsonError.put("error", "true");
_jsonObject = _jsonError;
}
mongoDao.close();
} catch (UnknownHostException e) {
//if MongoDB connection falied log error : 10
_LOG.warn("error:10");
_jsonError.put("error", "true");
_jsonObject = _jsonError;
}
}
// NOTE(review): on error/stop paths _jsonObject carries no "roadMapId"
// key, so the second emitted field is null - confirm downstream
// fields-grouping tolerates a null key
collector.emit(new Values(_jsonObject,(String)_jsonObject.get("roadMapId")));
try {
collector.ack(input);
} catch (Exception e) {
Log.warn("ack failed");
collector.fail(input);
}
}
// declares the two output fields matching the emit() above
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
declarer.declare(new Fields("jsonObject","roadMapId"));
}
}
/**
 * Helper thread that keeps a road map's terminate flag in Redis for a
 * 5-second grace period (during which IndexingBolt drops that road map's
 * tuples), then removes the flag and the road map's cached nodes.
 */
class StoppingRoadMap extends Thread {
    /** Id of the road map being stopped. */
    final String roadMapId;
    /** Redis handle used for the cleanup. */
    final IRedisDB _redis;

    public StoppingRoadMap(String roadMapId, IRedisDB _redis) {
        this.roadMapId = roadMapId;
        this._redis = _redis;
    }

    public void run() {
        try {
            // grace period before cleanup
            Thread.sleep(5000);
        } catch (InterruptedException e) {
            // restore the interrupt flag instead of swallowing it (the
            // original printed a stack trace and dropped the flag)
            Thread.currentThread().interrupt();
        }
        // cleanup runs even when the sleep was cut short, as before
        _redis.deleteTerminate(roadMapId);
        _redis.deleteNode(roadMapId + "-*");
    }
}
| |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package first;
/**
*
* @author dev
*/
public class Appwindow extends javax.swing.JFrame {
/**
 * Creates a new Appwindow frame and builds its generated Swing
 * component tree via initComponents().
 */
public Appwindow() {
initComponents();
}
/**
* This method is called from within the constructor to initialize the form.
* WARNING: Do NOT modify this code. The content of this method is always
* regenerated by the Form Editor.
*/
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
jPanel1 = new javax.swing.JPanel();
jPanel6 = new javax.swing.JPanel();
jPanel7 = new javax.swing.JPanel();
jPanel8 = new javax.swing.JPanel();
jPanel5 = new javax.swing.JPanel();
jPanel10 = new javax.swing.JPanel();
jPanel3 = new javax.swing.JPanel();
jScrollPane1 = new javax.swing.JScrollPane();
jTree1 = new javax.swing.JTree();
jPanel9 = new javax.swing.JPanel();
jPanel4 = new javax.swing.JPanel();
jScrollPane2 = new javax.swing.JScrollPane();
jList1 = new javax.swing.JList();
jPanel11 = new javax.swing.JPanel();
jScrollPane3 = new javax.swing.JScrollPane();
jTable1 = new javax.swing.JTable();
jComboBox1 = new javax.swing.JComboBox();
jPanel2 = new javax.swing.JPanel();
jButton1 = new javax.swing.JButton();
jButton2 = new javax.swing.JButton();
setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);
javax.swing.GroupLayout jPanel6Layout = new javax.swing.GroupLayout(jPanel6);
jPanel6.setLayout(jPanel6Layout);
jPanel6Layout.setHorizontalGroup(
jPanel6Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGap(0, 517, Short.MAX_VALUE)
);
jPanel6Layout.setVerticalGroup(
jPanel6Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGap(0, 70, Short.MAX_VALUE)
);
javax.swing.GroupLayout jPanel7Layout = new javax.swing.GroupLayout(jPanel7);
jPanel7.setLayout(jPanel7Layout);
jPanel7Layout.setHorizontalGroup(
jPanel7Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGap(0, 507, Short.MAX_VALUE)
);
jPanel7Layout.setVerticalGroup(
jPanel7Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGap(0, 70, Short.MAX_VALUE)
);
javax.swing.GroupLayout jPanel5Layout = new javax.swing.GroupLayout(jPanel5);
jPanel5.setLayout(jPanel5Layout);
jPanel5Layout.setHorizontalGroup(
jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGap(0, 165, Short.MAX_VALUE)
);
jPanel5Layout.setVerticalGroup(
jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGap(0, 0, Short.MAX_VALUE)
);
javax.swing.GroupLayout jPanel8Layout = new javax.swing.GroupLayout(jPanel8);
jPanel8.setLayout(jPanel8Layout);
jPanel8Layout.setHorizontalGroup(
jPanel8Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel8Layout.createSequentialGroup()
.addContainerGap()
.addComponent(jPanel5, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addContainerGap())
);
jPanel8Layout.setVerticalGroup(
jPanel8Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel8Layout.createSequentialGroup()
.addContainerGap()
.addComponent(jPanel5, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addContainerGap())
);
jScrollPane1.setViewportView(jTree1);
javax.swing.GroupLayout jPanel3Layout = new javax.swing.GroupLayout(jPanel3);
jPanel3.setLayout(jPanel3Layout);
jPanel3Layout.setHorizontalGroup(
jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel3Layout.createSequentialGroup()
.addContainerGap()
.addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 140, Short.MAX_VALUE)
.addContainerGap())
);
jPanel3Layout.setVerticalGroup(
jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel3Layout.createSequentialGroup()
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(jScrollPane1, javax.swing.GroupLayout.PREFERRED_SIZE, 286, javax.swing.GroupLayout.PREFERRED_SIZE)
.addGap(89, 89, 89))
);
jList1.setModel(new javax.swing.AbstractListModel() {
String[] strings = { "Item 1", "Item 2", "Item 3", "Item 4", "Item 5" };
public int getSize() { return strings.length; }
public Object getElementAt(int i) { return strings[i]; }
});
jScrollPane2.setViewportView(jList1);
jTable1.setModel(new javax.swing.table.DefaultTableModel(
new Object [][] {
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null}
},
new String [] {
"Title 1", "Title 2", "Title 3", "Title 4"
}
));
jTable1.setPreferredSize(new java.awt.Dimension(300, 60));
jScrollPane3.setViewportView(jTable1);
javax.swing.GroupLayout jPanel11Layout = new javax.swing.GroupLayout(jPanel11);
jPanel11.setLayout(jPanel11Layout);
jPanel11Layout.setHorizontalGroup(
jPanel11Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel11Layout.createSequentialGroup()
.addContainerGap()
.addComponent(jScrollPane3, javax.swing.GroupLayout.PREFERRED_SIZE, 134, javax.swing.GroupLayout.PREFERRED_SIZE)
.addContainerGap(44, Short.MAX_VALUE))
);
jPanel11Layout.setVerticalGroup(
jPanel11Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel11Layout.createSequentialGroup()
.addContainerGap()
.addComponent(jScrollPane3, javax.swing.GroupLayout.PREFERRED_SIZE, 0, Short.MAX_VALUE)
.addContainerGap())
);
javax.swing.GroupLayout jPanel4Layout = new javax.swing.GroupLayout(jPanel4);
jPanel4.setLayout(jPanel4Layout);
jPanel4Layout.setHorizontalGroup(
jPanel4Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel4Layout.createSequentialGroup()
.addContainerGap()
.addComponent(jScrollPane2, javax.swing.GroupLayout.DEFAULT_SIZE, 135, Short.MAX_VALUE)
.addGap(18, 18, 18)
.addComponent(jPanel11, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addContainerGap())
);
jPanel4Layout.setVerticalGroup(
jPanel4Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel4Layout.createSequentialGroup()
.addContainerGap()
.addGroup(jPanel4Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel4Layout.createSequentialGroup()
.addComponent(jPanel11, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addContainerGap())
.addGroup(jPanel4Layout.createSequentialGroup()
.addComponent(jScrollPane2)
.addGap(38, 38, 38))))
);
javax.swing.GroupLayout jPanel9Layout = new javax.swing.GroupLayout(jPanel9);
jPanel9.setLayout(jPanel9Layout);
jPanel9Layout.setHorizontalGroup(
jPanel9Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel9Layout.createSequentialGroup()
.addContainerGap()
.addComponent(jPanel4, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addContainerGap())
);
jPanel9Layout.setVerticalGroup(
jPanel9Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel9Layout.createSequentialGroup()
.addContainerGap()
.addComponent(jPanel4, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addContainerGap())
);
javax.swing.GroupLayout jPanel10Layout = new javax.swing.GroupLayout(jPanel10);
jPanel10.setLayout(jPanel10Layout);
jPanel10Layout.setHorizontalGroup(
jPanel10Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel10Layout.createSequentialGroup()
.addContainerGap()
.addComponent(jPanel3, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(jPanel9, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addContainerGap())
);
jPanel10Layout.setVerticalGroup(
jPanel10Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel10Layout.createSequentialGroup()
.addContainerGap()
.addComponent(jPanel3, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addContainerGap())
.addComponent(jPanel9, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
);
javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1);
jPanel1.setLayout(jPanel1Layout);
jPanel1Layout.setHorizontalGroup(
jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel1Layout.createSequentialGroup()
.addContainerGap()
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel1Layout.createSequentialGroup()
.addComponent(jPanel10, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addGap(696, 696, 696)
.addComponent(jPanel8, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addComponent(jPanel6, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jPanel7, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
);
jPanel1Layout.setVerticalGroup(
jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel1Layout.createSequentialGroup()
.addContainerGap()
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jPanel8, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addGroup(jPanel1Layout.createSequentialGroup()
.addComponent(jPanel10, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addGap(0, 31, Short.MAX_VALUE)))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(jPanel6, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(jPanel7, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
);
jComboBox1.setModel(new javax.swing.DefaultComboBoxModel(new String[] { "Item 1", "Item 2", "Item 3", "Item 4" }));
jPanel2.setPreferredSize(new java.awt.Dimension(200, 662));
javax.swing.GroupLayout jPanel2Layout = new javax.swing.GroupLayout(jPanel2);
jPanel2.setLayout(jPanel2Layout);
jPanel2Layout.setHorizontalGroup(
jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGap(0, 161, Short.MAX_VALUE)
);
jPanel2Layout.setVerticalGroup(
jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGap(0, 662, Short.MAX_VALUE)
);
jButton1.setText("jButton1");
jButton2.setText("jButton2");
javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
getContentPane().setLayout(layout);
layout.setHorizontalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addGap(6, 6, 6)
.addComponent(jPanel1, javax.swing.GroupLayout.PREFERRED_SIZE, 558, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(jPanel2, javax.swing.GroupLayout.PREFERRED_SIZE, 161, javax.swing.GroupLayout.PREFERRED_SIZE))
.addGroup(layout.createSequentialGroup()
.addGap(289, 289, 289)
.addComponent(jComboBox1, javax.swing.GroupLayout.PREFERRED_SIZE, 178, javax.swing.GroupLayout.PREFERRED_SIZE)))
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
.addGap(0, 0, Short.MAX_VALUE)
.addComponent(jButton1)
.addGap(36, 36, 36)
.addComponent(jButton2)
.addGap(59, 59, 59))
);
layout.setVerticalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addComponent(jComboBox1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addGap(29, 29, 29)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jPanel2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jPanel1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jButton1)
.addComponent(jButton2))
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
);
pack();
}// </editor-fold>//GEN-END:initComponents
/**
* @param args the command line arguments
*/
public static void main(String args[]) {
/* Set the Nimbus look and feel */
//<editor-fold defaultstate="collapsed" desc=" Look and feel setting code (optional) ">
/* If Nimbus (introduced in Java SE 6) is not available, stay with the default look and feel.
* For details see http://download.oracle.com/javase/tutorial/uiswing/lookandfeel/plaf.html
*/
try {
for (javax.swing.UIManager.LookAndFeelInfo info : javax.swing.UIManager.getInstalledLookAndFeels()) {
if ("Nimbus".equals(info.getName())) {
javax.swing.UIManager.setLookAndFeel(info.getClassName());
break;
}
}
} catch (ClassNotFoundException ex) {
java.util.logging.Logger.getLogger(Appwindow.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
} catch (InstantiationException ex) {
java.util.logging.Logger.getLogger(Appwindow.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
} catch (IllegalAccessException ex) {
java.util.logging.Logger.getLogger(Appwindow.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
} catch (javax.swing.UnsupportedLookAndFeelException ex) {
java.util.logging.Logger.getLogger(Appwindow.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
}
//</editor-fold>
/* Create and display the form */
java.awt.EventQueue.invokeLater(new Runnable() {
public void run() {
new Appwindow().setVisible(true);
}
});
}
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JButton jButton1;
private javax.swing.JButton jButton2;
private javax.swing.JComboBox jComboBox1;
private javax.swing.JList jList1;
private javax.swing.JPanel jPanel1;
private javax.swing.JPanel jPanel10;
private javax.swing.JPanel jPanel11;
private javax.swing.JPanel jPanel2;
private javax.swing.JPanel jPanel3;
private javax.swing.JPanel jPanel4;
private javax.swing.JPanel jPanel5;
private javax.swing.JPanel jPanel6;
private javax.swing.JPanel jPanel7;
private javax.swing.JPanel jPanel8;
private javax.swing.JPanel jPanel9;
private javax.swing.JScrollPane jScrollPane1;
private javax.swing.JScrollPane jScrollPane2;
private javax.swing.JScrollPane jScrollPane3;
private javax.swing.JTable jTable1;
private javax.swing.JTree jTree1;
// End of variables declaration//GEN-END:variables
}
| |
// Copyright 2018 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.importdeps;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.io.MoreFiles.asCharSink;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.devtools.build.lib.view.proto.Deps.Dependencies;
import com.google.devtools.common.options.Converter;
import com.google.devtools.common.options.EnumConverter;
import com.google.devtools.common.options.Option;
import com.google.devtools.common.options.OptionDocumentationCategory;
import com.google.devtools.common.options.OptionEffectTag;
import com.google.devtools.common.options.OptionsBase;
import com.google.devtools.common.options.OptionsParser;
import com.google.devtools.common.options.OptionsParsingException;
import com.google.devtools.common.options.ShellQuotedParamsFilePreProcessor;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.InvalidPathException;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
/**
* A checker that checks the completeness of the dependencies of an import target (java_import or
* aar_import). If incomplete, it prints out the list of missing class names to the output file.
*/
public class Main {
  /** Command line options. */
  public static class Options extends OptionsBase {
    @Option(
      name = "input",
      allowMultiple = true,
      defaultValue = "",
      category = "input",
      documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
      effectTags = {OptionEffectTag.UNKNOWN},
      converter = ExistingPathConverter.class,
      abbrev = 'i',
      help = "Input jars with classes to check the completeness of their dependencies."
    )
    public List<Path> inputJars;

    @Option(
      name = "directdep",
      allowMultiple = true,
      defaultValue = "",
      category = "input",
      documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
      effectTags = {OptionEffectTag.UNKNOWN},
      converter = ExistingPathConverter.class,
      help = "Subset of Jars listed in --classpath_entry that --input Jars are allowed to depend "
          + "on directly."
    )
    public List<Path> directClasspath;

    @Option(
      name = "classpath_entry",
      allowMultiple = true,
      defaultValue = "",
      category = "input",
      documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
      effectTags = {OptionEffectTag.UNKNOWN},
      converter = ExistingPathConverter.class,
      help =
          "Ordered classpath (Jar) to resolve symbols in the --input jars, like javac's -cp flag."
    )
    public List<Path> fullClasspath;

    @Option(
      name = "bootclasspath_entry",
      allowMultiple = true,
      defaultValue = "",
      category = "input",
      documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
      effectTags = {OptionEffectTag.UNKNOWN},
      converter = ExistingPathConverter.class,
      help =
          "Bootclasspath that was used to compile the --input Jar with, like javac's "
              + "-bootclasspath_entry flag (required)."
    )
    public List<Path> bootclasspath;

    @Option(
      name = "output",
      defaultValue = "null",
      category = "output",
      documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
      effectTags = {OptionEffectTag.UNKNOWN},
      converter = PathConverter.class,
      help = "Output path to save the result."
    )
    public Path output;

    @Option(
      name = "jdeps_output",
      defaultValue = "null",
      category = "output",
      documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
      effectTags = {OptionEffectTag.UNKNOWN},
      converter = PathConverter.class,
      help = "Output path to save the result.")
    public Path jdepsOutput;

    @Option(
      name = "rule_label",
      defaultValue = "",
      category = "output",
      documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
      effectTags = {OptionEffectTag.UNKNOWN},
      help = "The rule label of the current target under analysis.")
    public String ruleLabel;

    @Option(
      name = "checking_mode",
      defaultValue = "WARNING",
      documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
      effectTags = {OptionEffectTag.UNKNOWN},
      converter = CheckingModeConverter.class,
      help = "Controls the behavior of the checker."
    )
    public CheckingMode checkingMode;
  }

  /** A randomly picked large exit code to avoid collision with other common exit codes. */
  private static final int DEPS_ERROR_EXIT_CODE = 199;

  public static void main(String[] args) throws IOException {
    System.exit(checkDeps(args));
  }

  /**
   * Runs the dependency check and returns the process exit code: 0 on success (or in
   * WARNING/SILENCE mode), {@link #DEPS_ERROR_EXIT_CODE} when dependencies are incomplete and
   * the checking mode is ERROR. Always creates the --output file, and optionally writes a jdeps
   * proto to --jdeps_output.
   */
  @VisibleForTesting
  static int checkDeps(String[] args) throws IOException {
    Options options = parseCommandLineOptions(args);

    if (!Files.exists(options.output)) {
      Files.createFile(options.output); // Make sure the output file always exists.
    }

    int exitCode = 0;
    try (ImportDepsChecker checker =
        new ImportDepsChecker(
            ImmutableList.copyOf(options.bootclasspath),
            // Consider everything direct if no direct classpath is given
            options.directClasspath.isEmpty()
                ? ImmutableList.copyOf(options.fullClasspath)
                : ImmutableList.copyOf(options.directClasspath),
            ImmutableList.copyOf(options.fullClasspath),
            ImmutableList.copyOf(options.inputJars))) {
      if (!checker.check() && options.checkingMode != CheckingMode.SILENCE) {
        String result = checker.computeResultOutput(options.ruleLabel);
        checkState(!result.isEmpty(), "The result should NOT be empty.");
        exitCode = options.checkingMode == CheckingMode.ERROR ? DEPS_ERROR_EXIT_CODE : 0;
        printErrorMessage(result, options);
        asCharSink(options.output, StandardCharsets.UTF_8).write(result);
      }
      if (options.jdepsOutput != null) {
        Dependencies dependencies = checker.emitJdepsProto(options.ruleLabel);
        try (OutputStream os =
            new BufferedOutputStream(Files.newOutputStream(options.jdepsOutput))) {
          dependencies.writeTo(os);
        }
      }
    }
    return exitCode;
  }

  /** Prints the ERROR/WARNING banner, the list of offending jars, and the detailed message. */
  private static void printErrorMessage(String detailedErrorMessage, Options options) {
    checkArgument(
        options.checkingMode == CheckingMode.ERROR || options.checkingMode == CheckingMode.WARNING);
    System.err.print(options.checkingMode == CheckingMode.ERROR ? "ERROR" : "WARNING");
    System.err.printf(
        ": The dependencies for the following %d jar(s) are not complete.\n",
        options.inputJars.size());
    int index = 1;
    for (Path jar : options.inputJars) {
      System.err.printf("    %3d.%s\n", index++, jar.toString());
    }
    System.err.println("The details are listed below:");
    System.err.print(detailedErrorMessage);
  }

  /**
   * Parses and validates the command line. Requires --input, --output, and
   * --bootclasspath_entry; --jdeps_output must not be a directory; every --directdep entry must
   * also appear in --classpath_entry.
   */
  @VisibleForTesting
  static Options parseCommandLineOptions(String[] args) throws IOException {
    OptionsParser optionsParser = OptionsParser.newOptionsParser(Options.class);
    optionsParser.setAllowResidue(false);
    optionsParser.enableParamsFileSupport(
        new ShellQuotedParamsFilePreProcessor(FileSystems.getDefault()));
    optionsParser.parseAndExitUponError(args);
    Options options = optionsParser.getOptions(Options.class);

    checkArgument(!options.inputJars.isEmpty(), "--input is required");
    checkArgument(options.output != null, "--output is required");
    checkArgument(!options.bootclasspath.isEmpty(), "--bootclasspath_entry is required");
    checkArgument(
        options.jdepsOutput == null || !Files.isDirectory(options.jdepsOutput),
        "Invalid value of --jdeps_output: '%s'",
        options.jdepsOutput);

    if (!options.fullClasspath.containsAll(options.directClasspath)) {
      ArrayList<Path> missing = Lists.newArrayList(options.directClasspath);
      missing.removeAll(options.fullClasspath);
      // Fixed: the message previously referenced a nonexistent "--strictdeps" flag;
      // the flag that must be a subset of --classpath_entry is --directdep.
      throw new IllegalArgumentException(
          "--directdep must be a subset of --classpath_entry but has additional entries: "
              + missing);
    }
    return options;
  }

  /** Validating converter for Paths. A Path is considered valid if it resolves to a file. */
  public static class PathConverter implements Converter<Path> {

    private final boolean mustExist;

    public PathConverter() {
      this.mustExist = false;
    }

    protected PathConverter(boolean mustExist) {
      this.mustExist = mustExist;
    }

    @Override
    public Path convert(String input) throws OptionsParsingException {
      try {
        Path path = FileSystems.getDefault().getPath(input);
        if (mustExist && !Files.exists(path)) {
          throw new OptionsParsingException(
              String.format("%s is not a valid path: it does not exist.", input));
        }
        return path;
      } catch (InvalidPathException e) {
        // Preserve the cause so callers can see why the path was rejected.
        throw new OptionsParsingException(
            String.format("%s is not a valid path: %s.", input, e.getMessage()), e);
      }
    }

    @Override
    public String getTypeDescription() {
      return "a valid filesystem path";
    }
  }

  /**
   * Validating converter for Paths. A Path is considered valid if it resolves to a file and exists.
   */
  public static class ExistingPathConverter extends PathConverter {
    public ExistingPathConverter() {
      super(true);
    }
  }

  /** Converter for {@link CheckingMode} */
  public static class CheckingModeConverter extends EnumConverter<CheckingMode> {
    public CheckingModeConverter() {
      super(CheckingMode.class, "The checking mode for the dependency checker.");
    }
  }

  /**
   * The checking mode of the dependency checker.
   */
  public enum CheckingMode {
    /** Emit 'errors' on missing or incomplete dependencies. */
    ERROR,
    /** Emit 'warnings' on missing or incomplete dependencies. */
    WARNING,
    /**
     * Emit 'nothing' on missing or incomplete dependencies. This is mainly used to dump jdeps
     * protos.
     */
    SILENCE
  }
}
| |
/*
* Copyright 2011-2016 Amazon Technologies, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://aws.amazon.com/apache2.0
*
* This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and
* limitations under the License.
*/
package com.amazonaws.services.s3.transfer.internal;
import static com.amazonaws.event.SDKProgressPublisher.publishProgress;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import com.amazonaws.AmazonClientException;
import com.amazonaws.event.ProgressEventType;
import com.amazonaws.event.ProgressListenerChain;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.PartETag;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.amazonaws.services.s3.transfer.PauseResult;
import com.amazonaws.services.s3.transfer.PauseStatus;
import com.amazonaws.services.s3.transfer.PersistableUpload;
import com.amazonaws.services.s3.transfer.Transfer.TransferState;
import com.amazonaws.services.s3.transfer.TransferManager;
import com.amazonaws.services.s3.transfer.model.UploadResult;
/**
* Manages an upload by periodically checking to see if the upload is done, and
* returning a result if so. Otherwise, schedules a copy of itself to be run in
* the future and returns null. When waiting on the result of this class via a
* Future object, clients must call {@link UploadMonitor#isDone()} and
* {@link UploadMonitor#getFuture()}
*/
public class UploadMonitor implements Callable<UploadResult>, TransferMonitor {
private final AmazonS3 s3;
private final PutObjectRequest origReq;
private final ProgressListenerChain listener;
private final UploadCallable multipartUploadCallable;
private final UploadImpl transfer;
private final ExecutorService threadPool;
/*
 * Futures of threads that upload the parts.
 */
private final List<Future<PartETag>> futures = Collections
.synchronizedList(new ArrayList<Future<PartETag>>());
/*
 * State for clients wishing to poll for completion.
 * Guarded by this monitor's intrinsic lock (all accessors are synchronized).
 */
private boolean isUploadDone = false;
private Future<UploadResult> future;
public synchronized Future<UploadResult> getFuture() {
return future;
}
private synchronized void setFuture(Future<UploadResult> future) {
this.future = future;
}
private synchronized void cancelFuture() {
future.cancel(true);
}
public synchronized boolean isDone() {
return isUploadDone;
}
private synchronized void markAllDone() {
isUploadDone = true;
}
/**
 * Constructs a new upload watcher and then immediately submits it to
 * the thread pool.
 *
 * @param manager
 *            The {@link TransferManager} that owns this upload.
 * @param transfer
 *            The transfer being processed.
 * @param threadPool
 *            The {@link ExecutorService} to which we should submit new
 *            tasks.
 * @param multipartUploadCallable
 *            The callable responsible for processing the upload
 *            asynchronously
 * @param putObjectRequest
 *            The original putObject request
 * @param progressListenerChain
 *            A chain of listeners that wish to be notified of upload
 *            progress
 */
public static UploadMonitor create(
TransferManager manager,
UploadImpl transfer,
ExecutorService threadPool,
UploadCallable multipartUploadCallable,
PutObjectRequest putObjectRequest,
ProgressListenerChain progressListenerChain) {
UploadMonitor uploadMonitor = new UploadMonitor(manager, transfer,
threadPool, multipartUploadCallable, putObjectRequest,
progressListenerChain);
// Kick off the upload immediately; the resulting Future is what
// getFuture() hands back to polling clients.
uploadMonitor.setFuture(threadPool.submit(uploadMonitor));
return uploadMonitor;
}
private UploadMonitor(TransferManager manager, UploadImpl transfer, ExecutorService threadPool,
UploadCallable multipartUploadCallable, PutObjectRequest putObjectRequest,
ProgressListenerChain progressListenerChain) {
this.s3 = manager.getAmazonS3Client();
this.multipartUploadCallable = multipartUploadCallable;
this.origReq = putObjectRequest;
this.listener = progressListenerChain;
this.transfer = transfer;
this.threadPool = threadPool;
}
@Override
public UploadResult call() throws Exception {
try {
UploadResult result = multipartUploadCallable.call();
/**
 * If the result is null, it is a multipart parallel upload, so a
 * new task is submitted for initiating a complete multipart upload
 * request.
 */
if (result == null) {
futures.addAll(multipartUploadCallable.getFutures());
setFuture(threadPool.submit(new CompleteMultipartUpload(
multipartUploadCallable.getMultipartUploadId(), s3,
origReq, futures, multipartUploadCallable
.getETags(), listener, this)));
} else {
uploadComplete();
}
return result;
} catch (CancellationException e) {
transfer.setState(TransferState.Canceled);
publishProgress(listener, ProgressEventType.TRANSFER_CANCELED_EVENT);
throw new AmazonClientException("Upload canceled");
} catch (Exception e) {
transfer.setState(TransferState.Failed);
throw e;
}
}
// Marks the transfer finished and publishes the completion event when needed.
void uploadComplete() {
markAllDone();
transfer.setState(TransferState.Completed);
// AmazonS3Client takes care of all the events for single part uploads,
// so we only need to send a completed event for multipart uploads.
if (multipartUploadCallable.isMultipartUpload()) {
publishProgress(listener, ProgressEventType.TRANSFER_COMPLETED_EVENT);
}
}
/**
 * Cancels the futures in the following cases - If the user has requested
 * for forcefully aborting the transfers. - If the upload is a multi part
 * parallel upload. - If the upload operation hasn't started. Cancels all
 * the in flight transfers of the upload if applicable. Returns the
 * multi-part upload Id in case of the parallel multi-part uploads. Returns
 * null otherwise.
 */
PauseResult<PersistableUpload> pause(boolean forceCancel) {
PersistableUpload persistableUpload = multipartUploadCallable
.getPersistableUpload();
if (persistableUpload == null) {
// Upload cannot be persisted; either report the pause status or, on
// forceCancel, abort everything that is in flight.
PauseStatus pauseStatus = TransferManagerUtils
.determinePauseStatus(transfer.getState(), forceCancel);
if (forceCancel) {
cancelFutures();
multipartUploadCallable.performAbortMultipartUpload();
}
return new PauseResult<PersistableUpload>(pauseStatus);
}
cancelFutures();
return new PauseResult<PersistableUpload>(PauseStatus.SUCCESS,
persistableUpload);
}
/**
 * Cancels the inflight transfers if they are not completed.
 */
private void cancelFutures() {
cancelFuture();
for (Future<PartETag> f : futures) {
f.cancel(true);
}
multipartUploadCallable.getFutures().clear();
futures.clear();
}
/**
 * Cancels all the futures associated with this upload operation. Also
 * cleans up the parts on Amazon S3 if the upload is performed as a
 * multi-part upload operation.
 */
void performAbort() {
cancelFutures();
multipartUploadCallable.performAbortMultipartUpload();
publishProgress(listener, ProgressEventType.TRANSFER_CANCELED_EVENT);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.connectors.hive;
import org.apache.flink.api.dag.Transformation;
import org.apache.flink.configuration.ReadableConfig;
import org.apache.flink.connectors.hive.read.HiveTableInputFormat;
import org.apache.flink.core.execution.JobClient;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.transformations.OneInputTransformation;
import org.apache.flink.streaming.api.transformations.PartitionTransformation;
import org.apache.flink.table.HiveVersionTestUtil;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.api.config.ExecutionConfigOptions;
import org.apache.flink.table.api.internal.TableEnvironmentImpl;
import org.apache.flink.table.catalog.CatalogPartitionSpec;
import org.apache.flink.table.catalog.CatalogTable;
import org.apache.flink.table.catalog.ObjectPath;
import org.apache.flink.table.catalog.exceptions.CatalogException;
import org.apache.flink.table.catalog.exceptions.TableNotExistException;
import org.apache.flink.table.catalog.exceptions.TableNotPartitionedException;
import org.apache.flink.table.catalog.hive.HiveCatalog;
import org.apache.flink.table.catalog.hive.HiveTestUtils;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.factories.TableSourceFactory;
import org.apache.flink.table.planner.delegation.PlannerBase;
import org.apache.flink.table.planner.plan.nodes.exec.ExecNode;
import org.apache.flink.table.planner.runtime.utils.BatchAbstractTestBase;
import org.apache.flink.table.planner.runtime.utils.StreamTestSink;
import org.apache.flink.table.planner.runtime.utils.TestingAppendRowDataSink;
import org.apache.flink.table.planner.runtime.utils.TestingAppendSink;
import org.apache.flink.table.planner.utils.JavaScalaConversionUtil;
import org.apache.flink.table.planner.utils.TableTestUtil;
import org.apache.flink.table.runtime.typeutils.RowDataTypeInfo;
import org.apache.flink.test.util.TestBaseUtils;
import org.apache.flink.types.Row;
import org.apache.flink.shaded.guava18.com.google.common.collect.Lists;
import com.klarna.hiverunner.HiveShell;
import com.klarna.hiverunner.annotations.HiveSQL;
import org.apache.calcite.rel.RelNode;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.mapred.JobConf;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import javax.annotation.Nullable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import static org.apache.flink.table.catalog.hive.HiveTestUtils.createTableEnvWithHiveCatalog;
import static org.apache.flink.table.catalog.hive.HiveTestUtils.waitForJobFinish;
import static org.apache.flink.table.planner.utils.JavaScalaConversionUtil.toScala;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.spy;
/**
* Tests {@link HiveTableSource}.
*/
@RunWith(FlinkStandaloneHiveRunner.class)
public class HiveTableSourceTest extends BatchAbstractTestBase {
@HiveSQL(files = {})
private static HiveShell hiveShell;
private static HiveCatalog hiveCatalog;
private static HiveConf hiveConf;
	@BeforeClass
	public static void createCatalog() throws IOException {
		// Create one HiveCatalog, backed by HiveRunner's embedded metastore,
		// shared by every test in this class.
		hiveConf = hiveShell.getHiveConf();
		hiveCatalog = HiveTestUtils.createHiveCatalog(hiveConf);
		hiveCatalog.open();
	}
@AfterClass
public static void closeCatalog() {
if (null != hiveCatalog) {
hiveCatalog.close();
}
}
	@Before
	public void setupSourceDatabaseAndData() {
		// Ensure the database used by most tests exists before each test runs.
		hiveShell.execute("CREATE DATABASE IF NOT EXISTS source_db");
	}
	@Test
	public void testReadNonPartitionedTable() throws Exception {
		// Reads a plain (non-partitioned) text table and checks all rows and
		// column values come back in insertion order.
		final String catalogName = "hive";
		final String dbName = "source_db";
		final String tblName = "test";
		hiveShell.execute("CREATE TABLE source_db.test ( a INT, b INT, c STRING, d BIGINT, e DOUBLE)");
		HiveTestUtils.createTextTableInserter(hiveShell, dbName, tblName)
				.addRow(new Object[] { 1, 1, "a", 1000L, 1.11 })
				.addRow(new Object[] { 2, 2, "b", 2000L, 2.22 })
				.addRow(new Object[] { 3, 3, "c", 3000L, 3.33 })
				.addRow(new Object[] { 4, 4, "d", 4000L, 4.44 })
				.commit();
		TableEnvironment tEnv = HiveTestUtils.createTableEnvWithBlinkPlannerBatchMode();
		tEnv.registerCatalog(catalogName, hiveCatalog);
		Table src = tEnv.sqlQuery("select * from hive.source_db.test");
		List<Row> rows = Lists.newArrayList(src.execute().collect());
		Assert.assertEquals(4, rows.size());
		Assert.assertEquals("1,1,a,1000,1.11", rows.get(0).toString());
		Assert.assertEquals("2,2,b,2000,2.22", rows.get(1).toString());
		Assert.assertEquals("3,3,c,3000,3.33", rows.get(2).toString());
		Assert.assertEquals("4,4,d,4000,4.44", rows.get(3).toString());
	}
	@Test
	public void testReadComplexDataType() throws Exception {
		// Reads Hive complex types (array, map, struct) and checks they map to
		// Integer[], Map and Row on the Flink side.
		final String catalogName = "hive";
		final String dbName = "source_db";
		final String tblName = "complex_test";
		hiveShell.execute("create table source_db.complex_test(" +
						"a array<int>, m map<int,string>, s struct<f1:int,f2:bigint>)");
		Integer[] array = new Integer[]{1, 2, 3};
		// LinkedHashMap keeps insertion order so assertEquals on the map is stable.
		Map<Integer, String> map = new LinkedHashMap<>();
		map.put(1, "a");
		map.put(2, "b");
		Object[] struct = new Object[]{3, 3L};
		HiveTestUtils.createTextTableInserter(hiveShell, dbName, tblName)
				.addRow(new Object[]{array, map, struct})
				.commit();
		TableEnvironment tEnv = HiveTestUtils.createTableEnvWithBlinkPlannerBatchMode();
		tEnv.registerCatalog(catalogName, hiveCatalog);
		Table src = tEnv.sqlQuery("select * from hive.source_db.complex_test");
		List<Row> rows = Lists.newArrayList(src.execute().collect());
		Assert.assertEquals(1, rows.size());
		assertArrayEquals(array, (Integer[]) rows.get(0).getField(0));
		assertEquals(map, rows.get(0).getField(1));
		// Hive structs are surfaced as Flink Row values.
		assertEquals(Row.of(struct[0], struct[1]), rows.get(0).getField(2));
	}
	/**
	 * Test to read from partition table. Writes two rows into each of two
	 * partitions and verifies all four rows (with the partition value appended
	 * as the last column) are read back.
	 *
	 * @throws Exception on any failure while writing to or reading from Hive
	 */
	@Test
	public void testReadPartitionTable() throws Exception {
		final String catalogName = "hive";
		final String dbName = "source_db";
		final String tblName = "test_table_pt";
		hiveShell.execute("CREATE TABLE source_db.test_table_pt " +
						"(year STRING, value INT) partitioned by (pt int);");
		HiveTestUtils.createTextTableInserter(hiveShell, dbName, tblName)
				.addRow(new Object[]{"2014", 3})
				.addRow(new Object[]{"2014", 4})
				.commit("pt=0");
		HiveTestUtils.createTextTableInserter(hiveShell, dbName, tblName)
				.addRow(new Object[]{"2015", 2})
				.addRow(new Object[]{"2015", 5})
				.commit("pt=1");
		TableEnvironment tEnv = HiveTestUtils.createTableEnvWithBlinkPlannerBatchMode();
		tEnv.registerCatalog(catalogName, hiveCatalog);
		Table src = tEnv.sqlQuery("select * from hive.source_db.test_table_pt");
		List<Row> rows = Lists.newArrayList(src.execute().collect());
		assertEquals(4, rows.size());
		// Sort before comparing: partition read order is not guaranteed.
		Object[] rowStrings = rows.stream().map(Row::toString).sorted().toArray();
		assertArrayEquals(new String[]{"2014,3,0", "2014,4,0", "2015,2,1", "2015,5,1"}, rowStrings);
	}
	// NOTE(review): "Prunning" is a typo for "Pruning"; kept to preserve the
	// public test name.
	@Test
	public void testPartitionPrunning() throws Exception {
		// Queries with "pt = 0" and checks that (a) the plan shows only one
		// partition survives pruning and (b) only that partition's rows return.
		final String catalogName = "hive";
		final String dbName = "source_db";
		final String tblName = "test_table_pt_1";
		hiveShell.execute("CREATE TABLE source_db.test_table_pt_1 " +
						"(year STRING, value INT) partitioned by (pt int);");
		HiveTestUtils.createTextTableInserter(hiveShell, dbName, tblName)
				.addRow(new Object[]{"2014", 3})
				.addRow(new Object[]{"2014", 4})
				.commit("pt=0");
		HiveTestUtils.createTextTableInserter(hiveShell, dbName, tblName)
				.addRow(new Object[]{"2015", 2})
				.addRow(new Object[]{"2015", 5})
				.commit("pt=1");
		TableEnvironment tEnv = HiveTestUtils.createTableEnvWithBlinkPlannerBatchMode();
		tEnv.registerCatalog(catalogName, hiveCatalog);
		Table src = tEnv.sqlQuery("select * from hive.source_db.test_table_pt_1 where pt = 0");
		// first check execution plan to ensure partition prunning works
		String[] explain = src.explain().split("==.*==\n");
		assertEquals(4, explain.length);
		String optimizedLogicalPlan = explain[2];
		String physicalExecutionPlan = explain[3];
		assertTrue(optimizedLogicalPlan, optimizedLogicalPlan.contains(
				"HiveTableSource(year, value, pt) TablePath: source_db.test_table_pt_1, PartitionPruned: true, PartitionNums: 1"));
		assertTrue(physicalExecutionPlan, physicalExecutionPlan.contains(
				"HiveTableSource(year, value, pt) TablePath: source_db.test_table_pt_1, PartitionPruned: true, PartitionNums: 1"));
		// second check execute results
		List<Row> rows = Lists.newArrayList(src.execute().collect());
		assertEquals(2, rows.size());
		Object[] rowStrings = rows.stream().map(Row::toString).sorted().toArray();
		assertArrayEquals(new String[]{"2014,3,0", "2014,4,0"}, rowStrings);
	}
	@Test
	public void testPartitionFilter() throws Exception {
		// Exercises metastore-side partition filtering with a range of predicates
		// (comparisons, IN, AND/OR combinations, and a string value containing a
		// special character). catalog.fallback must stay false: all of these
		// filters should be handled by the metastore, not by client-side listing.
		hiveShell.execute("create database db1");
		try {
			hiveShell.execute("create table db1.part(x int) partitioned by (p1 int,p2 string)");
			HiveTestUtils.createTextTableInserter(hiveShell, "db1", "part")
					.addRow(new Object[]{1}).commit("p1=1,p2='a'");
			HiveTestUtils.createTextTableInserter(hiveShell, "db1", "part")
					.addRow(new Object[]{2}).commit("p1=2,p2='b'");
			HiveTestUtils.createTextTableInserter(hiveShell, "db1", "part")
					.addRow(new Object[]{3}).commit("p1=3,p2='c'");
			// test string partition columns with special characters
			HiveTestUtils.createTextTableInserter(hiveShell, "db1", "part")
					.addRow(new Object[]{4}).commit("p1=4,p2='c:2'");
			TableEnvironment tableEnv = HiveTestUtils.createTableEnvWithBlinkPlannerBatchMode();
			TestPartitionFilterCatalog catalog = new TestPartitionFilterCatalog(
					hiveCatalog.getName(), hiveCatalog.getDefaultDatabase(), hiveCatalog.getHiveConf(), hiveCatalog.getHiveVersion());
			tableEnv.registerCatalog(catalog.getName(), catalog);
			tableEnv.useCatalog(catalog.getName());
			Table query = tableEnv.sqlQuery("select x from db1.part where p1>1 or p2<>'a' order by x");
			String[] explain = query.explain().split("==.*==\n");
			assertFalse(catalog.fallback);
			String optimizedPlan = explain[2];
			assertTrue(optimizedPlan, optimizedPlan.contains("PartitionPruned: true, PartitionNums: 3"));
			List<Row> results = Lists.newArrayList(query.execute().collect());
			assertEquals("[2, 3, 4]", results.toString());
			// Predicate matching no partition: plan must show zero partitions.
			query = tableEnv.sqlQuery("select x from db1.part where p1>2 and p2<='a' order by x");
			explain = query.explain().split("==.*==\n");
			assertFalse(catalog.fallback);
			optimizedPlan = explain[2];
			assertTrue(optimizedPlan, optimizedPlan.contains("PartitionPruned: true, PartitionNums: 0"));
			results = Lists.newArrayList(query.execute().collect());
			assertEquals("[]", results.toString());
			query = tableEnv.sqlQuery("select x from db1.part where p1 in (1,3,5) order by x");
			explain = query.explain().split("==.*==\n");
			assertFalse(catalog.fallback);
			optimizedPlan = explain[2];
			assertTrue(optimizedPlan, optimizedPlan.contains("PartitionPruned: true, PartitionNums: 2"));
			results = Lists.newArrayList(query.execute().collect());
			assertEquals("[1, 3]", results.toString());
			query = tableEnv.sqlQuery("select x from db1.part where (p1=1 and p2='a') or ((p1=2 and p2='b') or p2='d') order by x");
			explain = query.explain().split("==.*==\n");
			assertFalse(catalog.fallback);
			optimizedPlan = explain[2];
			assertTrue(optimizedPlan, optimizedPlan.contains("PartitionPruned: true, PartitionNums: 2"));
			results = Lists.newArrayList(query.execute().collect());
			assertEquals("[1, 2]", results.toString());
			// Value with ':' must survive partition-spec escaping end to end.
			query = tableEnv.sqlQuery("select x from db1.part where p2 = 'c:2' order by x");
			explain = query.explain().split("==.*==\n");
			assertFalse(catalog.fallback);
			optimizedPlan = explain[2];
			assertTrue(optimizedPlan, optimizedPlan.contains("PartitionPruned: true, PartitionNums: 1"));
			results = Lists.newArrayList(query.execute().collect());
			assertEquals("[4]", results.toString());
		} finally {
			hiveShell.execute("drop database db1 cascade");
		}
	}
@Test
public void testPartitionFilterDateTimestamp() throws Exception {
hiveShell.execute("create database db1");
try {
hiveShell.execute("create table db1.part(x int) partitioned by (p1 date,p2 timestamp)");
HiveTestUtils.createTextTableInserter(hiveShell, "db1", "part")
.addRow(new Object[]{1}).commit("p1='2018-08-08',p2='2018-08-08 08:08:08'");
HiveTestUtils.createTextTableInserter(hiveShell, "db1", "part")
.addRow(new Object[]{2}).commit("p1='2018-08-09',p2='2018-08-08 08:08:09'");
HiveTestUtils.createTextTableInserter(hiveShell, "db1", "part")
.addRow(new Object[]{3}).commit("p1='2018-08-10',p2='2018-08-08 08:08:10'");
TableEnvironment tableEnv = HiveTestUtils.createTableEnvWithBlinkPlannerBatchMode();
TestPartitionFilterCatalog catalog = new TestPartitionFilterCatalog(
hiveCatalog.getName(), hiveCatalog.getDefaultDatabase(), hiveCatalog.getHiveConf(), hiveCatalog.getHiveVersion());
tableEnv.registerCatalog(catalog.getName(), catalog);
tableEnv.useCatalog(catalog.getName());
Table query = tableEnv.sqlQuery(
"select x from db1.part where p1>cast('2018-08-09' as date) and p2<>cast('2018-08-08 08:08:09' as timestamp)");
String[] explain = query.explain().split("==.*==\n");
assertTrue(catalog.fallback);
String optimizedPlan = explain[2];
assertTrue(optimizedPlan, optimizedPlan.contains("PartitionPruned: true, PartitionNums: 1"));
List<Row> results = Lists.newArrayList(query.execute().collect());
assertEquals("[3]", results.toString());
System.out.println(results);
} finally {
hiveShell.execute("drop database db1 cascade");
}
}
	@Test
	public void testProjectionPushDown() throws Exception {
		// Aggregation only touches p1 and y, so the source should report
		// ProjectedFields: [2, 1] (indices of p1 and y) in both plans.
		hiveShell.execute("create table src(x int,y string) partitioned by (p1 bigint, p2 string)");
		final String catalogName = "hive";
		try {
			HiveTestUtils.createTextTableInserter(hiveShell, "default", "src")
					.addRow(new Object[]{1, "a"})
					.addRow(new Object[]{2, "b"})
					.commit("p1=2013, p2='2013'");
			HiveTestUtils.createTextTableInserter(hiveShell, "default", "src")
					.addRow(new Object[]{3, "c"})
					.commit("p1=2014, p2='2014'");
			TableEnvironment tableEnv = HiveTestUtils.createTableEnvWithBlinkPlannerBatchMode();
			tableEnv.registerCatalog(catalogName, hiveCatalog);
			Table table = tableEnv.sqlQuery("select p1, count(y) from hive.`default`.src group by p1");
			String[] explain = table.explain().split("==.*==\n");
			assertEquals(4, explain.length);
			String logicalPlan = explain[2];
			String physicalPlan = explain[3];
			String expectedExplain =
					"HiveTableSource(x, y, p1, p2) TablePath: default.src, PartitionPruned: false, PartitionNums: null, ProjectedFields: [2, 1]";
			assertTrue(logicalPlan, logicalPlan.contains(expectedExplain));
			assertTrue(physicalPlan, physicalPlan.contains(expectedExplain));
			List<Row> rows = Lists.newArrayList(table.execute().collect());
			assertEquals(2, rows.size());
			Object[] rowStrings = rows.stream().map(Row::toString).sorted().toArray();
			assertArrayEquals(new String[]{"2013,2", "2014,1"}, rowStrings);
		} finally {
			hiveShell.execute("drop table src");
		}
	}
@Test
public void testLimitPushDown() throws Exception {
hiveShell.execute("create table src (a string)");
final String catalogName = "hive";
try {
HiveTestUtils.createTextTableInserter(hiveShell, "default", "src")
.addRow(new Object[]{"a"})
.addRow(new Object[]{"b"})
.addRow(new Object[]{"c"})
.addRow(new Object[]{"d"})
.commit();
//Add this to obtain correct stats of table to avoid FLINK-14965 problem
hiveShell.execute("analyze table src COMPUTE STATISTICS");
TableEnvironment tableEnv = HiveTestUtils.createTableEnvWithBlinkPlannerBatchMode();
tableEnv.registerCatalog(catalogName, hiveCatalog);
Table table = tableEnv.sqlQuery("select * from hive.`default`.src limit 1");
String[] explain = table.explain().split("==.*==\n");
assertEquals(4, explain.length);
String logicalPlan = explain[2];
String physicalPlan = explain[3];
String expectedExplain = "HiveTableSource(a) TablePath: default.src, PartitionPruned: false, " +
"PartitionNums: null, LimitPushDown true, Limit 1";
assertTrue(logicalPlan.contains(expectedExplain));
assertTrue(physicalPlan.contains(expectedExplain));
List<Row> rows = Lists.newArrayList(table.execute().collect());
assertEquals(1, rows.size());
Object[] rowStrings = rows.stream().map(Row::toString).sorted().toArray();
assertArrayEquals(new String[]{"a"}, rowStrings);
} finally {
hiveShell.execute("drop table src");
}
}
	@Test
	public void testParallelismSetting() {
		// With two partitions and inferred source parallelism enabled (default),
		// the translated source transformation should get parallelism 2.
		final String catalogName = "hive";
		final String dbName = "source_db";
		final String tblName = "test_parallelism";
		hiveShell.execute("CREATE TABLE source_db.test_parallelism " +
						"(year STRING, value INT) partitioned by (pt int);");
		HiveTestUtils.createTextTableInserter(hiveShell, dbName, tblName)
				.addRow(new Object[]{"2014", 3})
				.addRow(new Object[]{"2014", 4})
				.commit("pt=0");
		HiveTestUtils.createTextTableInserter(hiveShell, dbName, tblName)
				.addRow(new Object[]{"2015", 2})
				.addRow(new Object[]{"2015", 5})
				.commit("pt=1");
		TableEnvironment tEnv = HiveTestUtils.createTableEnvWithBlinkPlannerBatchMode();
		tEnv.registerCatalog(catalogName, hiveCatalog);
		Table table = tEnv.sqlQuery("select * from hive.source_db.test_parallelism");
		// Translate the optimized plan by hand to inspect the transformation.
		PlannerBase planner = (PlannerBase) ((TableEnvironmentImpl) tEnv).getPlanner();
		RelNode relNode = planner.optimize(TableTestUtil.toRelNode(table));
		ExecNode execNode = planner.translateToExecNodePlan(toScala(Collections.singletonList(relNode))).get(0);
		@SuppressWarnings("unchecked")
		Transformation transformation = execNode.translateToPlan(planner);
		Assert.assertEquals(2, transformation.getParallelism());
	}
	@Test
	public void testParallelismOnLimitPushDown() {
		// With parallelism inference disabled and a pushed-down limit, the
		// source under the limit operator should run with parallelism 1 even
		// though the default parallelism is configured to 2.
		final String catalogName = "hive";
		final String dbName = "source_db";
		final String tblName = "test_parallelism_limit_pushdown";
		hiveShell.execute("CREATE TABLE source_db.test_parallelism_limit_pushdown " +
						"(year STRING, value INT) partitioned by (pt int);");
		HiveTestUtils.createTextTableInserter(hiveShell, dbName, tblName)
				.addRow(new Object[]{"2014", 3})
				.addRow(new Object[]{"2014", 4})
				.commit("pt=0");
		HiveTestUtils.createTextTableInserter(hiveShell, dbName, tblName)
				.addRow(new Object[]{"2015", 2})
				.addRow(new Object[]{"2015", 5})
				.commit("pt=1");
		TableEnvironment tEnv = HiveTestUtils.createTableEnvWithBlinkPlannerBatchMode();
		tEnv.getConfig().getConfiguration().setBoolean(
				HiveOptions.TABLE_EXEC_HIVE_INFER_SOURCE_PARALLELISM, false);
		tEnv.getConfig().getConfiguration().setInteger(
				ExecutionConfigOptions.TABLE_EXEC_RESOURCE_DEFAULT_PARALLELISM, 2);
		tEnv.registerCatalog(catalogName, hiveCatalog);
		Table table = tEnv.sqlQuery("select * from hive.source_db.test_parallelism_limit_pushdown limit 1");
		PlannerBase planner = (PlannerBase) ((TableEnvironmentImpl) tEnv).getPlanner();
		RelNode relNode = planner.optimize(TableTestUtil.toRelNode(table));
		ExecNode execNode = planner.translateToExecNodePlan(toScala(Collections.singletonList(relNode))).get(0);
		@SuppressWarnings("unchecked")
		Transformation transformation = execNode.translateToPlan(planner);
		// Walk down to the source transformation beneath the limit operator.
		Assert.assertEquals(1, ((PartitionTransformation) ((OneInputTransformation) transformation).getInput())
				.getInput().getParallelism());
	}
	@Test
	public void testSourceConfig() throws Exception {
		// vector reader not available for 1.x and we're not testing orc for 2.0.x
		Assume.assumeTrue(HiveVersionTestUtil.HIVE_210_OR_LATER);
		// Snapshot the environment so the overloaded helper can mutate it and
		// we can restore it afterwards.
		Map<String, String> env = System.getenv();
		hiveShell.execute("create database db1");
		try {
			hiveShell.execute("create table db1.src (x int,y string) stored as orc");
			hiveShell.execute("insert into db1.src values (1,'a'),(2,'b')");
			// Exercise both combinations: (fallbackMR, inferParallelism).
			testSourceConfig(true, true);
			testSourceConfig(false, false);
		} finally {
			TestBaseUtils.setEnv(env);
			hiveShell.execute("drop database db1 cascade");
		}
	}
	@Test(timeout = 120000)
	public void testStreamPartitionRead() throws Exception {
		// Streams a partitioned table in partition-time consume order: a
		// background thread keeps adding partitions while the job is running,
		// and the sink should eventually observe all six rows.
		final String catalogName = "hive";
		final String dbName = "source_db";
		final String tblName = "stream_test";
		hiveShell.execute("CREATE TABLE source_db.stream_test (" +
				" a INT," +
				" b STRING" +
				") PARTITIONED BY (ts STRING) TBLPROPERTIES (" +
				"'streaming-source.enable'='true'," +
				"'streaming-source.monitor-interval'='100ms'," +
				"'streaming-source.consume-order'='partition-time'" +
				")");
		HiveTestUtils.createTextTableInserter(hiveShell, dbName, tblName)
				.addRow(new Object[]{0, "0"})
				.commit("ts='2020-05-06 00:00:00'");
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		StreamTableEnvironment tEnv = HiveTestUtils.createTableEnvWithBlinkPlannerStreamMode(env);
		tEnv.registerCatalog(catalogName, hiveCatalog);
		Table src = tEnv.from("hive.source_db.stream_test");
		TestingAppendRowDataSink sink = new TestingAppendRowDataSink(new RowDataTypeInfo(
				DataTypes.INT().getLogicalType(),
				DataTypes.STRING().getLogicalType(),
				DataTypes.STRING().getLogicalType()));
		DataStream<RowData> out = tEnv.toAppendStream(src, RowData.class);
		out.print(); // add print to see streaming reading
		out.addSink(sink);
		JobClient job = env.executeAsync("job");
		// Writer thread: adds one new partition every 5s for partitions 1..5.
		Runnable runnable = () -> {
			for (int i = 1; i < 6; i++) {
				try {
					Thread.sleep(5_000);
				} catch (InterruptedException e) {
					throw new RuntimeException(e);
				}
				HiveTestUtils.createTextTableInserter(hiveShell, dbName, tblName)
						.addRow(new Object[]{i, String.valueOf(i)})
						.commit("ts='2020-05-06 00:" + i + "0:00'");
			}
		};
		Thread thread = new Thread(runnable);
		thread.setDaemon(true);
		thread.start();
		thread.join();
		// NOTE(review): fixed sleep gives the monitor time to pick up the last
		// partition; timing-based, so this test can be slow/flaky on loaded CI.
		Thread.sleep(5_000);
		List<String> expected = Arrays.asList(
				"+I(0,0,2020-05-06 00:00:00)",
				"+I(1,1,2020-05-06 00:10:00)",
				"+I(2,2,2020-05-06 00:20:00)",
				"+I(3,3,2020-05-06 00:30:00)",
				"+I(4,4,2020-05-06 00:40:00)",
				"+I(5,5,2020-05-06 00:50:00)"
		);
		List<String> results = sink.getJavaAppendResults();
		results.sort(String::compareTo);
		assertEquals(expected, results);
		job.cancel();
		StreamTestSink.clear();
	}
	@Test(timeout = 30000)
	public void testNonPartitionStreamingSourceWithMapredReader() throws Exception {
		// Streaming read of a non-partitioned table using the mapred reader.
		testNonPartitionStreamingSource(true, "test_mapred_reader");
	}
	@Test(timeout = 30000)
	public void testNonPartitionStreamingSourceWithVectorizedReader() throws Exception {
		// Streaming read of a non-partitioned table using the vectorized reader.
		testNonPartitionStreamingSource(false, "test_vectorized_reader");
	}
	/**
	 * Runs a streaming read against a non-partitioned parquet table while a
	 * background thread inserts rows, then checks all six rows arrive.
	 *
	 * @param useMapredReader whether to force the mapred fallback reader
	 * @param tblName         table to create in source_db (unique per caller)
	 */
	private void testNonPartitionStreamingSource(Boolean useMapredReader, String tblName) throws Exception {
		final String catalogName = "hive";
		final String dbName = "source_db";
		hiveShell.execute("CREATE TABLE source_db." + tblName + " (" +
				" a INT," +
				" b CHAR(1) " +
				") stored as parquet TBLPROPERTIES (" +
				" 'streaming-source.enable'='true'," +
				" 'streaming-source.monitor-interval'='100ms'" +
				")");
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		StreamTableEnvironment tEnv = HiveTestUtils.createTableEnvWithBlinkPlannerStreamMode(env);
		tEnv.getConfig().getConfiguration().setBoolean(
				HiveOptions.TABLE_EXEC_HIVE_FALLBACK_MAPRED_READER, useMapredReader);
		tEnv.registerCatalog(catalogName, hiveCatalog);
		Table src = tEnv.sqlQuery("select * from hive.source_db." + tblName);
		TestingAppendSink sink = new TestingAppendSink();
		tEnv.toAppendStream(src, Row.class).addSink(sink);
		DataStream<RowData> out = tEnv.toAppendStream(src, RowData.class);
		out.print(); // add print to see streaming reading
		final JobClient jobClient = env.executeAsync();
		// Writer thread: three inserts of (1,'a'),(2,'b') with pauses between.
		Runnable runnable = () -> {
			for (int i = 0; i < 3; ++i) {
				hiveShell.execute("insert into source_db." + tblName + " values (1,'a'), (2,'b')");
				try {
					Thread.sleep(2_000);
				} catch (InterruptedException e) {
					e.printStackTrace();
					break;
				}
			}
		};
		Thread thread = new Thread(runnable);
		thread.setDaemon(true);
		thread.start();
		// Waiting for writing test data to finish
		thread.join();
		// Wait up to 20 seconds for all data to be processed
		for (int i = 0; i < 20; ++i) {
			if (sink.getAppendResults().size() == 6) {
				break;
			} else {
				Thread.sleep(1000);
			}
		}
		// check the result
		List<String> actual = new ArrayList<>(JavaScalaConversionUtil.toJava(sink.getAppendResults()));
		actual.sort(String::compareTo);
		List<String> expected = Arrays.asList("1,a", "1,a", "1,a", "2,b", "2,b", "2,b");
		expected.sort(String::compareTo);
		assertEquals(expected, actual);
		// cancel the job
		jobClient.cancel();
	}
	/**
	 * Runs a query through a spied catalog whose table factory produces a
	 * {@link TestConfigSource}, which asserts inside the source that the
	 * fallback-reader and parallelism-inference settings were honored.
	 *
	 * @param fallbackMR       expected TABLE_EXEC_HIVE_FALLBACK_MAPRED_READER
	 * @param inferParallelism expected TABLE_EXEC_HIVE_INFER_SOURCE_PARALLELISM
	 */
	private void testSourceConfig(boolean fallbackMR, boolean inferParallelism) throws Exception {
		// Spy the factory so createTableSource returns our asserting source.
		HiveTableFactory tableFactorySpy = spy((HiveTableFactory) hiveCatalog.getTableFactory().get());
		doAnswer(invocation -> {
			TableSourceFactory.Context context = invocation.getArgument(0);
			return new TestConfigSource(
					new JobConf(hiveCatalog.getHiveConf()),
					context.getConfiguration(),
					context.getObjectIdentifier().toObjectPath(),
					context.getTable(),
					fallbackMR,
					inferParallelism);
		}).when(tableFactorySpy).createTableSource(any(TableSourceFactory.Context.class));
		HiveCatalog catalogSpy = spy(hiveCatalog);
		doReturn(Optional.of(tableFactorySpy)).when(catalogSpy).getTableFactory();
		TableEnvironment tableEnv = HiveTestUtils.createTableEnvWithBlinkPlannerBatchMode();
		tableEnv.getConfig().getConfiguration().setBoolean(
				HiveOptions.TABLE_EXEC_HIVE_FALLBACK_MAPRED_READER, fallbackMR);
		tableEnv.getConfig().getConfiguration().setBoolean(
				HiveOptions.TABLE_EXEC_HIVE_INFER_SOURCE_PARALLELISM, inferParallelism);
		tableEnv.getConfig().getConfiguration().setInteger(
				ExecutionConfigOptions.TABLE_EXEC_RESOURCE_DEFAULT_PARALLELISM, 2);
		tableEnv.registerCatalog(catalogSpy.getName(), catalogSpy);
		tableEnv.useCatalog(catalogSpy.getName());
		List<Row> results = Lists.newArrayList(
				tableEnv.sqlQuery("select * from db1.src order by x").execute().collect());
		assertEquals("[1,a, 2,b]", results.toString());
	}
	@Test
	public void testParquetCaseInsensitive() throws Exception {
		// Column-name case sensitivity round trip for the parquet format.
		testCaseInsensitive("parquet");
	}
	/**
	 * Writes a file with upper-case column names via a Flink filesystem table,
	 * then reads it back through a Hive external table declared with
	 * lower-case names, verifying case-insensitive field resolution.
	 *
	 * @param format storage format to test (e.g. "parquet")
	 */
	private void testCaseInsensitive(String format) throws Exception {
		TableEnvironment tEnv = createTableEnvWithHiveCatalog(hiveCatalog);
		String folderURI = TEMPORARY_FOLDER.newFolder().toURI().toString();
		// Flink to write sensitive fields to parquet file
		tEnv.executeSql(String.format(
				"create table parquet_t (I int, J int) with (" +
						"'connector'='filesystem','format'='%s','path'='%s')",
				format,
				folderURI));
		waitForJobFinish(tEnv.executeSql("insert into parquet_t select 1, 2"));
		tEnv.executeSql("drop table parquet_t");
		// Hive to read parquet file
		tEnv.getConfig().setSqlDialect(SqlDialect.HIVE);
		tEnv.executeSql(String.format(
				"create external table parquet_t (i int, j int) stored as %s location '%s'",
				format,
				folderURI));
		Assert.assertEquals(
				Row.of(1, 2),
				tEnv.executeSql("select * from parquet_t").collect().next());
	}
	/**
	 * A sub-class of HiveTableSource to test vector reader switch.
	 *
	 * <p>Asserts, from inside the source, that the configured parallelism
	 * inference and mapred-reader fallback settings actually took effect.
	 */
	private static class TestConfigSource extends HiveTableSource {
		// Expected value of TABLE_EXEC_HIVE_FALLBACK_MAPRED_READER.
		private final boolean fallbackMR;
		// Expected value of TABLE_EXEC_HIVE_INFER_SOURCE_PARALLELISM.
		private final boolean inferParallelism;
		TestConfigSource(
				JobConf jobConf,
				ReadableConfig flinkConf,
				ObjectPath tablePath,
				CatalogTable catalogTable,
				boolean fallbackMR,
				boolean inferParallelism) {
			super(jobConf, flinkConf, tablePath, catalogTable);
			this.fallbackMR = fallbackMR;
			this.inferParallelism = inferParallelism;
		}
		@Override
		public DataStream<RowData> getDataStream(StreamExecutionEnvironment execEnv) {
			DataStreamSource<RowData> dataStream = (DataStreamSource<RowData>) super.getDataStream(execEnv);
			// Inferred parallelism is 1 for this tiny table; otherwise the
			// configured default of 2 applies.
			int parallelism = dataStream.getTransformation().getParallelism();
			assertEquals(inferParallelism ? 1 : 2, parallelism);
			return dataStream;
		}
		@Override
		HiveTableInputFormat getInputFormat(
				List<HiveTablePartition> allHivePartitions,
				boolean useMapRedReader) {
			assertEquals(useMapRedReader, fallbackMR);
			return super.getInputFormat(allHivePartitions, useMapRedReader);
		}
	}
	/**
	 * A sub-class of HiveCatalog to test list partitions by filter.
	 *
	 * <p>{@code fallback} flips to true if the planner falls back to listing
	 * all partitions instead of filtering them in the metastore.
	 */
	private static class TestPartitionFilterCatalog extends HiveCatalog {
		// Set when listPartitions(tablePath) (the unfiltered overload) is used.
		private boolean fallback = false;
		TestPartitionFilterCatalog(String catalogName, String defaultDatabase,
				@Nullable HiveConf hiveConf, String hiveVersion) {
			super(catalogName, defaultDatabase, hiveConf, hiveVersion, true);
		}
		@Override
		public List<CatalogPartitionSpec> listPartitions(ObjectPath tablePath) throws TableNotExistException, TableNotPartitionedException, CatalogException {
			fallback = true;
			return super.listPartitions(tablePath);
		}
	}
}
| |
package com.saucelabs.appium;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import io.appium.java_client.AppiumDriver;
import io.appium.java_client.MobileElement;
import io.appium.java_client.ios.IOSDriver;
import io.appium.java_client.ios.IOSElement;
import java.io.File;
import java.net.URL;
import java.util.List;
import org.apache.commons.lang.RandomStringUtils;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.util.EntityUtils;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.openqa.selenium.Alert;
import org.openqa.selenium.Dimension;
import org.openqa.selenium.NoSuchElementException;
import org.openqa.selenium.OutputType;
import org.openqa.selenium.Point;
import org.openqa.selenium.TakesScreenshot;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.interactions.Actions;
import org.openqa.selenium.remote.Augmenter;
import org.openqa.selenium.remote.DesiredCapabilities;
/**
* <a href="https://github.com/appium/appium">Appium</a> test which runs against a local Appium instance deployed
* with the 'UICatalog' iPhone project which is included in the Appium source distribution.
*
* @author Ross Rowe
*/
@SuppressWarnings("deprecation")
public class UICatalogTest {
private AppiumDriver<IOSElement> driver;
private WebElement row;
    @Before
    public void setUp() throws Exception {
        // set up appium
        // NOTE(review): app path and capabilities are hard-coded for a local
        // simulator build of UICatalog relative to this project's layout.
        File classpathRoot = new File(System.getProperty("user.dir"));
        File appDir = new File(classpathRoot, "../../../apps/UICatalog/build/Release-iphonesimulator");
        File app = new File(appDir, "UICatalog.app");
        DesiredCapabilities capabilities = new DesiredCapabilities();
        capabilities.setCapability("platformVersion", "8.1");
        capabilities.setCapability("deviceName", "iPhone 6");
        capabilities.setCapability("app", app.getAbsolutePath());
        // Requires a local Appium server listening on port 4723.
        driver = new IOSDriver<>(new URL("http://127.0.0.1:4723/wd/hub"), capabilities);
    }
    @After
    public void tearDown() throws Exception {
        // End the Appium session after each test.
        driver.quit();
    }
    // Opens the UICatalog menu entry at the given table-row index by tapping
    // the corresponding cell. (Previous comment here described a different
    // method and was removed.)
    private void openMenuPosition(int index) {
        MobileElement table = (MobileElement)driver.findElementByClassName("UIATableView");
        row = table.findElementsByClassName("UIATableCell").get(index);
        row.click();
    }
private Point getCenter(WebElement element) {
Point upperLeft = element.getLocation();
Dimension dimensions = element.getSize();
return new Point(upperLeft.getX() + dimensions.getWidth()/2, upperLeft.getY() + dimensions.getHeight()/2);
}
@Test
public void testFindElement() throws Exception {
//first view in UICatalog is a table
IOSElement table = driver.findElementByClassName("UIATableView");
assertNotNull(table);
//is number of cells/rows inside table correct
List<MobileElement> rows = table.findElementsByClassName("UIATableCell");
assertEquals(12, rows.size());
//is first one about buttons
assertEquals("Buttons, Various uses of UIButton", rows.get(0).getAttribute("name"));
//navigationBar is not inside table
WebElement nav_bar = null;
try {
nav_bar = table.findElementByClassName("UIANavigationBar");
} catch (NoSuchElementException e) {
//expected
}
assertNull(nav_bar);
//there is nav bar inside the app
driver.getPageSource();
nav_bar = driver.findElementByClassName("UIANavigationBar");
assertNotNull(nav_bar);
}
@Test
public void test_location() {
//get third row location
row = driver.findElementsByClassName("UIATableCell").get(2);
assertEquals(0, row.getLocation().getX());
assertEquals(152, row.getLocation().getY());
}
@Test
public void testScreenshot() {
//make screenshot and get is as base64
WebDriver augmentedDriver = new Augmenter().augment(driver);
String screenshot = ((TakesScreenshot) augmentedDriver).getScreenshotAs(OutputType.BASE64);
assertNotNull(screenshot);
//make screenshot and save it to the local filesystem
File file = ((TakesScreenshot) augmentedDriver).getScreenshotAs(OutputType.FILE);
assertNotNull(file);
}
@Test
public void testTextFieldEdit() {
//go to the text fields section
openMenuPosition(2);
WebElement text_field = driver.findElementsByClassName("UIATextField").get(0);
//get default/empty text
String default_val = text_field.getAttribute("value");
//write some random text to element
String rnd_string = RandomStringUtils.randomAlphanumeric(6);
text_field.sendKeys(rnd_string);
assertEquals(rnd_string, text_field.getAttribute("value"));
//send some random keys
String rnd_string2 = RandomStringUtils.randomAlphanumeric(6);
Actions swipe = new Actions(driver).sendKeys(rnd_string2);
swipe.perform();
//check if text is there
assertEquals(rnd_string + rnd_string2, text_field.getAttribute("value"));
//clear
text_field.clear();
//check if is empty/has default text
assertEquals(default_val, text_field.getAttribute("value"));
}
@Test
public void testAlertInteraction() {
//go to the alerts section
openMenuPosition(10);
//trigger modal alert with cancel & ok buttons
List<IOSElement> triggerOkCancel = driver.findElementsByAccessibilityId("Show OK-Cancel");
triggerOkCancel.get(1).click();
Alert alert = driver.switchTo().alert();
//check if title of alert is correct
assertEquals("UIAlertView <Alert message>", alert.getText());
alert.accept();
}
@Test
public void testScroll() {
//scroll menu
//get initial third row location
row = driver.findElementsByClassName("UIATableCell").get(2);
Point location1 = row.getLocation();
Point center = getCenter(row);
//perform swipe gesture
driver.swipe(center.getX(), center.getY(), center.getX(), center.getY()-20, 1);
//get new row coordinates
Point location2 = row.getLocation();
assertEquals(location1.getX(), location2.getX());
assertNotSame(location1.getY(), location2.getY());
}
@Test
public void testSlider() {
//go to controls
openMenuPosition(1);
//get the slider
WebElement slider = driver.findElementByClassName("UIASlider");
assertEquals("50%", slider.getAttribute("value"));
Point sliderLocation = getCenter(slider);
driver.swipe(sliderLocation.getX(), sliderLocation.getY(), sliderLocation.getX()-100, sliderLocation.getY(), 1);
assertEquals("0%", slider.getAttribute("value"));
}
@Test
public void testSessions() throws Exception {
HttpGet request = new HttpGet("http://localhost:4723/wd/hub/sessions");
@SuppressWarnings("resource")
HttpClient httpClient = new DefaultHttpClient();
HttpResponse response = httpClient.execute(request);
HttpEntity entity = response.getEntity();
JSONObject jsonObject = (JSONObject) new JSONParser().parse(EntityUtils.toString(entity));
String sessionId = driver.getSessionId().toString();
assertEquals(jsonObject.get("sessionId"), sessionId);
}
@Test
public void testSize() {
Dimension table = driver.findElementByClassName("UIATableView").getSize();
Dimension cell = driver.findElementsByClassName("UIATableCell").get(0).getSize();
assertEquals(table.getWidth(), cell.getWidth());
assertNotSame(table.getHeight(), cell.getHeight());
}
@Test
public void testSource() {
//get main view soruce
String source_main = driver.getPageSource();
assertTrue(source_main.contains("UIATableView"));
assertTrue(source_main.contains("TextFields, Uses of UITextField"));
//got to text fields section
openMenuPosition(2);
String source_textfields = driver.getPageSource();
assertTrue(source_textfields.contains("UIAStaticText"));
assertTrue(source_textfields.contains("TextFields"));
assertNotSame(source_main, source_textfields);
}
}
| |
/*
* Copyright 1999-2010 Luca Garulli (l.garulli--at--orientechnologies.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orientechnologies.orient.core.storage.impl.local;
import java.io.IOException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import com.orientechnologies.common.log.OLogManager;
import com.orientechnologies.orient.core.OConstants;
import com.orientechnologies.orient.core.config.OStorageTxConfiguration;
import com.orientechnologies.orient.core.storage.OPhysicalPosition;
/**
* Handle the records that wait to be committed. This class is not synchronized because the caller is responsible of it.<br/>
* Record structure:<br/>
* <br/>
* +--------+--------+---------+---------+---------+----------------+-------------+<br/>
* | STATUS | OPERAT | REQ ID .| TX ID . | CLUSTER | CLUSTER OFFSET | DATA OFFSET |<br/>
* | 1 byte | 1 byte | 2 bytes | 4 bytes | 2 bytes | 8 bytes ...... | 8 bytes ... |<br/>
* +--------+--------+---------+---------+---------+----------------+-------------+<br/>
* = 26 bytes<br/>
*/
public class OTxSegment extends OSingleFileSegment {
    /** Status: the slot is free / the entry was committed. */
    public static final byte  STATUS_FREE       = 0;
    /** Status: the entry belongs to a transaction in the commit phase. */
    public static final byte  STATUS_COMMITTING = 1;

    public static final byte  OPERATION_CREATE  = 0;
    public static final byte  OPERATION_DELETE  = 1;
    public static final byte  OPERATION_UPDATE  = 2;

    /** Default segment size on creation: 256 KB. */
    private static final int  DEF_START_SIZE    = 262144;
    /** Bytes per log record; see the record layout in the class javadoc. */
    private static final int  RECORD_SIZE       = 26;

    public OTxSegment(final OStorageLocal iStorage, final OStorageTxConfiguration iConfig) throws IOException {
        super(iStorage, iConfig);
    }

    /**
     * Opens the segment and starts the recovery of pending transactions, if any.
     *
     * @return always true
     */
    @Override
    public boolean open() throws IOException {
        acquireExclusiveLock();
        try {
            // IGNORE IF IT'S SOFTLY CLOSED
            super.open();

            // CHECK FOR PENDING TRANSACTION ENTRIES TO RECOVER
            int size = (file.getFilledUpTo() / RECORD_SIZE);
            if (size == 0)
                return true;

            recoverTransactions();
            return true;
        } finally {
            releaseExclusiveLock();
        }
    }

    @Override
    public void create(final int iStartSize) throws IOException {
        // FALL BACK TO THE DEFAULT SIZE WHEN THE CALLER PASSES A NEGATIVE VALUE
        super.create(iStartSize > -1 ? iStartSize : DEF_START_SIZE);
    }

    /**
     * Appends a log entry describing one record operation of a transaction.
     * Field order and widths follow the record layout in the class javadoc:
     * status(1), operation(1), req id(2), tx id(4), cluster(2), cluster offset(8), data offset(8).
     *
     * @param iOperation  one of OPERATION_CREATE / OPERATION_DELETE / OPERATION_UPDATE
     * @param iReqId      the id of the requester
     * @param iTxId       the id of the transaction
     * @param iClusterId  cluster of the involved record
     * @param iPosition   position of the record inside the cluster
     * @param iDataOffset offset of the (old) record data inside the data segment
     * @throws IOException on write failure
     */
    public void addLog(final byte iOperation, final int iReqId, final int iTxId, final int iClusterId, final long iPosition,
            final long iDataOffset) throws IOException {
        acquireExclusiveLock();
        try {
            int offset = file.allocateSpace(RECORD_SIZE);

            file.writeByte(offset, STATUS_COMMITTING);
            offset += OConstants.SIZE_BYTE;

            file.writeByte(offset, iOperation);
            offset += OConstants.SIZE_BYTE;

            file.writeShort(offset, (short) iReqId);
            offset += OConstants.SIZE_SHORT;

            file.writeInt(offset, iTxId);
            offset += OConstants.SIZE_INT;

            file.writeShort(offset, (short) iClusterId);
            offset += OConstants.SIZE_SHORT;

            file.writeLong(offset, iPosition);
            offset += OConstants.SIZE_LONG;

            file.writeLong(offset, iDataOffset);

            synchRecord();
        } finally {
            releaseExclusiveLock();
        }
    }

    /**
     * Clears all the entries of the given requester/transaction by setting their status
     * to STATUS_FREE, then shrinks the tail of the file if the freed entries were the
     * last ones.
     *
     * @param iReqId the id of the requester
     * @param iTxId  the id of the transaction
     * @throws IOException on read/write failure
     */
    public void clearLogEntries(final int iReqId, final int iTxId) throws IOException {
        acquireExclusiveLock();
        try {
            int size = (file.getFilledUpTo() / RECORD_SIZE);

            byte status;
            int reqId;
            int txId;
            int offset;
            int recordFreed = 0;

            for (int i = 0; i < size; ++i) {
                // READ THE STATUS
                offset = i * RECORD_SIZE;
                status = file.readByte(offset);

                if (status == STATUS_COMMITTING) {
                    // READ THE REQ-ID, SKIPPING THE STATUS AND OPERATION BYTES
                    offset += OConstants.SIZE_BYTE + OConstants.SIZE_BYTE;
                    reqId = file.readShort(offset);

                    if (reqId == iReqId) {
                        // CURRENT REQUESTER, CHECKING FOR THE TX
                        offset += OConstants.SIZE_SHORT;
                        txId = file.readInt(offset);

                        if (txId == iTxId) {
                            // CURRENT REQUESTER & TX: CLEAR THE ENTRY BY WRITING THE "FREE" STATUS
                            file.writeByte(i * RECORD_SIZE, STATUS_FREE);
                            recordFreed++;
                        }
                    }
                }
            }

            // SHRINK THE FILE TO THE LAST GOOD POSITION. USE THE COUNTER OF PREVIOUS CYCLE TO DETERMINE THE NUMBER OF RECORDS FREED
            // FOR THIS TX.
            // NOTE(review): this assumes the entries just freed form the tail of the file;
            // if the tail belongs to another requester/tx, the last record is kept as-is.
            int lastRecord = size - 1;

            for (int i = size - 1; i > -1 && recordFreed > 0; --i) {
                offset = i * RECORD_SIZE;
                status = file.readByte(offset);

                offset += OConstants.SIZE_BYTE;
                offset += OConstants.SIZE_BYTE;

                reqId = file.readShort(offset);
                if (reqId != iReqId)
                    // NOT MY REQ, EXIT
                    break;

                offset += OConstants.SIZE_SHORT;
                // FIX: the tx id is 4 bytes (written by addLog with writeInt); it was read with readShort
                txId = file.readInt(offset);
                if (txId != iTxId)
                    // NOT MY TX, EXIT
                    break;

                lastRecord = i;
                recordFreed--;
            }

            if (lastRecord > -1)
                file.shrink(lastRecord * RECORD_SIZE);

            synchTx();
        } finally {
            releaseExclusiveLock();
        }
    }

    /** Returns the total number of log entries currently stored in the segment. */
    public int getTotalLogCount() {
        acquireSharedLock();
        try {
            return (file.getFilledUpTo() / RECORD_SIZE);
        } finally {
            releaseSharedLock();
        }
    }

    /**
     * Recovers all the pending transactions found in the segment, then empties it.
     */
    private void recoverTransactions() throws IOException {
        OLogManager.instance().info(
                this,
                "Started the recovering of pending transactions after a brute shutdown. Found " + getTotalLogCount()
                        + " entry logs. Scanning...");

        int recoveredTxs = 0;
        int recoveredRecords = 0;

        Map<Integer, Integer> txToRecover = scanForTransactionsToRecover();
        for (Entry<Integer, Integer> entry : txToRecover.entrySet()) {
            recoveredRecords += recoverTransaction(entry.getKey(), entry.getValue());
            recoveredTxs++;
        }

        // EMPTY THE FILE
        file.shrink(0);

        OLogManager.instance().info(this, "Recovering successfully completed:");
        OLogManager.instance().info(this, "- Recovered Tx.....: " + recoveredTxs);
        OLogManager.instance().info(this, "- Recovered Records: " + recoveredRecords);
    }

    /**
     * Scans the whole file and collects the req-id to tx-id pairs that need recovery.
     * A transaction with at least one STATUS_FREE entry was fully committed (the user
     * simply never received the ACK), so it is excluded from the result.
     */
    private Map<Integer, Integer> scanForTransactionsToRecover() throws IOException {
        byte status;
        int reqId;
        int txId;
        int offset;

        // SCAN ALL THE FILE SEARCHING FOR THE TRANSACTIONS TO RECOVER
        Map<Integer, Integer> txToRecover = new HashMap<Integer, Integer>();
        Map<Integer, Integer> txToNotRecover = new HashMap<Integer, Integer>();

        int size = (file.getFilledUpTo() / RECORD_SIZE);
        for (int i = 0; i < size; ++i) {
            offset = i * RECORD_SIZE;
            status = file.readByte(offset);
            offset += OConstants.SIZE_BYTE;
            offset += OConstants.SIZE_BYTE;

            reqId = file.readShort(offset);
            offset += OConstants.SIZE_SHORT;

            // FIX: the tx id is 4 bytes (written by addLog with writeInt); it was read with readShort
            txId = file.readInt(offset);

            switch (status) {
            case STATUS_FREE:
                // NOT TO RECOVER: AT LEAST ONE "FREE" STATUS MEANS THAT ALL THE LOGS WERE COMMITTED BUT THE USER DIDN'T
                // RECEIVE THE ACK
                txToNotRecover.put(reqId, txId);
                break;

            case STATUS_COMMITTING:
                // TO RECOVER UNLESS THE REQ/TX IS IN THE MAP txToNotRecover
                txToRecover.put(reqId, txId);
                break;
            }
        }

        // FILTER THE TX MAP TO RECOVER BY REMOVING THE TX WITH AT LEAST ONE "FREE" STATUS
        Entry<Integer, Integer> entry;
        for (Iterator<Entry<Integer, Integer>> it = txToRecover.entrySet().iterator(); it.hasNext();) {
            entry = it.next();

            if (txToNotRecover.containsKey(entry.getKey()) && txToNotRecover.get(entry.getKey()).equals(entry.getValue()))
                // NOT TO RECOVER: AT LEAST ONE "FREE" STATUS MEANS THAT ALL THE LOGS WERE COMMITTED BUT THE USER DIDN'T
                // RECEIVE THE ACK
                it.remove();
        }

        return txToRecover;
    }

    /**
     * Recovers a transaction.
     *
     * @param iReqId the id of the requester
     * @param iTxId  the id of the transaction
     * @return Number of records recovered
     *
     * @throws IOException on read/write failure
     */
    private int recoverTransaction(int iReqId, int iTxId) throws IOException {
        byte status;
        byte operation;
        int reqId;
        int txId;
        int clusterId;
        long clusterOffset;
        long oldDataOffset;
        int offset;

        OPhysicalPosition ppos = new OPhysicalPosition();

        int size = (file.getFilledUpTo() / RECORD_SIZE);
        int recoveredRecords = 0;
        for (int i = 0; i < size; ++i) {
            offset = i * RECORD_SIZE;
            status = file.readByte(offset);
            offset += OConstants.SIZE_BYTE;

            if (status != STATUS_FREE) {
                // DIRTY TX LOG ENTRY
                operation = file.readByte(offset);
                offset += OConstants.SIZE_BYTE;

                reqId = file.readShort(offset);
                if (reqId == iReqId) {
                    // REQ ID FOUND
                    offset += OConstants.SIZE_SHORT;
                    txId = file.readInt(offset);

                    if (txId == iTxId) {
                        // TX ID FOUND
                        offset += OConstants.SIZE_INT;

                        clusterId = file.readShort(offset);
                        offset += OConstants.SIZE_SHORT;

                        clusterOffset = file.readLong(offset);
                        offset += OConstants.SIZE_LONG;

                        oldDataOffset = file.readLong(offset);

                        recoverTransactionEntry(status, operation, reqId, txId, clusterId, clusterOffset, oldDataOffset, ppos);
                        recoveredRecords++;

                        // CLEAR THE ENTRY BY WRITING THE "FREE" STATUS.
                        // FIX: the status byte is the FIRST byte of the record; the previous offset
                        // (+ SIZE_SHORT + SIZE_INT) pointed inside the tx-id field instead.
                        file.writeByte(i * RECORD_SIZE, STATUS_FREE);
                    }
                }
            }
        }
        return recoveredRecords;
    }

    /**
     * Undoes/redoes a single log entry depending on the logged operation.
     */
    private void recoverTransactionEntry(byte status, byte operation, int reqId, int txId, int clusterId, long clusterOffset,
            long oldDataOffset, OPhysicalPosition ppos) throws IOException {
        final OClusterLocal cluster = (OClusterLocal) storage.getClusterById(clusterId);

        OLogManager.instance().info(this,
                "Recovering tx <%d> by req <%d>. Operation <%d> was in status <%d> on record %d:%d in data space %d...", txId, reqId,
                operation, status, clusterId, clusterOffset, oldDataOffset);

        switch (operation) {
        case OPERATION_CREATE:
            // JUST DELETE THE RECORD
            storage.deleteRecord(-1, clusterId, clusterOffset, -1);
            break;

        case OPERATION_UPDATE:
            // RETRIEVE THE OLD RECORD
            storage.getDataSegment(ppos.dataSegment).getRecordSize(oldDataOffset);

            // RETRIEVE THE CURRENT PPOS
            cluster.getPhysicalPosition(clusterOffset, ppos);
            long newPosition = ppos.dataPosition;
            int newSize = ppos.recordSize;

            // REPLACE THE POSITION OF THE OLD RECORD
            ppos.dataPosition = oldDataOffset;

            // UPDATE THE PPOS WITH THE COORDS OF THE OLD RECORD
            storage.getClusterById(clusterId).setPhysicalPosition(clusterOffset, ppos.dataSegment, oldDataOffset, ppos.type);

            // CREATE A HOLE WHERE THE NEW (ROLLED BACK) VERSION OF THE RECORD WAS STORED
            storage.getDataSegment(ppos.dataSegment).createHole(newPosition, newSize);
            break;

        case OPERATION_DELETE:
            // GET THE PPOS
            cluster.getPhysicalPosition(clusterOffset, ppos);

            // SAVE THE PPOS WITH THE VERSION TO 0 (VALID IF >-1)
            cluster.updateVersion(clusterOffset, 0);

            // REMOVE THE HOLE
            cluster.removeHole(clusterOffset);
            break;
        }
    }

    /** Flushes the file if per-record synch is enabled in the configuration. */
    private void synchRecord() {
        if (((OStorageTxConfiguration) config).isSynchRecord())
            file.synch();
    }

    /** Flushes the file if per-transaction synch is enabled in the configuration. */
    private void synchTx() {
        if (((OStorageTxConfiguration) config).isSynchTx())
            file.synch();
    }
}
| |
package org.jenkinsci.plugins.gitclient;
import com.cloudbees.jenkins.plugins.sshcredentials.impl.BasicSSHUserPrivateKey;
import com.cloudbees.plugins.credentials.CredentialsScope;
import com.cloudbees.plugins.credentials.CredentialsProvider;
import com.cloudbees.plugins.credentials.common.StandardCredentials;
import com.cloudbees.plugins.credentials.common.StandardUsernamePasswordCredentials;
import com.cloudbees.plugins.credentials.impl.UsernamePasswordCredentialsImpl;
import hudson.model.Fingerprint;
import hudson.util.LogTaskListener;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.StringJoiner;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Pattern;
import org.apache.commons.io.FileUtils;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.transport.RefSpec;
import org.eclipse.jgit.transport.RemoteConfig;
import org.eclipse.jgit.transport.URIish;
import static org.hamcrest.MatcherAssert.*;
import static org.hamcrest.Matchers.*;
import org.junit.After;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import org.junit.Before;
import org.junit.Test;
import org.junit.Rule;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.jvnet.hudson.test.Issue;
import org.jvnet.hudson.test.JenkinsRule;
import org.json.simple.JSONObject;
import org.json.simple.JSONArray;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;
import org.junit.ClassRule;
/**
* Test authenticated operations with the git implementations.
* Uses contents of ~/.ssh/auth-data for parameterized tests.
* @author Mark Waite
*/
@RunWith(Parameterized.class)
public class CredentialsTest {
    // Required for credentials use
    @ClassRule
    public static final JenkinsRule j = new JenkinsRule();

    // Per-test-case parameters supplied by gitRepoUrls()
    private final String gitImpl;                      // "git", "jgit", or "jgitapache"
    private final String gitRepoURL;                   // repository URL under test
    private final String username;
    private final String password;                     // null/empty when a private key is used
    private final File privateKey;                     // null when username/password is used
    private final String passphrase;                   // passphrase for the private key, may be null
    private final String fileToCheck;                  // file expected in the checked-out workspace
    private final Boolean submodules;                  // initialize/update submodules after checkout
    private final Boolean useParentCreds;              // pass parent credentials to submodule update
    private final Boolean lfsSpecificTest;             // case only exercised by the LFS merge test
    private final char specialCharacter;               // char embedded in the workspace dir name (JENKINS-43931)
    private final Boolean credentialsEmbeddedInURL;    // credentials carried in the URL, not registered
    private GitClient git;
    private File repo;                                 // workspace directory for the current test
    private StandardCredentials testedCredential;
    private final Random random = new Random();

    @Rule
    public TemporaryFolder tempFolder = new TemporaryFolder();
    private int logCount;                              // makes each test's logger name unique
    private LogHandler handler;
    private LogTaskListener listener;

    private final static File HOME_DIR = new File(System.getProperty("user.home"));
    private final static File SSH_DIR = new File(HOME_DIR, ".ssh");
    private final static File DEFAULT_PRIVATE_KEY = new File(SSH_DIR, "id_rsa");

    /* Directory containing local private keys for tests */
    private final static File AUTH_DATA_DIR = new File(SSH_DIR, "auth-data");
    private final static File CURR_DIR = new File(".");

    private static long firstTestStartTime = 0;        // wall-clock start of the first test in the run

    /* Windows refuses directory names with '*', '<', '>', '|', '?', and ':' */
    private final String SPECIALS_TO_CHECK = "%()`$&{}[]"
            + (isWindows() ? "" : "*<>:|?");
    private static int specialsIndex = 0;              // rotates through SPECIALS_TO_CHECK across test cases
    /**
     * Creates one parameterized test case; arguments come from {@code gitRepoUrls()}.
     * Also rotates the shared {@code specialsIndex} so consecutive cases embed
     * different special characters in the workspace directory name, and records
     * the wall-clock start time of the first test for the run-time budget check.
     */
    public CredentialsTest(String gitImpl, String gitRepoUrl, String username, String password, File privateKey, String passphrase, String fileToCheck, Boolean submodules, Boolean useParentCreds, Boolean credentialsEmbeddedInURL, Boolean lfsSpecificTest) {
        this.gitImpl = gitImpl;
        this.gitRepoURL = gitRepoUrl;
        this.privateKey = privateKey;
        this.passphrase = passphrase;
        this.username = username;
        this.password = password;
        this.fileToCheck = fileToCheck;
        this.submodules = submodules;
        this.useParentCreds = useParentCreds;
        this.lfsSpecificTest = lfsSpecificTest;
        // pick the next special character, wrapping around at the end of the list
        this.specialCharacter = SPECIALS_TO_CHECK.charAt(specialsIndex);
        this.credentialsEmbeddedInURL = credentialsEmbeddedInURL;
        specialsIndex = specialsIndex + 1;
        if (specialsIndex >= SPECIALS_TO_CHECK.length()) {
            specialsIndex = 0;
        }
        if (firstTestStartTime == 0) {
            firstTestStartTime = System.currentTimeMillis();
        }
    }
    /**
     * Prepares the per-test workspace and git client:
     * creates a working directory whose name embeds a special character
     * (JENKINS-43931), wires a dedicated logger, builds the GitClient for the
     * implementation under test and constructs the credential to be tested.
     */
    @Before
    public void setUp() throws IOException, InterruptedException {
        git = null;
        repo = tempFolder.newFolder();
        /* Use a repo with a special character in name - JENKINS-43931 */
        String newDirName = "use " + specialCharacter + " dir";
        File repoParent = repo;
        repo = new File(repoParent, newDirName);
        boolean dirCreated = repo.mkdirs();
        assertTrue("Failed to create " + repo.getAbsolutePath(), dirCreated);
        File repoTemp = new File(repoParent, newDirName + "@tmp"); // use adjacent temp directory
        dirCreated = repoTemp.mkdirs();
        assertTrue("Failed to create " + repoTemp.getAbsolutePath(), dirCreated);
        // distinct logger per test so captured log output is isolated
        Logger logger = Logger.getLogger(this.getClass().getPackage().getName() + "-" + logCount++);
        handler = new LogHandler();
        handler.setLevel(Level.ALL);
        logger.setUseParentHandlers(false);
        logger.addHandler(handler);
        logger.setLevel(Level.ALL);
        listener = new hudson.util.LogTaskListener(logger, Level.ALL);
        git = Git.with(listener, new hudson.EnvVars()).in(repo).using(gitImpl).getClient();
        // exactly one of password or private key must be provided (XOR)
        assertTrue("Bad username, password, privateKey combo: '" + username + "', '" + password + "'",
                (password == null || password.isEmpty()) ^ (privateKey == null || !privateKey.exists()));
        if (password != null && !password.isEmpty()) {
            testedCredential = newUsernamePasswordCredential(username, password);
        }
        if (privateKey != null && privateKey.exists()) {
            testedCredential = newPrivateKeyCredential(username, privateKey);
        }
        if (!credentialsEmbeddedInURL) {
            assertThat(testedCredential, notNullValue());
            Fingerprint fingerprint = CredentialsProvider.getFingerprintOf(testedCredential);
            assertThat("Fingerprint should not be set", fingerprint, nullValue());
        }
    }
@Before
public void enableSETSID() {
CliGitAPIImpl.CALL_SETSID = gitImpl.equals("git") && privateKey != null && passphrase != null;
}
    /**
     * Verifies that exercising the API did not record a credential fingerprint.
     */
    @After
    public void checkFingerprintNotSet() throws Exception {
        /* Since these are API level tests, they should not track credential usage */
        /* Credential usage is tracked at the job / project level */
        Fingerprint fingerprint = CredentialsProvider.getFingerprintOf(testedCredential);
        assertThat("Fingerprint should not be set after API level use", fingerprint, nullValue());
    }
@After
public void clearCredentials() {
if (git != null) {
git.clearCredentials();
}
}
@After
public void disableSETSID() {
org.jenkinsci.plugins.gitclient.CliGitAPIImpl.CALL_SETSID = false;
}
private BasicSSHUserPrivateKey newPrivateKeyCredential(String username, File privateKey) throws IOException {
CredentialsScope scope = CredentialsScope.GLOBAL;
String id = "private-key-" + privateKey.getPath() + random.nextInt();
String privateKeyData = FileUtils.readFileToString(privateKey, StandardCharsets.UTF_8);
BasicSSHUserPrivateKey.PrivateKeySource privateKeySource = new BasicSSHUserPrivateKey.DirectEntryPrivateKeySource(privateKeyData);
String description = "private key from " + privateKey.getPath();
if (this.passphrase != null) {
description = description + " passphrase '" + this.passphrase + "'";
}
return new BasicSSHUserPrivateKey(scope, id, username, privateKeySource, this.passphrase, description);
}
private StandardUsernamePasswordCredentials newUsernamePasswordCredential(String username, String password) {
CredentialsScope scope = CredentialsScope.GLOBAL;
String id = "username-" + username + "-password-" + password + random.nextInt();
return new UsernamePasswordCredentialsImpl(scope, id, "desc: " + id, username, password);
}
private static boolean isCredentialsSupported() throws IOException, InterruptedException {
CliGitAPIImpl cli = (CliGitAPIImpl) Git.with(null, new hudson.EnvVars()).in(CURR_DIR).using("git").getClient();
return cli.isAtLeastVersion(1, 7, 9, 0);
}
private boolean isShallowCloneSupported(String implementation, GitClient gitClient) {
if (!implementation.equals("git")) {
return false;
}
CliGitAPIImpl cli = (CliGitAPIImpl) gitClient;
return cli.isAtLeastVersion(1, 9, 0, 0);
}
@Parameterized.Parameters(name = "Impl:{0} User:{2} Pass:{3} Embed:{9} Phrase:{5} URL:{1}")
public static Collection gitRepoUrls() throws IOException, InterruptedException, ParseException {
List<Object[]> repos = new ArrayList<>();
String[] implementations = isCredentialsSupported() ? new String[]{"git", "jgit", "jgitapache"} : new String[]{"jgit", "jgitapache"};
for (String implementation : implementations) {
/* Add upstream repository as authentication test with private
* key of current user. Try to test at least one
* authentication case, even if there is no repos.json file in
* the external directory.
*/
if (DEFAULT_PRIVATE_KEY.exists()) {
String username = System.getProperty("user.name");
String url = "https://github.com/jenkinsci/git-client-plugin.git";
/* Add URL if it matches the pattern */
if (URL_MUST_MATCH_PATTERN.matcher(url).matches()) {
Object[] masterRepo = {implementation, url, username, null, DEFAULT_PRIVATE_KEY, null, "README.adoc", false, false, false, false};
repos.add(masterRepo);
}
}
/* Add additional repositories if the ~/.ssh/auth-data directory
* contains a repos.json file defining the repositories to test and the
* authentication data to use for those tests.
*/
File authDataDefinitions = new File(AUTH_DATA_DIR, "repos.json");
if (authDataDefinitions.exists()) {
JSONParser parser = new JSONParser();
Object obj = parser.parse(new FileReader(authDataDefinitions));
JSONArray authEntries = (JSONArray) obj;
for (Object entryObj : authEntries) {
JSONObject entry = (JSONObject) entryObj;
String skipIf = (String) entry.get("skipif");
String repoURL = (String) entry.get("url");
String username = (String) entry.get("username");
String password = (String) entry.get("password");
String fileToCheck = (String) entry.get("file");
if (skipIf != null) {
if (skipIf.equals(implementation)) {
continue;
}
if (implementation.startsWith("jgit") && skipIf.equals("jgit")) { // Treat jgitapache like jgit
continue;
}
}
if (fileToCheck == null) {
fileToCheck = "README.adoc";
}
Boolean submodules = (Boolean) entry.get("submodules");
if (submodules == null) {
submodules = false;
}
Boolean useParentCreds = (Boolean) entry.get("parentcreds");
if (useParentCreds == null) {
useParentCreds = false;
}
Boolean lfsSpecificTest = (Boolean) entry.get("lfsSpecificTest");
if (lfsSpecificTest == null) {
lfsSpecificTest = false;
}
String keyfile = (String) entry.get("keyfile");
File privateKey = null;
if (keyfile != null) {
privateKey = new File(AUTH_DATA_DIR, keyfile);
if (!privateKey.exists()) {
privateKey = null;
}
}
String passphrase = (String) entry.get("passphrase");
if (passphrase != null && passphrase.trim().isEmpty()) {
passphrase = null;
}
if (passphrase != null && privateKey == null) {
System.out.println("Non-empty passphrase, private key file '" + keyfile + "' not found");
continue;
}
if (repoURL == null) {
System.out.println("No repository URL provided.");
continue;
}
/* Add URL if it matches the pattern */
if (URL_MUST_MATCH_PATTERN.matcher(repoURL).matches()) {
Object[] repo = {implementation, repoURL, username, password, privateKey, passphrase, fileToCheck, submodules, useParentCreds, false, lfsSpecificTest};
repos.add(repo);
/* Add embedded credentials test case if valid username, valid password, CLI git, and http protocol */
if (username != null && !username.matches(".*[@:].*") && // Skip special cases of username
password != null && !password.matches(".*[@:].*") && // Skip special cases of password
implementation.equals("git") && // Embedded credentials only implemented for CLI git
repoURL.startsWith("http")) {
/* Use existing username and password to create an embedded credentials test case */
String repoURLwithCredentials = repoURL.replaceAll("(https?://)(.*@)?(.*)", "$1" + username + ":" + password + "@$3");
Object[] repoWithCredentials = {implementation, repoURLwithCredentials, username, password, privateKey, passphrase, fileToCheck, submodules, useParentCreds, true, lfsSpecificTest};
repos.add(0, repoWithCredentials);
}
}
}
}
}
Collections.shuffle(repos); // randomize test order
// If we're not testing all credentials, take 6 or less
return TEST_ALL_CREDENTIALS ? repos : repos.subList(0, Math.min(repos.size(), 6));
}
    /** Fetches the master branch from {@code source}, allowing a shallow fetch. */
    private void gitFetch(String source) throws Exception {
        gitFetch(source, "master", true);
    }
private void gitFetch(String source, String branch, Boolean allowShallowClone) throws Exception {
/* Save some bandwidth with shallow clone for CliGit, not yet available for JGit */
URIish sourceURI = new URIish(source);
List<RefSpec> refSpecs = new ArrayList<>();
refSpecs.add(new RefSpec("+refs/heads/"+branch+":refs/remotes/origin/"+branch+""));
FetchCommand cmd = git.fetch_().from(sourceURI, refSpecs).tags(false);
if (isShallowCloneSupported(gitImpl, git)) {
// Reduce network transfer by using shallow clone
// JGit does not support shallow clone
cmd.shallow(true).depth(1);
}
cmd.execute();
}
private String listDir(File dir) {
File[] files = repo.listFiles();
StringJoiner joiner = new StringJoiner(",");
for (File file : files) {
joiner.add(file.getName());
}
return joiner.toString();
}
    /**
     * Registers the tested credential as the client's default credential,
     * unless the credentials are already embedded in the repository URL.
     */
    private void addCredential() {
        //Begin - JENKINS-56257
        //Credential need not be added when supplied in the URL
        if (this.credentialsEmbeddedInURL) {
            return;
        }
        //End - JENKINS-56257
        // Always use addDefaultCredentials
        git.addDefaultCredentials(testedCredential);
        // addCredential stops tests to prompt for passphrase
        // addCredentials fails some github username / password tests
        // git.addCredentials(gitRepoURL, testedCredential);
    }
/**
* Returns true if another test should not be allowed to start.
* JenkinsRule test timeout defaults to 180 seconds.
*
* @return true if another test should not be allowed to start
*/
private boolean testPeriodExpired() {
return (System.currentTimeMillis() - firstTestStartTime) > ((180 - 70) * 1000L);
}
    /**
     * Full authenticated fetch/checkout round trip: init a workspace, fetch by
     * URL and by remote name, check out master, optionally update submodules,
     * then prune (which opens another authenticated remote connection).
     */
    @Test
    @Issue("JENKINS-50573")
    public void testFetchWithCredentials() throws Exception {
        // skip when the run-time budget is exhausted or the case is LFS-only
        if (testPeriodExpired() || lfsSpecificTest) {
            return;
        }
        File clonedFile = new File(repo, fileToCheck);
        git.init_().workspace(repo.getAbsolutePath()).execute();
        assertFalse("file " + fileToCheck + " in " + repo + ", has " + listDir(repo), clonedFile.exists());
        addCredential();
        /* Fetch with remote URL */
        gitFetch(gitRepoURL);
        git.setRemoteUrl("origin", gitRepoURL);
        /* Fetch with remote name "origin" instead of remote URL */
        gitFetch("origin");
        ObjectId master = git.getHeadRev(gitRepoURL, "master");
        git.checkout().branch("master").ref(master.getName()).deleteBranchIfExist(true).execute();
        if (submodules) {
            git.submoduleInit();
            SubmoduleUpdateCommand subcmd = git.submoduleUpdate().parentCredentials(useParentCreds);
            subcmd.execute();
        }
        assertTrue("master: " + master + " not in repo", git.isCommitInRepo(master));
        assertEquals("Master != HEAD", master, git.withRepository((gitRepo, unusedChannel)-> gitRepo.findRef("master").getObjectId()));
        assertEquals("Wrong branch", "master", git.withRepository((gitRepo, unusedChanel) -> gitRepo.getBranch()));
        assertTrue("No file " + fileToCheck + ", has " + listDir(repo), clonedFile.exists());
        /* prune opens a remote connection to list remote branches */
        git.prune(new RemoteConfig(git.withRepository((gitRepo, unusedChannel) -> gitRepo.getConfig()), "origin"));
    }
@Test
public void testRemoteReferencesWithCredentials() throws Exception {
if (testPeriodExpired()) {
return;
}
addCredential();
Map<String, ObjectId> remoteReferences;
switch (random.nextInt(4)) {
default:
case 0:
remoteReferences = git.getRemoteReferences(gitRepoURL, null, true, false);
break;
case 1:
remoteReferences = git.getRemoteReferences(gitRepoURL, null, true, true);
break;
case 2:
remoteReferences = git.getRemoteReferences(gitRepoURL, "master", true, false);
break;
case 3:
remoteReferences = git.getRemoteReferences(gitRepoURL, "master", true, true);
break;
}
assertThat(remoteReferences.keySet(), hasItems("refs/heads/master"));
}
@Test
@Issue("JENKINS-50573")
public void isURIishRemote() throws Exception {
URIish uri = new URIish(gitRepoURL);
assertTrue("Should be remote but isn't: " + uri, uri.isRemote());
}
    /**
     * Authenticated LFS scenario: check out master with an LFS remote configured,
     * then fast-forward merge the 'modified_lfs' branch and verify HEAD advanced.
     * Only runs for cases flagged as LFS-specific.
     */
    @Test
    @Issue("JENKINS-45228")
    public void testLfsMergeWithCredentials() throws Exception {
        if (testPeriodExpired() || !lfsSpecificTest) {
            return;
        }
        File clonedFile = new File(repo, fileToCheck);
        git.init_().workspace(repo.getAbsolutePath()).execute();
        assertFalse("file " + fileToCheck + " in " + repo + ", has " + listDir(repo), clonedFile.exists());
        addCredential();
        /* Fetch with remote name "origin" instead of remote URL */
        git.setRemoteUrl("origin", gitRepoURL);
        // fetch all branches ("*"), full history (no shallow clone)
        gitFetch("origin", "*", false);
        ObjectId master = git.getHeadRev(gitRepoURL, "master");
        git.checkout().branch("master").ref(master.getName()).lfsRemote("origin").deleteBranchIfExist(true).execute();
        assertTrue("master: " + master + " not in repo", git.isCommitInRepo(master));
        assertEquals("Master != HEAD", master, git.getRepository().findRef("master").getObjectId());
        assertEquals("Wrong branch", "master", git.getRepository().getBranch());
        assertTrue("No file " + fileToCheck + ", has " + listDir(repo), clonedFile.exists());
        ObjectId modified_lfs = git.getHeadRev(gitRepoURL, "modified_lfs");
        git.merge().setStrategy(MergeCommand.Strategy.DEFAULT).setGitPluginFastForwardMode(MergeCommand.GitPluginFastForwardMode.FF).setRevisionToMerge(modified_lfs).execute();
        assertEquals("Fast-forward merge failed. master and modified_lfs should be the same.", git.revParse("HEAD"), modified_lfs);
    }
private boolean isWindows() {
return File.pathSeparatorChar == ';';
}
    /* If not in a Jenkins job, then default to running all credentials tests.
     *
     * Developers without ~/.ssh/auth-data/repos.json will see no difference
     * since minimal credentials tests are used for them.
     *
     * Developers with ~/.ssh/auth-data/repos.json will test all credentials by default.
     */
    // JOB_NAME is only defined inside a Jenkins job; its absence means a developer machine.
    private static final String NOT_JENKINS = System.getProperty("JOB_NAME") == null ? "true" : "false";
    private static final boolean TEST_ALL_CREDENTIALS = Boolean.parseBoolean(System.getProperty("TEST_ALL_CREDENTIALS", NOT_JENKINS));
    // Optional filter: only repository URLs matching this pattern are tested (default: all).
    private static final Pattern URL_MUST_MATCH_PATTERN = Pattern.compile(System.getProperty("URL_MUST_MATCH_PATTERN", ".*"));
}
| |
package ca.corefacility.bioinformatics.irida.ria.web.services;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.*;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.MessageSource;
import org.springframework.context.NoSuchMessageException;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.stereotype.Component;
import ca.corefacility.bioinformatics.irida.exceptions.IridaWorkflowNotFoundException;
import ca.corefacility.bioinformatics.irida.exceptions.IridaWorkflowParameterException;
import ca.corefacility.bioinformatics.irida.model.joins.Join;
import ca.corefacility.bioinformatics.irida.model.project.Project;
import ca.corefacility.bioinformatics.irida.model.project.ReferenceFile;
import ca.corefacility.bioinformatics.irida.model.sample.Sample;
import ca.corefacility.bioinformatics.irida.model.workflow.IridaWorkflow;
import ca.corefacility.bioinformatics.irida.model.workflow.analysis.type.AnalysisType;
import ca.corefacility.bioinformatics.irida.model.workflow.description.IridaWorkflowDescription;
import ca.corefacility.bioinformatics.irida.model.workflow.description.IridaWorkflowDynamicSourceGalaxy;
import ca.corefacility.bioinformatics.irida.model.workflow.description.IridaWorkflowParameter;
import ca.corefacility.bioinformatics.irida.model.workflow.submission.AnalysisSubmissionTemplate;
import ca.corefacility.bioinformatics.irida.model.workflow.submission.IridaWorkflowNamedParameters;
import ca.corefacility.bioinformatics.irida.pipeline.results.AnalysisSubmissionSampleProcessor;
import ca.corefacility.bioinformatics.irida.pipeline.upload.galaxy.GalaxyToolDataService;
import ca.corefacility.bioinformatics.irida.ria.utilities.FileUtilities;
import ca.corefacility.bioinformatics.irida.ria.web.ajax.dto.pipeline.SavePipelineParametersRequest;
import ca.corefacility.bioinformatics.irida.ria.web.ajax.dto.pipeline.SavedPipelineParameters;
import ca.corefacility.bioinformatics.irida.ria.web.ajax.dto.references.UIReferenceFile;
import ca.corefacility.bioinformatics.irida.ria.web.ajax.dto.ui.Input;
import ca.corefacility.bioinformatics.irida.ria.web.ajax.dto.ui.InputWithOptions;
import ca.corefacility.bioinformatics.irida.ria.web.ajax.dto.ui.SelectOption;
import ca.corefacility.bioinformatics.irida.ria.web.ajax.projects.settings.dto.AnalysisTemplate;
import ca.corefacility.bioinformatics.irida.ria.web.launchPipeline.dtos.UIPipelineDetailsResponse;
import ca.corefacility.bioinformatics.irida.ria.web.pipelines.dto.Pipeline;
import ca.corefacility.bioinformatics.irida.security.permissions.sample.UpdateSamplePermission;
import ca.corefacility.bioinformatics.irida.service.AnalysisSubmissionService;
import ca.corefacility.bioinformatics.irida.service.ProjectService;
import ca.corefacility.bioinformatics.irida.service.ReferenceFileService;
import ca.corefacility.bioinformatics.irida.service.workflow.IridaWorkflowsService;
import ca.corefacility.bioinformatics.irida.service.workflow.WorkflowNamedParametersService;
import com.github.jmchilton.blend4j.galaxy.beans.TabularToolDataTable;
/**
 * UI Service for all things related to workflow pipelines.
 */
@Component
public class UIPipelineService {
    private static final Logger logger = LoggerFactory.getLogger(UIPipelineService.class);
    private final UICartService cartService;
    private final IridaWorkflowsService workflowsService;
    private final WorkflowNamedParametersService namedParametersService;
    private final ProjectService projectService;
    private final ReferenceFileService referenceFileService;
    private final AnalysisSubmissionSampleProcessor analysisSubmissionSampleProcessor;
    private final UpdateSamplePermission updateSamplePermission;
    private final GalaxyToolDataService galaxyToolDataService;
    private final AnalysisSubmissionService analysisSubmissionService;
    private final MessageSource messageSource;

    /**
     * Constructor-injected collaborators; all are required.
     */
    @Autowired
    public UIPipelineService(UICartService cartService, IridaWorkflowsService workflowsService,
            WorkflowNamedParametersService namedParametersService, ProjectService projectService,
            ReferenceFileService referenceFileService,
            AnalysisSubmissionSampleProcessor analysisSubmissionSampleProcessor,
            UpdateSamplePermission updateSamplePermission, GalaxyToolDataService galaxyToolDataService,
            AnalysisSubmissionService analysisSubmissionService, MessageSource messageSource) {
        this.cartService = cartService;
        this.workflowsService = workflowsService;
        this.namedParametersService = namedParametersService;
        this.projectService = projectService;
        this.referenceFileService = referenceFileService;
        this.analysisSubmissionSampleProcessor = analysisSubmissionSampleProcessor;
        this.updateSamplePermission = updateSamplePermission;
        this.galaxyToolDataService = galaxyToolDataService;
        this.analysisSubmissionService = analysisSubmissionService;
        this.messageSource = messageSource;
    }

    /**
     * Get the information about a specific workflow pipeline
     *
     * @param id     for a {@link IridaWorkflow}
     * @param locale current users {@link Locale}
     * @return Details contained within a {@link UIPipelineDetailsResponse}
     * @throws IridaWorkflowNotFoundException exception thrown if the workflow cannot be found.
     */
    public UIPipelineDetailsResponse getPipelineDetails(UUID id, Locale locale) throws IridaWorkflowNotFoundException {
        IridaWorkflow workflow = workflowsService.getIridaWorkflow(id);
        IridaWorkflowDescription description = workflow.getWorkflowDescription();
        UIPipelineDetailsResponse detailsResponse = new UIPipelineDetailsResponse();
        /*
        Prefix for getting messages from IRIDA message properties file
        */
        String prefix = "workflow." + description.getAnalysisType()
                .getType() + ".";
        /*
        Set up basic information for the pipeline being launched.
        */
        detailsResponse.setName(messageSource.getMessage(prefix + "title", new Object[] {}, locale));
        detailsResponse.setDescription(messageSource.getMessage(prefix + "description", new Object[] {}, locale));
        detailsResponse.setType(description.getName());
        /*
        Add what projects are in the cart for sharing afterwards
        */
        List<Project> projects = (List<Project>) projectService.readMultiple(cartService.getProjectIdsInCart());
        List<SelectOption> projectsToShareWith = projects.stream()
                .map(p -> new SelectOption(String.valueOf(p.getId()), p.getLabel()))
                .collect(Collectors.toList());
        detailsResponse.setProjects(projectsToShareWith);
        /*
        Add all pipeline parameters
        */
        detailsResponse.setParameterWithOptions(getPipelineSpecificParametersWithOptions(description, locale));
        /*
        Add saved parameter sets
        */
        detailsResponse.setSavedPipelineParameters(getSavedPipelineParameters(workflow, locale));
        /*
        Check / add reference files
        */
        if (description.requiresReference()) {
            detailsResponse.setRequiresReference(true);
            detailsResponse.setReferenceFiles(getReferenceFilesForPipeline(projects));
        }
        /*
        Can the pipeline write back
        */
        Map<Project, List<Sample>> cart = cartService.getFullCart();
        boolean canUpdateSamples = analysisSubmissionSampleProcessor.hasRegisteredAnalysisSampleUpdater(description.getAnalysisType());
        if (canUpdateSamples) {
            Authentication authentication = SecurityContextHolder.getContext()
                    .getAuthentication();
            // Need to make sure that all samples are allowed to be updated.
            List<Sample> samples = cart.values().stream().flatMap(Collection::stream).collect(Collectors.toList());
            canUpdateSamples = samples
                    .stream()
                    .map(sample -> updateSamplePermission.isAllowed(authentication, sample))
                    .reduce(true, (a, b) -> a && b);
            if (canUpdateSamples) {
                detailsResponse.setUpdateSamples(messageSource.getMessage(
                        "workflow.label.share-analysis-samples." + description.getAnalysisType()
                                .getType(), new Object[] {}, locale));
            }
        }
        /*
        Set the acceptable file types
        */
        detailsResponse.setAcceptsSingleSequenceFiles(description.acceptsSingleSequenceFiles());
        detailsResponse.setAcceptsPairedSequenceFiles(description.acceptsPairedSequenceFiles());
        /*
        Dynamic Sources - these are pulled from Galaxy
        */
        if (description.requiresDynamicSource()) {
            List<InputWithOptions> dynamicSources = new ArrayList<>();
            IridaWorkflowDynamicSourceGalaxy dynamicSource = new IridaWorkflowDynamicSourceGalaxy();
            /*
            Go through all the pipeline parameters and see which ones require dynamic sources.
            */
            for (IridaWorkflowParameter parameter : description.getParameters()) {
                if (parameter.isRequired() && parameter.hasDynamicSource()) {
                    try {
                        dynamicSource = parameter.getDynamicSource();
                    } catch (IridaWorkflowParameterException e) {
                        logger.debug("Dynamic Source error: ", e);
                    }
                    /*
                    Now that we have the info on the parameter lets get the available options for it, and
                    set up the data in a format that the UI can create a select input.
                    */
                    try {
                        String dynamicSourceName = dynamicSource.getName();
                        String label = messageSource.getMessage("dynamicsource.label." + dynamicSourceName, new Object[] {}, locale);
                        List<SelectOption> options = new ArrayList<>();
                        TabularToolDataTable galaxyToolDataTable = galaxyToolDataService.getToolDataTable(dynamicSourceName);
                        List<String> labels = galaxyToolDataTable.getFieldsForColumn(dynamicSource.getDisplayColumn());
                        Iterator<String> labelsIterator = labels.iterator();
                        List<String> values = galaxyToolDataTable.getFieldsForColumn(
                                dynamicSource.getParameterColumn());
                        Iterator<String> valuesIterator = values.iterator();
                        while (labelsIterator.hasNext() && valuesIterator.hasNext()) {
                            options.add(new SelectOption(valuesIterator.next(), labelsIterator.next()));
                        }
                        // NOTE(review): options.get(0) throws on an empty data table; the broad catch
                        // below logs and skips the parameter — confirm this is the intended behavior.
                        dynamicSources.add(new InputWithOptions(parameter.getName(), label, options.get(0).getValue(), options));
                    } catch (Exception e) {
                        logger.debug("Tool Data Table not found: ", e);
                    }
                }
            }
            detailsResponse.setDynamicSources(dynamicSources);
        }
        return detailsResponse;
    }

    /**
     * Save a new set of {@link IridaWorkflowNamedParameters}
     *
     * @param id      UUID identifier for a {@link IridaWorkflow}
     * @param request details about the new set of saved pipeline parameters
     * @param locale  current users Locale
     * @return the identifier for the new set
     * @throws IridaWorkflowNotFoundException exception thrown if the workflow cannot be found.
     */
    public SavedPipelineParameters saveNewPipelineParameters(UUID id, SavePipelineParametersRequest request,
            Locale locale) throws IridaWorkflowNotFoundException {
        IridaWorkflow workflow = workflowsService.getIridaWorkflow(id);
        final String pipelineName = workflow.getWorkflowDescription()
                .getName()
                .toLowerCase();
        IridaWorkflowNamedParameters namedParameters = new IridaWorkflowNamedParameters(request.getLabel(), id,
                request.getParameters());
        namedParameters = namedParametersService.create(namedParameters);
        // Re-read the persisted parameters and localize each one for the UI response.
        Map<String, String> updatedParams = namedParameters.getInputParameters();
        List<Input> params = updatedParams.entrySet()
                .stream()
                .map(entry -> new Input(entry.getKey(),
                        messageSource.getMessage("pipeline.parameters." + pipelineName + "." + entry.getKey(),
                                new Object[] {}, locale), entry.getValue()))
                .collect(Collectors.toList());
        return new SavedPipelineParameters(namedParameters.getId(), namedParameters.getLabel(), params);
    }

    /**
     * Get a list of pipeline workflows, if the automated flag is set then only those pipelines that can be run
     * automated will be returned
     *
     * @param automated if true, then this is from a project for creating automated pipelines
     * @param locale    currently logged in users locale
     * @return list of pipelines
     */
    public List<Pipeline> getWorkflowTypes(Boolean automated, Locale locale) {
        Set<AnalysisType> analysisTypes = workflowsService.getDisplayableWorkflowTypes();
        List<Pipeline> pipelines = new ArrayList<>();
        for (AnalysisType type : analysisTypes) {
            try {
                IridaWorkflow flow = workflowsService.getDefaultWorkflowByType(type);
                IridaWorkflowDescription description = flow.getWorkflowDescription();
                // if we're setting up an automated project, strip out all the multi-sample pipelines
                if (!automated || description.getInputs().requiresSingleSample()) {
                    Pipeline workflow = createPipeline(type, locale);
                    pipelines.add(workflow);
                }
            } catch (IridaWorkflowNotFoundException e) {
                logger.error("Cannot find IridaWorkFlow for '" + type.getType() + "'", e);
            }
        }
        // Present pipelines alphabetically by their localized name.
        return pipelines.stream()
                .sorted(Comparator.comparing(Pipeline::getName))
                .collect(Collectors.toList());
    }

    /**
     * List of existing automated workflows on a project
     *
     * @param projectId identifier for a project
     * @param locale    currently logged in users locale
     * @return list of existing automated workflows ({@link AnalysisTemplate}) for a project.
     */
    public List<AnalysisTemplate> getProjectAnalysisTemplates(Long projectId, Locale locale) {
        Project project = projectService.read(projectId);
        List<AnalysisSubmissionTemplate> templates = analysisSubmissionService.getAnalysisTemplatesForProject(project);
        return templates.stream()
                .map(template -> {
                    UUID id = template.getWorkflowId();
                    String type;
                    try {
                        IridaWorkflow flow = workflowsService.getIridaWorkflow(id);
                        AnalysisType analysisType = flow.getWorkflowDescription()
                                .getAnalysisType();
                        type = messageSource.getMessage("workflow." + analysisType.getType() + ".title",
                                new Object[] {}, locale);
                    } catch (IridaWorkflowNotFoundException e) {
                        // Workflow may have been uninstalled; fall back to a generic label.
                        type = messageSource.getMessage("workflow.UNKNOWN.title", new Object[] {}, locale);
                    }
                    return new AnalysisTemplate(template.getId(), template.getName(), type, template.isEnabled(),
                            template.getStatusMessage());
                })
                .collect(Collectors.toList());
    }

    /**
     * Remove an automated workflow from a project
     *
     * @param templateId identifier for the automated workflow ({@link AnalysisTemplate})
     * @param projectId  identifier for the project
     * @param locale     currently logged in users locale
     * @return message to the user about the status of the removal
     */
    public String removeProjectAutomatedPipeline(Long templateId, Long projectId, Locale locale) {
        Project project = projectService.read(projectId);
        // Read first so the template name is available for the confirmation message.
        AnalysisSubmissionTemplate template = analysisSubmissionService.readAnalysisSubmissionTemplateForProject(
                templateId, project);
        analysisSubmissionService.deleteAnalysisSubmissionTemplateForProject(templateId, project);
        return messageSource.getMessage("server.AnalysisTemplates.remove", new Object[] { template.getName() }, locale);
    }

    /**
     * Get a list of pipeline parameters that have specific options.
     *
     * @param description {@link IridaWorkflowDescription}
     * @param locale      {@link Locale} current users locale
     * @return List of pipeline parameters with options
     */
    private List<InputWithOptions> getPipelineSpecificParametersWithOptions(IridaWorkflowDescription description,
            Locale locale) {
        return description.getParameters()
                .stream()
                .filter(IridaWorkflowParameter::hasChoices)
                .map(parameter -> {
                    String name = description.getName()
                            .toLowerCase();
                    String label = localizedParamLabel(locale, name, parameter.getName());
                    String defaultValue = parameter.getDefaultValue();
                    List<SelectOption> options = parameter.getChoices()
                            .stream()
                            .map(option -> new SelectOption(option.getValue(),
                                    localizedParamOptionLabel(locale, name, parameter.getName(), option.getName())))
                            .collect(Collectors.toUnmodifiableList());
                    return new InputWithOptions(parameter.getName(), label, defaultValue, options);
                })
                .collect(Collectors.toUnmodifiableList());
    }

    /**
     * Internationalize a parameter label. If there is no translation for it, just return the default text.
     *
     * @param locale       current users {@link Locale}
     * @param workflowName name of the current {@link IridaWorkflow}
     * @param paramName    name of the parameter to internationalize.
     * @return the translated value
     */
    private String localizedParamLabel(Locale locale, String workflowName, String paramName) {
        final String messageName = "pipeline.parameters." + workflowName + "." + paramName;
        try {
            return messageSource.getMessage(messageName, null, locale);
        } catch (NoSuchMessageException e) {
            return paramName;
        }
    }

    /**
     * Internationalize a parameter option.
     *
     * @param locale       current users {@link Locale}
     * @param workflowName name of the current {@link IridaWorkflow}
     * @param paramName    name of the parameter the option belongs to
     * @param optionName   name of the option
     * @return the translated value for the option
     */
    private String localizedParamOptionLabel(Locale locale, String workflowName, String paramName, String optionName) {
        String messageName = "pipeline.parameters." + workflowName + "." + paramName + "." + optionName;
        try {
            return messageSource.getMessage(messageName, null, locale);
        } catch (NoSuchMessageException e) {
            return paramName + "." + optionName;
        }
    }

    /**
     * Get a list of all saved named pipeline parameters for a workflow
     *
     * @param workflow - {@link IridaWorkflow}
     * @param locale   - currently logged in users locale
     * @return list of {@link SavedPipelineParameters}
     */
    private List<SavedPipelineParameters> getSavedPipelineParameters(IridaWorkflow workflow, Locale locale) {
        IridaWorkflowDescription description = workflow.getWorkflowDescription();
        List<IridaWorkflowParameter> workflowParameters = description.getParameters();
        String pipelineName = description.getName()
                .toLowerCase();
        List<SavedPipelineParameters> savedParameters = new ArrayList<>();
        /*
        If there are no parameters just return an empty list.
        */
        if (workflowParameters == null) {
            return savedParameters;
        }
        /*
        Get the default parameter set
        */
        List<Input> defaultParameters = workflowParameters.stream()
                .filter(p -> !p.isRequired())
                .map(p -> new Input(p.getName(),
                        messageSource.getMessage("pipeline.parameters." + pipelineName + "." + p.getName(),
                                new Object[] {}, locale), p.getDefaultValue()))
                .collect(Collectors.toList());
        // The default set always occupies id 0 and sits first in the list.
        savedParameters.add(new SavedPipelineParameters(0L,
                messageSource.getMessage("workflow.parameters.named.default", new Object[] {}, locale),
                defaultParameters));
        /*
        Add any saved parameter sets
        */
        List<IridaWorkflowNamedParameters> namedParameters = namedParametersService.findNamedParametersForWorkflow(
                workflow.getWorkflowIdentifier());
        savedParameters.addAll(namedParameters.stream()
                .map(wp -> {
                    Map<String, String> inputParameter = wp.getInputParameters();
                    // Go through the parameters and see which ones are getting overwritten.
                    List<Input> parameters = defaultParameters.stream()
                            .map(parameter -> {
                                if (inputParameter.containsKey(parameter.getName())) {
                                    return new Input(parameter.getName(), parameter.getLabel(),
                                            inputParameter.get(parameter.getName()));
                                }
                                return new Input(parameter.getName(), parameter.getLabel(), parameter.getValue());
                            })
                            .collect(Collectors.toList());
                    return new SavedPipelineParameters(wp.getId(), wp.getLabel(), parameters);
                })
                .collect(Collectors.toList()));
        return savedParameters;
    }

    /**
     * Get a list of reference files found within projects that have samples in the cart.
     *
     * @param projects List of projects that have samples in the cart
     * @return List of reference files for consumption by the UI.
     */
    private List<UIReferenceFile> getReferenceFilesForPipeline(List<Project> projects) {
        return projects.stream()
                .map(project -> {
                    List<UIReferenceFile> list = new ArrayList<>();
                    for (Join<Project, ReferenceFile> projectReferenceFileJoin : referenceFileService.getReferenceFilesForProject(
                            project)) {
                        try {
                            ReferenceFile file = projectReferenceFileJoin.getObject();
                            Path path = file.getFile();
                            String filesize = FileUtilities.humanReadableByteCount(Files.size(path), true);
                            UIReferenceFile uiReferenceFile = new UIReferenceFile(projectReferenceFileJoin, filesize);
                            list.add(uiReferenceFile);
                        } catch (IOException e) {
                            // A missing/unreadable file is skipped; the rest are still returned.
                            logger.error(e.getMessage());
                        }
                    }
                    return list;
                })
                .flatMap(List::stream)
                .collect(Collectors.toList());
    }

    /**
     * Create a Pipeline for consumption by the UI
     *
     * @param analysisType {@link AnalysisType} type of analysis pipeline
     * @param locale       {@link Locale}
     * @return {@link Pipeline}
     * @throws IridaWorkflowNotFoundException thrown if {@link IridaWorkflowDescription} is not found
     */
    private Pipeline createPipeline(AnalysisType analysisType, Locale locale) throws IridaWorkflowNotFoundException {
        IridaWorkflowDescription workflowDescription = workflowsService.getDefaultWorkflowByType(analysisType)
                .getWorkflowDescription();
        String prefix = "workflow." + analysisType.getType();
        String name = messageSource.getMessage(prefix + ".title", new Object[]{}, locale);
        String description = messageSource.getMessage(prefix + ".description", new Object[]{}, locale);
        UUID id = workflowDescription.getId();
        String styleName = analysisType.getType();
        return new Pipeline(name, description, id, styleName);
    }
}
| |
/*
* Copyright 2012 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.groestlcoin.store;
import com.google.groestlcoin.core.*;
import com.google.common.base.Objects;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import java.io.Serializable;
import java.util.*;
/**
 * A StoredTransaction message contains the information necessary to check a transaction later (ie after a reorg).
 * It is used to avoid having to store the entire transaction when we only need its inputs+outputs.
 */
class StoredTransaction implements Serializable {
    private static final long serialVersionUID = 6243881368122528323L;

    /**
     * A transaction has some value and a script used for authenticating that the redeemer is allowed to spend
     * this output.
     */
    private List<StoredTransactionOutput> outputs;
    // Inputs copied without their parent transaction reference.
    private List<TransactionInput> inputs;
    private long version;
    private long lockTime;
    private Sha256Hash hash;

    public StoredTransaction(NetworkParameters params, Transaction tx, int height) {
        inputs = new LinkedList<TransactionInput>();
        outputs = new LinkedList<StoredTransactionOutput>();
        // Copy inputs/outputs detached from the source transaction (parent set to null).
        for (TransactionInput in : tx.getInputs())
            inputs.add(new TransactionInput(params, null, in.getScriptBytes(), in.getOutpoint()));
        for (TransactionOutput out : tx.getOutputs())
            outputs.add(new StoredTransactionOutput(null, out, height, tx.isCoinBase()));
        this.version = tx.getVersion();
        this.lockTime = tx.getLockTime();
        this.hash = tx.getHash();
    }

    /**
     * The list of inputs in this transaction
     */
    public List<TransactionInput> getInputs() {
        return inputs;
    }

    /**
     * The list of outputs in this transaction.
     * Note that the hashes of all of these are null
     */
    public List<StoredTransactionOutput> getOutputs() {
        return outputs;
    }

    /**
     * The hash of this stored transaction
     */
    public Sha256Hash getHash() {
        return hash;
    }

    /**
     * The lockTime of the stored transaction
     */
    public long getLockTime() {
        return lockTime;
    }

    /**
     * The version of the stored transaction
     */
    public long getVersion() {
        return version;
    }

    /**
     * A coinbase transaction is one that creates a new coin. They are the first transaction in each block and their
     * value is determined by a formula that all implementations of BitCoin share. In 2011 the value of a coinbase
     * transaction is 50 coins, but in future it will be less. A coinbase transaction is defined not only by its
     * position in a block but by the data in the inputs.
     */
    public boolean isCoinBase() {
        return inputs.get(0).isCoinBase();
    }

    public String toString() {
        return "Stored Transaction: " + hash.toString();
    }

    // hashCode/equals are both based solely on the transaction hash, keeping them consistent.
    public int hashCode() {
        return getHash().hashCode();
    }

    public boolean equals(Object o) {
        if (!(o instanceof StoredTransaction)) return false;
        return ((StoredTransaction) o).getHash().equals(this.getHash());
    }
}
/**
 * Key type for the in-memory transaction-output map (avoids having to think about
 * NetworkParameters, which is required for {@link TransactionOutPoint}).
 */
class StoredTransactionOutPoint implements Serializable {
    private static final long serialVersionUID = -4064230006297064377L;

    /** Hash of the transaction to which we refer. */
    Sha256Hash hash;
    /** Which output of that transaction we are talking about. */
    long index;

    StoredTransactionOutPoint(Sha256Hash hash, long index) {
        this.hash = hash;
        this.index = index;
    }

    StoredTransactionOutPoint(StoredTransactionOutput out) {
        // Delegate so both constructors share a single initialization path.
        this(out.getHash(), out.getIndex());
    }

    /**
     * The hash of the transaction to which we refer
     */
    Sha256Hash getHash() {
        return hash;
    }

    /**
     * The index of the output in transaction to which we refer
     */
    long getIndex() {
        return index;
    }

    public int hashCode() {
        return (int) index + this.hash.hashCode();
    }

    public String toString() {
        return "Stored transaction out point: " + hash + ":" + index;
    }

    public boolean equals(Object o) {
        if (!(o instanceof StoredTransactionOutPoint))
            return false;
        StoredTransactionOutPoint other = (StoredTransactionOutPoint) o;
        return other.getIndex() == this.index && Objects.equal(this.getHash(), other.getHash());
    }
}
/**
 * A HashMap<KeyType, ValueType> that layers a simple per-thread batch "transaction"
 * over a backing map: writes and removals made between beginDatabaseBatchWrite() and
 * commitDatabaseBatchWrite() are buffered per thread and only folded into the backing
 * map on commit. This class is not thread-safe.
 */
class TransactionalHashMap<KeyType, ValueType> {
    ThreadLocal<HashMap<KeyType, ValueType>> tempMap;
    ThreadLocal<HashSet<KeyType>> tempSetRemoved;
    private ThreadLocal<Boolean> inTransaction;

    HashMap<KeyType, ValueType> map;

    public TransactionalHashMap() {
        tempMap = new ThreadLocal<HashMap<KeyType, ValueType>>();
        tempSetRemoved = new ThreadLocal<HashSet<KeyType>>();
        inTransaction = new ThreadLocal<Boolean>();
        map = new HashMap<KeyType, ValueType>();
    }

    /** Start buffering this thread's writes/removals. */
    public void beginDatabaseBatchWrite() {
        inTransaction.set(true);
    }

    /** Fold the buffered removals, then the buffered writes, into the backing map. */
    public void commitDatabaseBatchWrite() {
        HashSet<KeyType> removedKeys = tempSetRemoved.get();
        if (removedKeys != null) {
            for (KeyType removedKey : removedKeys) {
                map.remove(removedKey);
            }
        }
        HashMap<KeyType, ValueType> pendingWrites = tempMap.get();
        if (pendingWrites != null) {
            for (Map.Entry<KeyType, ValueType> pending : pendingWrites.entrySet()) {
                map.put(pending.getKey(), pending.getValue());
            }
        }
        // Clearing the buffers also ends the transaction.
        abortDatabaseBatchWrite();
    }

    /** Discard all buffered changes and end the transaction. */
    public void abortDatabaseBatchWrite() {
        inTransaction.set(false);
        tempSetRemoved.remove();
        tempMap.remove();
    }

    /** Read through the transaction buffers first, then the backing map. */
    public ValueType get(KeyType key) {
        if (Boolean.TRUE.equals(inTransaction.get())) {
            HashMap<KeyType, ValueType> pendingWrites = tempMap.get();
            if (pendingWrites != null) {
                ValueType pendingValue = pendingWrites.get(key);
                if (pendingValue != null) {
                    return pendingValue;
                }
            }
            HashSet<KeyType> removedKeys = tempSetRemoved.get();
            if (removedKeys != null && removedKeys.contains(key)) {
                return null;
            }
        }
        return map.get(key);
    }

    public void put(KeyType key, ValueType value) {
        if (!Boolean.TRUE.equals(inTransaction.get())) {
            map.put(key, value);
            return;
        }
        // A write cancels any earlier buffered removal of the same key.
        HashSet<KeyType> removedKeys = tempSetRemoved.get();
        if (removedKeys != null) {
            removedKeys.remove(key);
        }
        if (tempMap.get() == null) {
            tempMap.set(new HashMap<KeyType, ValueType>());
        }
        tempMap.get().put(key, value);
    }

    public ValueType remove(KeyType key) {
        if (!Boolean.TRUE.equals(inTransaction.get())) {
            return map.remove(key);
        }
        // Record a removal for any committed entry; a buffered write wins as return value.
        ValueType committedValue = map.get(key);
        if (committedValue != null) {
            if (tempSetRemoved.get() == null) {
                tempSetRemoved.set(new HashSet<KeyType>());
            }
            tempSetRemoved.get().add(key);
        }
        HashMap<KeyType, ValueType> pendingWrites = tempMap.get();
        if (pendingWrites != null) {
            ValueType pendingValue = pendingWrites.remove(key);
            if (pendingValue != null) {
                return pendingValue;
            }
        }
        return committedValue;
    }
}
/**
 * A Map with multiple key types that is DB per-thread-transaction-aware.
 * Values are stored under a unique key (transactionally) and additionally indexed
 * by a non-unique multi-key so groups can be removed together.
 * However, this class is not thread-safe.
 * @param UniqueKeyType is a key that must be unique per object
 * @param MultiKeyType is a key that can have multiple values
 */
class TransactionalMultiKeyHashMap<UniqueKeyType, MultiKeyType, ValueType> {
    TransactionalHashMap<UniqueKeyType, ValueType> mapValues;
    HashMap<MultiKeyType, Set<UniqueKeyType>> mapKeys;

    public TransactionalMultiKeyHashMap() {
        mapValues = new TransactionalHashMap<UniqueKeyType, ValueType>();
        mapKeys = new HashMap<MultiKeyType, Set<UniqueKeyType>>();
    }

    public void BeginTransaction() {
        mapValues.beginDatabaseBatchWrite();
    }

    public void CommitTransaction() {
        mapValues.commitDatabaseBatchWrite();
    }

    public void AbortTransaction() {
        mapValues.abortDatabaseBatchWrite();
    }

    public ValueType get(UniqueKeyType key) {
        return mapValues.get(key);
    }

    public void put(UniqueKeyType uniqueKey, MultiKeyType multiKey, ValueType value) {
        mapValues.put(uniqueKey, value);
        // NOTE: the multi-key index is intentionally NOT transactional.
        Set<UniqueKeyType> keysForMultiKey = mapKeys.get(multiKey);
        if (keysForMultiKey == null) {
            keysForMultiKey = new HashSet<UniqueKeyType>();
            mapKeys.put(multiKey, keysForMultiKey);
        }
        keysForMultiKey.add(uniqueKey);
    }

    public ValueType removeByUniqueKey(UniqueKeyType key) {
        return mapValues.remove(key);
    }

    public void removeByMultiKey(MultiKeyType key) {
        Set<UniqueKeyType> keysForMultiKey = mapKeys.remove(key);
        if (keysForMultiKey == null) {
            return;
        }
        for (UniqueKeyType uniqueKey : keysForMultiKey) {
            removeByUniqueKey(uniqueKey);
        }
    }
}
/**
* Keeps {@link StoredBlock}s, {@link StoredUndoableBlock}s and {@link StoredTransactionOutput}s in memory.
* Used primarily for unit testing.
*/
public class MemoryFullPrunedBlockStore implements FullPrunedBlockStore {
    /** Pairs a chain-header StoredBlock with a flag recording whether it was ever stored with undo data. */
    protected static class StoredBlockAndWasUndoableFlag {
        public StoredBlock block;
        public boolean wasUndoable;
        public StoredBlockAndWasUndoableFlag(StoredBlock block, boolean wasUndoable) { this.block = block; this.wasUndoable = wasUndoable; }
    }
    // Headers keyed by block hash; the flag records whether undo data once existed for the block.
    private TransactionalHashMap<Sha256Hash, StoredBlockAndWasUndoableFlag> blockMap;
    // Full undoable blocks keyed by hash (unique) and height (multi-key, for depth-based pruning).
    private TransactionalMultiKeyHashMap<Sha256Hash, Integer, StoredUndoableBlock> fullBlockMap;
    //TODO: Use something more suited to remove-heavy use?
    private TransactionalHashMap<StoredTransactionOutPoint, StoredTransactionOutput> transactionOutputMap;
    private StoredBlock chainHead;
    private StoredBlock verifiedChainHead;
    // Depth (in blocks) for which full undoable blocks are retained.
    private int fullStoreDepth;
    /**
     * Set up the MemoryFullPrunedBlockStore
     * @param params The network parameters of this block store - used to get genesis block
     * @param fullStoreDepth The depth of blocks to keep FullStoredBlocks instead of StoredBlocks
     */
    public MemoryFullPrunedBlockStore(NetworkParameters params, int fullStoreDepth) {
        blockMap = new TransactionalHashMap<Sha256Hash, StoredBlockAndWasUndoableFlag>();
        fullBlockMap = new TransactionalMultiKeyHashMap<Sha256Hash, Integer, StoredUndoableBlock>();
        transactionOutputMap = new TransactionalHashMap<StoredTransactionOutPoint, StoredTransactionOutput>();
        // A non-positive depth is clamped to 1 so at least the tip keeps undo data.
        this.fullStoreDepth = fullStoreDepth > 0 ? fullStoreDepth : 1;
        // Insert the genesis block.
        try {
            StoredBlock storedGenesisHeader = new StoredBlock(params.getGenesisBlock().cloneAsHeader(), params.getGenesisBlock().getWork(), 0);
            // The coinbase in the genesis block is not spendable
            List<Transaction> genesisTransactions = Lists.newLinkedList();
            StoredUndoableBlock storedGenesis = new StoredUndoableBlock(params.getGenesisBlock().getHash(), genesisTransactions);
            put(storedGenesisHeader, storedGenesis);
            setChainHead(storedGenesisHeader);
            setVerifiedChainHead(storedGenesisHeader);
        } catch (BlockStoreException e) {
            throw new RuntimeException(e); // Cannot happen.
        } catch (VerificationException e) {
            throw new RuntimeException(e); // Cannot happen.
        }
    }
public synchronized void put(StoredBlock block) throws BlockStoreException {
Preconditions.checkNotNull(blockMap, "MemoryFullPrunedBlockStore is closed");
Sha256Hash hash = block.getHeader().getHash();
blockMap.put(hash, new StoredBlockAndWasUndoableFlag(block, false));
}
public synchronized void put(StoredBlock storedBlock, StoredUndoableBlock undoableBlock) throws BlockStoreException {
Preconditions.checkNotNull(blockMap, "MemoryFullPrunedBlockStore is closed");
Sha256Hash hash = storedBlock.getHeader().getHash();
fullBlockMap.put(hash, storedBlock.getHeight(), undoableBlock);
blockMap.put(hash, new StoredBlockAndWasUndoableFlag(storedBlock, true));
}
public synchronized StoredBlock get(Sha256Hash hash) throws BlockStoreException {
Preconditions.checkNotNull(blockMap, "MemoryFullPrunedBlockStore is closed");
StoredBlockAndWasUndoableFlag storedBlock = blockMap.get(hash);
return storedBlock == null ? null : storedBlock.block;
}
public synchronized StoredBlock getOnceUndoableStoredBlock(Sha256Hash hash) throws BlockStoreException {
Preconditions.checkNotNull(blockMap, "MemoryFullPrunedBlockStore is closed");
StoredBlockAndWasUndoableFlag storedBlock = blockMap.get(hash);
return (storedBlock != null && storedBlock.wasUndoable) ? storedBlock.block : null;
}
public synchronized StoredUndoableBlock getUndoBlock(Sha256Hash hash) throws BlockStoreException {
Preconditions.checkNotNull(fullBlockMap, "MemoryFullPrunedBlockStore is closed");
return fullBlockMap.get(hash);
}
public synchronized StoredBlock getChainHead() throws BlockStoreException {
Preconditions.checkNotNull(blockMap, "MemoryFullPrunedBlockStore is closed");
return chainHead;
}
public synchronized void setChainHead(StoredBlock chainHead) throws BlockStoreException {
Preconditions.checkNotNull(blockMap, "MemoryFullPrunedBlockStore is closed");
this.chainHead = chainHead;
}
public synchronized StoredBlock getVerifiedChainHead() throws BlockStoreException {
Preconditions.checkNotNull(blockMap, "MemoryFullPrunedBlockStore is closed");
return verifiedChainHead;
}
public synchronized void setVerifiedChainHead(StoredBlock chainHead) throws BlockStoreException {
Preconditions.checkNotNull(blockMap, "MemoryFullPrunedBlockStore is closed");
this.verifiedChainHead = chainHead;
if (this.chainHead.getHeight() < chainHead.getHeight())
setChainHead(chainHead);
// Potential leak here if not all blocks get setChainHead'd
// Though the FullPrunedBlockStore allows for this, the current AbstractBlockChain will not do it.
fullBlockMap.removeByMultiKey(chainHead.getHeight() - fullStoreDepth);
}
public void close() {
blockMap = null;
fullBlockMap = null;
transactionOutputMap = null;
}
public synchronized StoredTransactionOutput getTransactionOutput(Sha256Hash hash, long index) throws BlockStoreException {
Preconditions.checkNotNull(transactionOutputMap, "MemoryFullPrunedBlockStore is closed");
return transactionOutputMap.get(new StoredTransactionOutPoint(hash, index));
}
public synchronized void addUnspentTransactionOutput(StoredTransactionOutput out) throws BlockStoreException {
Preconditions.checkNotNull(transactionOutputMap, "MemoryFullPrunedBlockStore is closed");
transactionOutputMap.put(new StoredTransactionOutPoint(out), out);
}
public synchronized void removeUnspentTransactionOutput(StoredTransactionOutput out) throws BlockStoreException {
Preconditions.checkNotNull(transactionOutputMap, "MemoryFullPrunedBlockStore is closed");
if (transactionOutputMap.remove(new StoredTransactionOutPoint(out)) == null)
throw new BlockStoreException("Tried to remove a StoredTransactionOutput from MemoryFullPrunedBlockStore that it didn't have!");
}
public synchronized void beginDatabaseBatchWrite() throws BlockStoreException {
blockMap.beginDatabaseBatchWrite();
fullBlockMap.BeginTransaction();
transactionOutputMap.beginDatabaseBatchWrite();
}
public synchronized void commitDatabaseBatchWrite() throws BlockStoreException {
blockMap.commitDatabaseBatchWrite();
fullBlockMap.CommitTransaction();
transactionOutputMap.commitDatabaseBatchWrite();
}
public synchronized void abortDatabaseBatchWrite() throws BlockStoreException {
blockMap.abortDatabaseBatchWrite();
fullBlockMap.AbortTransaction();
transactionOutputMap.abortDatabaseBatchWrite();
}
public synchronized boolean hasUnspentOutputs(Sha256Hash hash, int numOutputs) throws BlockStoreException {
for (int i = 0; i < numOutputs; i++)
if (getTransactionOutput(hash, i) != null)
return true;
return false;
}
}
| |
/**
* Copyright (c) 2014,
* Charles Prud'homme (TASC, INRIA Rennes, LINA CNRS UMR 6241),
* Jean-Guillaume Fages (COSLING S.A.S.).
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the <organization> nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.chocosolver.docs;
import org.chocosolver.solver.Solver;
import org.chocosolver.solver.constraints.ICF;
import org.chocosolver.solver.constraints.extension.Tuples;
import org.chocosolver.solver.constraints.nary.automata.FA.CostAutomaton;
import org.chocosolver.solver.constraints.nary.automata.FA.FiniteAutomaton;
import org.chocosolver.solver.constraints.nary.circuit.CircuitConf;
import org.chocosolver.solver.trace.Chatterbox;
import org.chocosolver.solver.variables.BoolVar;
import org.chocosolver.solver.variables.IntVar;
import org.chocosolver.solver.variables.Task;
import org.chocosolver.solver.variables.VF;
import org.testng.annotations.Test;
/**
* BEWARE: 5_elements.rst SHOULD BE UPDATED ANYTIME THIS CLASS IS CHANGED
*
* @author Charles Prud'homme
* @version choco
* @since 16/09/2014
*/
public class IntConstraintExamples {
@Test
public void arithm1() {
Solver solver = new Solver();
IntVar X = VF.enumerated("X", 1, 4, solver);
solver.post(ICF.arithm(X, ">", 2));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testmember1() {
Solver solver = new Solver();
IntVar X = VF.enumerated("X", 1, 4, solver);
solver.post(ICF.member(X, new int[]{-2, -1, 0, 1, 2}));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testmember2() {
Solver solver = new Solver();
IntVar X = VF.enumerated("X", 1, 4, solver);
solver.post(ICF.member(X, 2, 5));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testnotmember1() {
Solver solver = new Solver();
IntVar X = VF.enumerated("X", 1, 4, solver);
solver.post(ICF.not_member(X, new int[]{-2, -1, 0, 1, 2}));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testnotmember2() {
Solver solver = new Solver();
IntVar X = VF.enumerated("X", 1, 4, solver);
solver.post(ICF.not_member(X, 2, 5));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testabsolute() {
Solver solver = new Solver();
IntVar X = VF.enumerated("X", 0, 2, solver);
IntVar Y = VF.enumerated("X", -6, 1, solver);
solver.post(ICF.absolute(X, Y));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testarithm3() {
Solver solver = new Solver();
IntVar X = VF.enumerated("X", 0, 2, solver);
IntVar Y = VF.enumerated("X", -6, 1, solver);
solver.post(ICF.arithm(X, "<=", Y, "+", 1));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testdistance1() {
Solver solver = new Solver();
IntVar X = VF.enumerated("X", 0, 2, solver);
IntVar Y = VF.enumerated("X", -3, 1, solver);
solver.post(ICF.distance(X, Y, "=", 1));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testelement1() {
Solver solver = new Solver();
IntVar V = VF.enumerated("V", -2, 2, solver);
IntVar I = VF.enumerated("I", 0, 5, solver);
solver.post(ICF.element(V, new int[]{2, -2, 1, -1, 0}, I, 0, "none"));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testsquare() {
Solver solver = new Solver();
IntVar X = VF.enumerated("X", 0, 5, solver);
IntVar Y = VF.enumerated("Y", -1, 3, solver);
solver.post(ICF.square(X, Y));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testtable1() {
Solver solver = new Solver();
IntVar X = VF.enumerated("X", 0, 5, solver);
IntVar Y = VF.enumerated("Y", -1, 3, solver);
Tuples tuples = new Tuples(true);
tuples.add(1, -2);
tuples.add(1, 1);
tuples.add(4, 2);
tuples.add(1, 4);
solver.post(ICF.table(X, Y, tuples, "AC2001"));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testdistance2() {
Solver solver = new Solver();
IntVar X = VF.enumerated("X", 1, 3, solver);
IntVar Y = VF.enumerated("Y", -1, 1, solver);
IntVar Z = VF.enumerated("Z", 2, 3, solver);
solver.post(ICF.distance(X, Y, "<", Z));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testeucli_div() {
Solver solver = new Solver();
IntVar X = VF.enumerated("X", 1, 3, solver);
IntVar Y = VF.enumerated("Y", -1, 1, solver);
IntVar Z = VF.enumerated("Z", 2, 3, solver);
solver.post(ICF.eucl_div(X, Y, Z));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testmaximum() {
Solver solver = new Solver();
IntVar MAX = VF.enumerated("MAX", 1, 3, solver);
IntVar Y = VF.enumerated("Y", -1, 1, solver);
IntVar Z = VF.enumerated("Z", 2, 3, solver);
solver.post(ICF.maximum(MAX, Y, Z));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testminimum() {
Solver solver = new Solver();
IntVar MIN = VF.enumerated("MIN", 1, 3, solver);
IntVar Y = VF.enumerated("Y", -1, 1, solver);
IntVar Z = VF.enumerated("Z", 2, 3, solver);
solver.post(ICF.minimum(MIN, Y, Z));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testmod() {
Solver solver = new Solver();
IntVar X = VF.enumerated("X", 2, 4, solver);
IntVar Y = VF.enumerated("Y", -1, 4, solver);
IntVar Z = VF.enumerated("Z", 1, 3, solver);
solver.post(ICF.mod(X, Y, Z));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testtimes() {
Solver solver = new Solver();
IntVar X = VF.enumerated("X", -1, 2, solver);
IntVar Y = VF.enumerated("Y", 2, 4, solver);
IntVar Z = VF.enumerated("Z", 5, 7, solver);
solver.post(ICF.times(X, Y, Z));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testalldifferent() {
Solver solver = new Solver();
IntVar W = VF.enumerated("W", 0, 1, solver);
IntVar X = VF.enumerated("X", -1, 2, solver);
IntVar Y = VF.enumerated("Y", 2, 4, solver);
IntVar Z = VF.enumerated("Z", 5, 7, solver);
solver.post(ICF.alldifferent(new IntVar[]{W, X, Y, Z}));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testalldifferent_cond() {
Solver solver = new Solver();
IntVar[] XS = VF.enumeratedArray("XS", 5, 0, 3, solver);
solver.post(ICF.alldifferent_conditionnal(XS,
x -> !x.contains(1) && !x.contains(3)));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testalldifferent_exc0() {
Solver solver = new Solver();
IntVar[] XS = VF.enumeratedArray("XS", 4, 0, 2, solver);
solver.post(ICF.alldifferent_except_0(XS));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testamong() {
Solver solver = new Solver();
IntVar N = VF.enumerated("N", 2, 3, solver);
IntVar[] XS = VF.enumeratedArray("XS", 4, 0, 6, solver);
solver.post(ICF.among(N, XS, new int[]{1, 2, 3}));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testatleast_nvalues() {
Solver solver = new Solver();
IntVar[] XS = VF.enumeratedArray("XS", 4, 0, 2, solver);
IntVar N = VF.enumerated("N", 2, 3, solver);
solver.post(ICF.atleast_nvalues(XS, N, true));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testatmost_nvalues() {
Solver solver = new Solver();
IntVar[] XS = VF.enumeratedArray("XS", 4, 0, 2, solver);
IntVar N = VF.enumerated("N", 1, 3, solver);
solver.post(ICF.atmost_nvalues(XS, N, false));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testbin_packing() {
Solver solver = new Solver();
IntVar[] IBIN = VF.enumeratedArray("IBIN", 5, 1, 3, solver);
int[] sizes = new int[]{2, 3, 1, 4, 2};
IntVar[] BLOADS = VF.enumeratedArray("BLOADS", 3, 0, 5, solver);
solver.post(ICF.bin_packing(IBIN, sizes, BLOADS, 1));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testboolean_channeling() {
Solver solver = new Solver();
BoolVar[] BVARS = VF.boolArray("BVARS", 5, solver);
IntVar VAR = VF.enumerated("VAR", 1, 5, solver);
solver.post(ICF.boolean_channeling(BVARS, VAR, 1));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testcircuit() {
Solver solver = new Solver();
IntVar[] NODES = VF.enumeratedArray("NODES", 5, 0, 4, solver);
solver.post(ICF.circuit(NODES, 0, CircuitConf.LIGHT));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testcost_regular() {
Solver solver = new Solver();
IntVar[] VARS = VF.enumeratedArray("VARS", 5, 0, 2, solver);
IntVar COST = VF.enumerated("COST", 0, 10, solver);
FiniteAutomaton fauto = new FiniteAutomaton();
int start = fauto.addState();
int end = fauto.addState();
fauto.setInitialState(start);
fauto.setFinal(start, end);
fauto.addTransition(start, start, 0, 1);
fauto.addTransition(start, end, 2);
fauto.addTransition(end, end, 1);
fauto.addTransition(end, start, 0, 2);
int[][] costs = new int[5][3];
costs[0] = new int[]{1, 2, 3};
costs[1] = new int[]{2, 3, 1};
costs[2] = new int[]{3, 1, 2};
costs[3] = new int[]{3, 2, 1};
costs[4] = new int[]{2, 1, 3};
solver.post(ICF.cost_regular(VARS, COST, CostAutomaton.makeSingleResource(fauto, costs, COST.getLB(), COST.getUB())));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testcount() {
Solver solver = new Solver();
IntVar[] VS = VF.enumeratedArray("VS", 4, 0, 3, solver);
IntVar VA = VF.enumerated("VA", new int[]{1, 3}, solver);
IntVar CO = VF.enumerated("CO", new int[]{0, 2, 4}, solver);
solver.post(ICF.count(VA, VS, CO));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testcumulative() {
Solver solver = new Solver();
Task[] TS = new Task[5];
IntVar[] HE = new IntVar[5];
for (int i = 0; i < TS.length; i++) {
IntVar S = VF.bounded("S_" + i, 0, 4, solver);
TS[i] = VF.task(
S,
VF.fixed("D_" + i, i + 1, solver),
VF.offset(S, i + 1)
);
HE[i] = VF.bounded("HE_" + i, i - 1, i + 1, solver);
}
IntVar CA = VF.enumerated("CA", 1, 3, solver);
solver.post(ICF.cumulative(TS, HE, CA, true));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testdiffn() {
Solver solver = new Solver();
IntVar[] X = VF.boundedArray("X", 4, 0, 1, solver);
IntVar[] Y = VF.boundedArray("Y", 4, 0, 2, solver);
IntVar[] D = new IntVar[4];
IntVar[] W = new IntVar[4];
for (int i = 0; i < 4; i++) {
D[i] = VF.fixed("D_" + i, 1, solver);
W[i] = VF.fixed("W_" + i, i + 1, solver);
}
solver.post(ICF.diffn(X, Y, D, W, true));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testglobal_cardinality() {
Solver solver = new Solver();
IntVar[] VS = VF.boundedArray("VS", 4, 0, 4, solver);
int[] values = new int[]{-1, 1, 2};
IntVar[] OCC = VF.boundedArray("OCC", 3, 0, 2, solver);
solver.post(ICF.global_cardinality(VS, values, OCC, true));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testinverse_channeling() {
Solver solver = new Solver();
IntVar[] X = VF.enumeratedArray("X", 3, 0, 3, solver);
IntVar[] Y = VF.enumeratedArray("Y", 3, 1, 4, solver);
solver.post(ICF.inverse_channeling(X, Y, 0, 1));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testknapsack() {
Solver solver = new Solver();
IntVar[] IT = new IntVar[3]; // 3 items
IT[0] = VF.bounded("IT_0", 0, 3, solver);
IT[1] = VF.bounded("IT_1", 0, 2, solver);
IT[2] = VF.bounded("IT_2", 0, 1, solver);
IntVar WE = VF.bounded("WE", 0, 8, solver);
IntVar EN = VF.bounded("EN", 0, 6, solver);
int[] weights = new int[]{1, 3, 4};
int[] energies = new int[]{1, 4, 6};
solver.post(ICF.knapsack(IT, WE, EN, weights, energies));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testlex_chain_less() {
Solver solver = new Solver();
IntVar[] X = VF.enumeratedArray("X", 3, -1, 1, solver);
IntVar[] Y = VF.enumeratedArray("Y", 3, 1, 2, solver);
IntVar[] Z = VF.enumeratedArray("Z", 3, 0, 2, solver);
solver.post(ICF.lex_chain_less(X, Y, Z));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testlex_chain_less_eq() {
Solver solver = new Solver();
IntVar[] X = VF.enumeratedArray("X", 3, -1, 1, solver);
IntVar[] Y = VF.enumeratedArray("Y", 3, 1, 2, solver);
IntVar[] Z = VF.enumeratedArray("Z", 3, 0, 2, solver);
solver.post(ICF.lex_chain_less_eq(X, Y, Z));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testlex_less() {
Solver solver = new Solver();
IntVar[] X = VF.enumeratedArray("X", 3, -1, 1, solver);
IntVar[] Y = VF.enumeratedArray("Y", 3, 1, 2, solver);
solver.post(ICF.lex_less(X, Y));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testlex_less_eq() {
Solver solver = new Solver();
IntVar[] X = VF.enumeratedArray("X", 3, -1, 1, solver);
IntVar[] Y = VF.enumeratedArray("Y", 3, 1, 2, solver);
solver.post(ICF.lex_less_eq(X, Y));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testmulticost_regular() {
Solver solver = new Solver();
IntVar[] VARS = VF.enumeratedArray("VARS", 5, 0, 2, solver);
IntVar[] CVARS = VF.enumeratedArray("CVARS", 5, 0, 10, solver);
FiniteAutomaton fauto = new FiniteAutomaton();
int start = fauto.addState();
int end = fauto.addState();
fauto.setInitialState(start);
fauto.setFinal(start, end);
fauto.addTransition(start, start, 0, 1);
fauto.addTransition(start, end, 2);
fauto.addTransition(end, end, 1);
fauto.addTransition(end, start, 0, 2);
int[][][] costs = new int[5][3][];
// costs[0] = new int[]{1, 2, 3};
// costs[1] = new int[]{2, 3, 1};
// costs[2] = new int[]{3, 1, 2};
// costs[3] = new int[]{3, 2, 1};
// costs[4] = new int[]{2, 1, 3};
solver.post(ICF.multicost_regular(VARS, CVARS, CostAutomaton.makeMultiResources(fauto, costs, CVARS)));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testnvalues() {
Solver solver = new Solver();
IntVar[] VS = VF.enumeratedArray("VS", 4, 0, 2, solver);
IntVar N = VF.enumerated("N", 0, 3, solver);
solver.post(ICF.nvalues(VS, N));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testpath() {
Solver solver = new Solver();
IntVar[] VS = VF.enumeratedArray("VS", 4, 0, 4, solver);
IntVar S = VF.enumerated("S", 0, 3, solver);
IntVar E = VF.enumerated("E", 0, 3, solver);
solver.post(ICF.path(VS, S, E, 0));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testregular() {
Solver solver = new Solver();
IntVar[] CS = VF.enumeratedArray("CS", 4, 1, 5, solver);
solver.post(ICF.regular(CS,
new FiniteAutomaton("(1|2)(3*)(4|5)")));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testscalar() {
Solver solver = new Solver();
IntVar[] CS = VF.enumeratedArray("CS", 4, 1, 4, solver);
int[] coeffs = new int[]{1, 2, 3, 4};
IntVar R = VF.bounded("R", 0, 20, solver);
solver.post(ICF.scalar(CS, coeffs, R));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testsort() {
Solver solver = new Solver();
IntVar[] X = VF.enumeratedArray("X", 3, 0, 2, solver);
IntVar[] Y = VF.enumeratedArray("Y", 3, 0, 2, solver);
solver.post(ICF.sort(X, Y));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testsubcircuit() {
Solver solver = new Solver();
IntVar[] NODES = VF.enumeratedArray("NS", 5, 0, 4, solver);
IntVar SI = VF.enumerated("SI", 2, 3, solver);
solver.post(ICF.subcircuit(NODES, 0, SI));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testsubpath() {
Solver solver = new Solver();
IntVar[] VS = VF.enumeratedArray("VS", 4, 0, 4, solver);
IntVar S = VF.enumerated("S", 0, 3, solver);
IntVar E = VF.enumerated("E", 0, 3, solver);
IntVar SI = VF.enumerated("SI", 2, 3, solver);
solver.post(ICF.subpath(VS, S, E, 0, SI));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testsum() {
Solver solver = new Solver();
IntVar[] VS = VF.enumeratedArray("VS", 4, 0, 4, solver);
IntVar SU = VF.enumerated("SU", 2, 3, solver);
solver.post(ICF.sum(VS, "<=", SU));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testtree() {
Solver solver = new Solver();
IntVar[] VS = VF.enumeratedArray("VS", 4, 0, 4, solver);
IntVar NT = VF.enumerated("NT", 2, 3, solver);
solver.post(ICF.tree(VS, NT, 0));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testtsp() {
Solver solver = new Solver();
IntVar[] VS = VF.enumeratedArray("VS", 4, 0, 4, solver);
IntVar CO = VF.enumerated("CO", 0, 15, solver);
int[][] costs = new int[][]{{0, 1, 3, 7}, {1, 0, 1, 3}, {3, 1, 0, 1}, {7, 3, 1, 0}};
solver.post(ICF.tsp(VS, CO, costs));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
@Test(groups = "1s")
public void testbit_channeling() {
Solver solver = new Solver();
BoolVar[] BVARS = VF.boolArray("BVARS", 4, solver);
IntVar VAR = VF.enumerated("VAR", 0, 15, solver);
solver.post(ICF.bit_channeling(BVARS, VAR));
Chatterbox.showSolutions(solver);
solver.findAllSolutions();
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.work.foreman;
import io.netty.buffer.ByteBuf;
import java.io.Closeable;
import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.common.exceptions.ExecutionSetupException;
import org.apache.drill.common.logical.LogicalPlan;
import org.apache.drill.common.logical.PlanProperties.Generator.ResultMode;
import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.cache.DistributedCache.CacheConfig;
import org.apache.drill.exec.cache.DistributedCache.SerializationMode;
import org.apache.drill.exec.coord.DistributedSemaphore;
import org.apache.drill.exec.coord.DistributedSemaphore.DistributedLease;
import org.apache.drill.exec.exception.FragmentSetupException;
import org.apache.drill.exec.exception.OptimizerException;
import org.apache.drill.exec.ops.QueryContext;
import org.apache.drill.exec.opt.BasicOptimizer;
import org.apache.drill.exec.physical.PhysicalPlan;
import org.apache.drill.exec.physical.base.PhysicalOperator;
import org.apache.drill.exec.physical.config.ExternalSort;
import org.apache.drill.exec.physical.impl.SendingAccountor;
import org.apache.drill.exec.physical.impl.materialize.QueryWritableBatch;
import org.apache.drill.exec.pigparser.planconverter.PigPlanTranslator;
import org.apache.drill.exec.pigparser.util.PigParserUtil;
import org.apache.drill.exec.planner.fragment.Fragment;
import org.apache.drill.exec.planner.fragment.MakeFragmentsVisitor;
import org.apache.drill.exec.planner.fragment.PlanningSet;
import org.apache.drill.exec.planner.fragment.SimpleParallelizer;
import org.apache.drill.exec.planner.fragment.StatsCollector;
import org.apache.drill.exec.planner.sql.DirectPlan;
import org.apache.drill.exec.planner.sql.DrillSqlWorker;
import org.apache.drill.exec.proto.BitControl.PlanFragment;
import org.apache.drill.exec.proto.ExecProtos.FragmentHandle;
import org.apache.drill.exec.proto.GeneralRPCProtos.Ack;
import org.apache.drill.exec.proto.UserBitShared.DrillPBError;
import org.apache.drill.exec.proto.UserBitShared.QueryId;
import org.apache.drill.exec.proto.UserBitShared.QueryResult;
import org.apache.drill.exec.proto.UserBitShared.QueryResult.QueryState;
import org.apache.drill.exec.proto.UserProtos.RequestResults;
import org.apache.drill.exec.proto.UserProtos.RunQuery;
import org.apache.drill.exec.proto.helper.QueryIdHelper;
import org.apache.drill.exec.rpc.BaseRpcOutcomeListener;
import org.apache.drill.exec.rpc.RpcException;
import org.apache.drill.exec.rpc.RpcOutcomeListener;
import org.apache.drill.exec.rpc.user.UserServer.UserClientConnection;
import org.apache.drill.exec.server.DrillbitContext;
import org.apache.drill.exec.util.AtomicState;
import org.apache.drill.exec.util.Pointer;
import org.apache.drill.exec.work.ErrorHelper;
import org.apache.drill.exec.work.QueryWorkUnit;
import org.apache.drill.exec.work.WorkManager.WorkerBee;
import com.google.common.collect.Lists;
/**
* Foreman manages all queries where this is the driving/root node.
*/
public class Foreman implements Runnable, Closeable, Comparable<Object>{
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(Foreman.class);
public static final CacheConfig<FragmentHandle, PlanFragment> FRAGMENT_CACHE = CacheConfig //
.newBuilder(FragmentHandle.class, PlanFragment.class) //
.mode(SerializationMode.PROTOBUF) //
.build();
private QueryId queryId;
private RunQuery queryRequest;
private QueryContext context;
private QueryManager fragmentManager;
private WorkerBee bee;
private UserClientConnection initiatingClient;
private final AtomicState<QueryState> state;
private final DistributedSemaphore smallSemaphore;
private final DistributedSemaphore largeSemaphore;
private final long queueThreshold;
private final long queueTimeout;
private volatile DistributedLease lease;
private final boolean queuingEnabled;
// Builds the per-query Foreman. Queue-related fields are final, so both branches of the
// queuing check must assign all four of them (semaphores, threshold, timeout).
public Foreman(WorkerBee bee, DrillbitContext dContext, UserClientConnection connection, QueryId queryId,
RunQuery queryRequest) {
this.queryId = queryId;
this.queryRequest = queryRequest;
// QueryContext must exist before reading options below.
this.context = new QueryContext(connection.getSession(), queryId, dContext);
this.queuingEnabled = context.getOptions().getOption(ExecConstants.ENABLE_QUEUE_KEY).bool_val;
if (queuingEnabled) {
// Two admission queues: small/large queries are throttled by separate distributed semaphores.
int smallQueue = context.getOptions().getOption(ExecConstants.SMALL_QUEUE_KEY).num_val.intValue();
int largeQueue = context.getOptions().getOption(ExecConstants.LARGE_QUEUE_KEY).num_val.intValue();
this.largeSemaphore = dContext.getClusterCoordinator().getSemaphore("query.large", largeQueue);
this.smallSemaphore = dContext.getClusterCoordinator().getSemaphore("query.small", smallQueue);
// Cost threshold that routes a query to the large queue, and how long to wait for a slot.
this.queueThreshold = context.getOptions().getOption(ExecConstants.QUEUE_THRESHOLD_KEY).num_val;
this.queueTimeout = context.getOptions().getOption(ExecConstants.QUEUE_TIMEOUT_KEY).num_val;
} else {
// Queuing disabled: zeroed/null values are never consulted.
this.largeSemaphore = null;
this.smallSemaphore = null;
this.queueThreshold = 0;
this.queueTimeout = 0;
}
this.initiatingClient = connection;
this.fragmentManager = new QueryManager(queryId, queryRequest, bee.getContext().getPersistentStoreProvider(), new ForemanManagerListener(), dContext.getController(), this);
this.bee = bee;
// Query lifecycle state machine, starting in PENDING.
this.state = new AtomicState<QueryState>(QueryState.PENDING) {
@Override
protected QueryState getStateFromNumber(int i) {
return QueryState.valueOf(i);
}
};
}
/**
 * @return the {@link QueryContext} created for this query's lifetime
 */
public QueryContext getContext() {
return context;
}
/** A query is finished once it has left both of the active states (PENDING, RUNNING). */
private boolean isFinished() {
    final QueryState current = state.getState();
    return current != QueryState.PENDING && current != QueryState.RUNNING;
}
/**
 * Transitions the query to FAILED and reports the error back to the client.
 *
 * @param message human-readable description of what failed
 * @param t       the cause, logged and converted into a {@link DrillPBError}
 */
private void fail(String message, Throwable t) {
    // A failure arriving after the query already reached a terminal state is only logged.
    if(isFinished()) {
        logger.error("Received a failure message after the query was already finished: {}", message, t);
    }
    // Try both active states; the CAS fails if another thread already moved us elsewhere.
    if (!state.updateState(QueryState.RUNNING, QueryState.FAILED)) {
        if (!state.updateState(QueryState.PENDING, QueryState.FAILED)) {
            logger.warn("Tried to update query state to FAILED, but it was neither RUNNING nor PENDING");
        }
    }
    boolean verbose = getContext().getOptions().getOption(ExecConstants.ENABLE_VERBOSE_ERRORS_KEY).bool_val;
    DrillPBError error = ErrorHelper.logAndConvertError(context.getCurrentEndpoint(), message, t, logger, verbose);
    // The error is delivered as the final result chunk for this query.
    QueryResult result = QueryResult //
        .newBuilder() //
        .addError(error) //
        .setIsLastChunk(true) //
        .setQueryState(QueryState.FAILED) //
        .setQueryId(queryId) //
        .build();
    cleanupAndSendResult(result);
}
/** Cancels the query: stops remote fragments and sends a final CANCELED result to the client. */
public void cancel() {
    // Terminal states cannot be canceled; bail out early.
    if (isFinished()) {
        return;
    }
    // cancel remote fragments.
    fragmentManager.cancel();
    QueryResult canceled = QueryResult.newBuilder()
        .setQueryState(QueryState.CANCELED)
        .setIsLastChunk(true)
        .setQueryId(queryId)
        .build();
    cleanupAndSendResult(canceled);
}
// Final step of every query outcome (success, failure, cancel): retire this Foreman from
// the worker bee, push the last result chunk to the client, then mark the query COMPLETED.
// NOTE(review): the RUNNING->COMPLETED transition intentionally happens last and its return
// value is ignored — for FAILED/CANCELED outcomes the state was already set by the caller.
void cleanupAndSendResult(QueryResult result) {
bee.retireForeman(this);
initiatingClient.sendResult(new ResponseSendListener(), new QueryWritableBatch(result), true);
state.updateState(QueryState.RUNNING, QueryState.COMPLETED);
}
/**
 * Listens for the outcome of sending the final query result to the client.
 * A send failure is expected when the client disconnected early, so it is logged at info.
 */
private class ResponseSendListener extends BaseRpcOutcomeListener<Ack> {
    @Override
    public void failed(RpcException ex) {
        // Typo fixed in message ("initating" -> "initiating").
        logger.info(
                "Failure while trying communicate query result to initiating client. This would happen if a client is disconnected before response notice can be sent.",
                ex);
    }
}
/**
 * Called by execution pool to do foreman setup. Actual query execution is a separate phase (and can be scheduled).
 *
 * <p>Renames the current thread for log attribution, dispatches on the request
 * type (logical/physical/SQL/Pig Latin), and always restores the thread name
 * and releases any queueing lease on exit.</p>
 */
public void run() {
  final String originalThread = Thread.currentThread().getName();
  // Thread name carries the query id so logs from this setup phase are attributable.
  Thread.currentThread().setName(QueryIdHelper.getQueryId(queryId) + ":foreman");
  fragmentManager.getStatus().setStartTime(System.currentTimeMillis());
  // convert a run query request into action
  try {
    switch (queryRequest.getType()) {
    case LOGICAL:
      parseAndRunLogicalPlan(queryRequest.getPlan());
      break;
    case PHYSICAL:
      parseAndRunPhysicalPlan(queryRequest.getPlan());
      break;
    case SQL:
      runSQL(queryRequest.getPlan());
      break;
    case PIGLATIN:
      parseAndRunPigLatin(queryRequest.getPlan());
      break;
    default:
      throw new UnsupportedOperationException();
    }
  } catch (AssertionError | Exception ex) {
    fail("Failure while setting up Foreman.", ex);
  } catch (OutOfMemoryError e) {
    // NOTE(review): deliberately kills the whole process on OOM rather than
    // attempting recovery — confirm this hard-exit policy is still desired.
    System.out.println("Out of memory, exiting.");
    System.out.flush();
    System.exit(-1);
  } finally {
    // Release the queue-admission lease (if one was acquired) and restore the
    // pooled thread's original name.
    releaseLease();
    Thread.currentThread().setName(originalThread);
  }
}
/**
 * Releases the queue-admission semaphore lease acquired during plan execution,
 * if any. Release is best-effort: failures are logged and swallowed because
 * the query outcome has already been determined.
 */
private void releaseLease() {
  if (lease != null) {
    try {
      lease.close();
    } catch (Exception e) {
      logger.warn("Failure while releasing lease.", e);
    } // removed stray empty statement (";") that followed this catch block
  }
}
/**
 * Translates a Pig Latin script into a Drill logical plan and executes it.
 * Translation or parse failures are reported via {@link #fail}.
 *
 * @param text the Pig Latin script submitted by the client
 */
private void parseAndRunPigLatin(String text) {
  try {
    LogicalPlan logicalPlan = new PigPlanTranslator().toDrillLogicalPlan(text, PigParserUtil.PigExecType.LOCALFILE);
    if(null == logicalPlan){
      fail("Failure in converting Pig script to Drill Logical Plan.", new Exception());
      // BUG FIX: must stop here; previously execution fell through and
      // dereferenced the null plan below, throwing a NullPointerException.
      return;
    }
    parseAndRunLogicalPlan(logicalPlan.toJsonString(DrillConfig.create()));
  } catch (Exception e){
    fail("Failure in parsing script.", e);
  }
}
/**
 * Parses a logical plan from JSON, converts it to a physical plan, and either
 * returns the physical plan (PHYSICAL result mode) or executes it (EXEC).
 * A LOGICAL result mode is rejected since a logical plan was submitted.
 *
 * @param json the logical plan in JSON form
 */
private void parseAndRunLogicalPlan(String json) {
  try {
    LogicalPlan logicalPlan = context.getPlanReader().readLogicalPlan(json);
    if (logicalPlan.getProperties().resultMode == ResultMode.LOGICAL) {
      fail("Failure running plan. You requested a result mode of LOGICAL and submitted a logical plan. In this case you're output mode must be PHYSICAL or EXEC.", new Exception());
      // BUG FIX: must stop after reporting the failure; previously the code
      // fell through and kept converting/executing a plan that had already
      // been failed, sending two results for one query.
      return;
    }
    if (logger.isDebugEnabled()) {
      logger.debug("Logical {}", logicalPlan.unparse(context.getConfig()));
    }
    PhysicalPlan physicalPlan = convert(logicalPlan);
    if (logicalPlan.getProperties().resultMode == ResultMode.PHYSICAL) {
      // Client asked for the physical plan itself, not its execution.
      returnPhysical(physicalPlan);
      return;
    }
    if (logger.isDebugEnabled()) {
      logger.debug("Physical {}", context.getConfig().getMapper().writeValueAsString(physicalPlan));
    }
    runPhysicalPlan(physicalPlan);
  } catch (IOException e) {
    fail("Failure while parsing logical plan.", e);
  } catch (OptimizerException e) {
    fail("Failure while converting logical plan to physical plan.", e);
  }
}
/**
 * Returns the physical plan to the client as a single-row result containing
 * its JSON rendering, by running a synthetic direct plan.
 */
private void returnPhysical(PhysicalPlan plan) {
  String jsonPlan = plan.unparse(context.getConfig().getMapper().writer());
  runPhysicalPlan(DirectPlan.createDirectPlan(context, new PhysicalFromLogicalExplain(jsonPlan)));
}
/**
 * Simple value holder exposing a physical plan's JSON rendering as a single
 * column for the EXPLAIN-style direct plan built in {@link #returnPhysical}.
 */
private class PhysicalFromLogicalExplain {
  public String json;

  public PhysicalFromLogicalExplain(String json) {
    this.json = json;
  }
}
/**
 * RPC outcome listener for a single outgoing message, tracked through a
 * {@link SendingAccountor}. A send failure is escalated to query failure.
 */
class SingleListener implements RpcOutcomeListener<Ack>{
  final SendingAccountor acct;
  public SingleListener() {
    acct = new SendingAccountor();
    // NOTE(review): the accountor is incremented twice here, but each callback
    // below decrements only once — confirm the double increment is intentional
    // (e.g. balanced by an external decrement) and not a copy-paste error.
    acct.increment();
    acct.increment();
  }
  @Override
  public void failed(RpcException ex) {
    acct.decrement();
    fail("Failure while sending single result.", ex);
  }
  @Override
  public void success(Ack value, ByteBuf buffer) {
    acct.decrement();
  }
}
/**
 * Deserializes a physical plan from its JSON form and executes it.
 * Parse errors are reported through {@link #fail}.
 *
 * @param json the physical plan in JSON form
 */
private void parseAndRunPhysicalPlan(String json) {
  try {
    runPhysicalPlan(context.getPlanReader().readPhysicalPlan(json));
  } catch (IOException e) {
    fail("Failure while parsing physical plan.", e);
  }
}
/**
 * Executes a physical plan: budgets memory for external sorts, parallelizes
 * the plan into fragments, optionally gates admission through the query
 * queue, distributes fragments via the cache, and starts execution.
 *
 * @param plan a physical plan whose result mode must be EXEC
 */
private void runPhysicalPlan(PhysicalPlan plan) {
  if(plan.getProperties().resultMode != ResultMode.EXEC) {
    fail(String.format("Failure running plan. You requested a result mode of %s and a physical plan can only be output as EXEC", plan.getProperties().resultMode), new Exception());
    // BUG FIX: must stop after reporting the failure; previously the method
    // fell through and executed the plan anyway, after the FAILED result had
    // already been sent to the client.
    return;
  }
  // The first operator in reverse-sorted order is the root (screen) operator.
  PhysicalOperator rootOperator = plan.getSortedOperators(false).iterator().next();
  MakeFragmentsVisitor makeFragmentsVisitor = new MakeFragmentsVisitor();
  Fragment rootFragment;
  try {
    rootFragment = rootOperator.accept(makeFragmentsVisitor, null);
  } catch (FragmentSetupException e) {
    fail("Failure while fragmenting query.", e);
    return;
  }
  // Count external sorts so each instance can be given an equal slice of the
  // per-node memory budget.
  int sortCount = 0;
  for (PhysicalOperator op : plan.getSortedOperators()) {
    if (op instanceof ExternalSort) {
      sortCount++;
    }
  }
  if (sortCount > 0) {
    long maxWidthPerNode = context.getOptions().getOption(ExecConstants.MAX_WIDTH_PER_NODE_KEY).num_val;
    // Budget = min(direct memory, configured top-level alloc, per-query option),
    // divided evenly across every sort instance on a node.
    long maxAllocPerNode = Math.min(DrillConfig.getMaxDirectMemory(), context.getConfig().getLong(ExecConstants.TOP_LEVEL_MAX_ALLOC));
    maxAllocPerNode = Math.min(maxAllocPerNode, context.getOptions().getOption(ExecConstants.MAX_QUERY_MEMORY_PER_NODE_KEY).num_val);
    long maxSortAlloc = maxAllocPerNode / (sortCount * maxWidthPerNode);
    logger.debug("Max sort alloc: {}", maxSortAlloc);
    for (PhysicalOperator op : plan.getSortedOperators()) {
      if (op instanceof ExternalSort) {
        ((ExternalSort)op).setMaxAllocation(maxSortAlloc);
      }
    }
  }
  PlanningSet planningSet = StatsCollector.collectStats(rootFragment);
  SimpleParallelizer parallelizer = new SimpleParallelizer(context);
  try {
    // Total plan cost decides which admission queue (small vs. large) to wait on.
    double size = 0;
    for (PhysicalOperator ops : plan.getSortedOperators()) {
      size += ops.getCost();
    }
    if (queuingEnabled) {
      if (size > this.queueThreshold) {
        this.lease = largeSemaphore.acquire(this.queueTimeout, TimeUnit.MILLISECONDS);
      } else {
        this.lease = smallSemaphore.acquire(this.queueTimeout, TimeUnit.MILLISECONDS);
      }
    }
    QueryWorkUnit work = parallelizer.getFragments(context.getOptions().getOptionList(), context.getCurrentEndpoint(),
        queryId, context.getActiveEndpoints(), context.getPlanReader(), rootFragment, planningSet, initiatingClient.getSession());
    this.context.getWorkBus().setFragmentStatusListener(work.getRootFragment().getHandle().getQueryId(), fragmentManager);
    List<PlanFragment> leafFragments = Lists.newArrayList();
    List<PlanFragment> intermediateFragments = Lists.newArrayList();
    // store fragments in distributed grid.
    logger.debug("Storing fragments");
    List<Future<PlanFragment>> queue = new LinkedList<>();
    for (PlanFragment f : work.getFragments()) {
      // store all fragments in grid since they are part of handshake.
      queue.add(context.getCache().getMap(FRAGMENT_CACHE).put(f.getHandle(), f));
      if (f.getLeafFragment()) {
        leafFragments.add(f);
      } else {
        intermediateFragments.add(f);
      }
    }
    // Wait (bounded) for every fragment write to land before starting execution.
    for (Future<PlanFragment> f : queue) {
      try {
        f.get(10, TimeUnit.SECONDS);
      } catch (InterruptedException | ExecutionException | TimeoutException e) {
        throw new ExecutionSetupException("failure while storing plan fragments", e);
      }
    }
    // +1 accounts for the root fragment run locally by this foreman.
    int totalFragments = 1 + intermediateFragments.size() + leafFragments.size();
    fragmentManager.getStatus().setTotalFragments(totalFragments);
    fragmentManager.getStatus().updateCache();
    logger.debug("Fragments stored.");
    logger.debug("Submitting fragments to run.");
    fragmentManager.runFragments(bee, work.getRootFragment(), work.getRootOperator(), initiatingClient, leafFragments, intermediateFragments);
    logger.debug("Fragments running.");
    state.updateState(QueryState.PENDING, QueryState.RUNNING);
  } catch (Exception e) {
    fail("Failure while setting up query.", e);
  }
}
/**
 * Plans a SQL statement into a physical plan, records its textual form on the
 * query status, and executes it. Any planning error is reported via
 * {@link #fail}.
 *
 * @param sql the SQL statement submitted by the client
 */
private void runSQL(String sql) {
  try {
    final DrillSqlWorker sqlWorker = new DrillSqlWorker(context);
    final Pointer<String> textPlan = new Pointer<>();
    final PhysicalPlan physicalPlan = sqlWorker.getPlan(sql, textPlan);
    fragmentManager.getStatus().setPlanText(textPlan.value);
    runPhysicalPlan(physicalPlan);
  } catch (Exception e) {
    fail("Failure while parsing sql.", e);
  }
}
/**
 * Converts a logical plan into a physical plan using the basic optimizer.
 *
 * @param plan the logical plan to optimize
 * @return the optimized physical plan
 * @throws OptimizerException if optimization fails
 */
private PhysicalPlan convert(LogicalPlan plan) throws OptimizerException {
  if (logger.isDebugEnabled()) {
    logger.debug("Converting logical plan {}.", plan.toJsonStringSafe(context.getConfig()));
  }
  // NOTE(review): constructs a fresh DrillConfig instead of reusing
  // context.getConfig() — confirm this is intentional.
  return new BasicOptimizer(DrillConfig.create(), context, initiatingClient).optimize(new BasicOptimizer.BasicOptimizationContext(context), plan);
}
/** Result polling is not supported by this foreman; always throws. */
public QueryResult getResult(UserClientConnection connection, RequestResults req) {
  throw new UnsupportedOperationException();
}
/** Returns the identifier of the query this foreman manages. */
public QueryId getQueryId() {
  return queryId;
}
// Intentionally a no-op: this foreman owns no closeable resources beyond
// those released in releaseLease()/cleanupAndSendResult().
@Override
public void close() throws IOException {
}
/** Returns the current lifecycle state of the query (PENDING/RUNNING/terminal). */
public QueryState getQueryState() {
  return this.state.getState();
}
/** Returns the live status (timings, fragment counts, plan text) of the query. */
public QueryStatus getQueryStatus() {
  return this.fragmentManager.getStatus();
}
/**
 * Narrow callback surface handed to the fragment manager so it can report
 * failures and final results back to the enclosing {@code Foreman} without
 * holding a direct reference to it.
 */
class ForemanManagerListener{
  void fail(String message, Throwable t) {
    // BUG FIX: previously invoked ForemanManagerListener.this.fail(...), i.e.
    // this very method, causing unbounded recursion (StackOverflowError).
    // Delegate to the enclosing Foreman, matching cleanupAndSendResult below.
    Foreman.this.fail(message, t);
  }
  void cleanupAndSendResult(QueryResult result) {
    Foreman.this.cleanupAndSendResult(result);
  }
}
/**
 * Orders foremen by hash code.
 *
 * BUG FIX: previously returned {@code o.hashCode() - o.hashCode()}, which is
 * always 0 (every foreman compared equal). Now compares this object's hash
 * against the other's, via Integer.compare to avoid int-subtraction overflow.
 */
@Override
public int compareTo(Object o) {
  return Integer.compare(this.hashCode(), o.hashCode());
}
}
| |
// Copyright 2018 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.exec;
import build.bazel.remote.execution.v2.Platform;
import com.google.common.base.Preconditions;
import com.google.common.hash.HashCode;
import com.google.devtools.build.lib.actions.ActionContext;
import com.google.devtools.build.lib.actions.ActionInput;
import com.google.devtools.build.lib.actions.ExecException;
import com.google.devtools.build.lib.actions.FileArtifactValue;
import com.google.devtools.build.lib.actions.MetadataProvider;
import com.google.devtools.build.lib.actions.Spawn;
import com.google.devtools.build.lib.actions.SpawnResult;
import com.google.devtools.build.lib.actions.Spawns;
import com.google.devtools.build.lib.actions.cache.VirtualActionInput;
import com.google.devtools.build.lib.analysis.platform.PlatformUtils;
import com.google.devtools.build.lib.exec.Protos.Digest;
import com.google.devtools.build.lib.exec.Protos.File;
import com.google.devtools.build.lib.exec.Protos.SpawnExec;
import com.google.devtools.build.lib.remote.options.RemoteOptions;
import com.google.devtools.build.lib.util.io.MessageOutputStream;
import com.google.devtools.build.lib.vfs.DigestHashFunction;
import com.google.devtools.build.lib.vfs.Dirent;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.build.lib.vfs.Symlinks;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.function.Consumer;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.annotation.Nullable;
/**
 * A logging utility for spawns.
 *
 * <p>Serializes each executed {@link Spawn} — its arguments, environment,
 * input/output files with digests, platform, and result metadata — as a
 * {@link SpawnExec} proto and writes it to the execution log stream.</p>
 */
public class SpawnLogContext implements ActionContext {
  private static final Logger logger = Logger.getLogger(SpawnLogContext.class.getName());
  // Root against which all exec-path strings are resolved.
  private final Path execRoot;
  // Destination stream for SpawnExec records; closed via close().
  private final MessageOutputStream executionLog;
  // May be null when remote execution options are not configured.
  @Nullable private final RemoteOptions remoteOptions;
  public SpawnLogContext(
      Path execRoot, MessageOutputStream executionLog, @Nullable RemoteOptions remoteOptions) {
    this.execRoot = execRoot;
    this.executionLog = executionLog;
    this.remoteOptions = remoteOptions;
  }
  /** Log the executed spawn to the output stream. */
  public void logSpawn(
      Spawn spawn,
      MetadataProvider metadataProvider,
      SortedMap<PathFragment, ActionInput> inputMap,
      Duration timeout,
      SpawnResult result)
      throws IOException, ExecException {
    SortedMap<Path, ActionInput> existingOutputs = listExistingOutputs(spawn);
    SpawnExec.Builder builder = SpawnExec.newBuilder();
    builder.addAllCommandArgs(spawn.getArguments());
    Map<String, String> env = spawn.getEnvironment();
    // Sorting the environment pairs by variable name.
    TreeSet<String> variables = new TreeSet<>(env.keySet());
    for (String var : variables) {
      builder.addEnvironmentVariablesBuilder().setName(var).setValue(env.get(var));
    }
    try {
      // Record every input with its digest; directories are expanded recursively.
      for (Map.Entry<PathFragment, ActionInput> e : inputMap.entrySet()) {
        ActionInput input = e.getValue();
        Path inputPath = execRoot.getRelative(input.getExecPathString());
        if (inputPath.isDirectory()) {
          listDirectoryContents(inputPath, (file) -> builder.addInputs(file), metadataProvider);
        } else {
          Digest digest = computeDigest(input, null, metadataProvider);
          builder.addInputsBuilder().setPath(input.getExecPathString()).setDigest(digest);
        }
      }
    } catch (IOException e) {
      // Input digesting is best-effort: a failure produces a partial log entry,
      // not a spawn failure.
      logger.log(Level.WARNING, "Error computing spawn inputs", e);
    }
    // Declared outputs, sorted for deterministic log output.
    ArrayList<String> outputPaths = new ArrayList<>();
    for (ActionInput output : spawn.getOutputFiles()) {
      outputPaths.add(output.getExecPathString());
    }
    Collections.sort(outputPaths);
    builder.addAllListedOutputs(outputPaths);
    // Outputs that actually exist on disk, with digests where computable.
    for (Map.Entry<Path, ActionInput> e : existingOutputs.entrySet()) {
      Path path = e.getKey();
      if (path.isDirectory()) {
        listDirectoryContents(path, (file) -> builder.addActualOutputs(file), metadataProvider);
      } else {
        File.Builder outputBuilder = builder.addActualOutputsBuilder();
        outputBuilder.setPath(path.relativeTo(execRoot).toString());
        try {
          outputBuilder.setDigest(computeDigest(e.getValue(), path, metadataProvider));
        } catch (IOException ex) {
          logger.log(Level.WARNING, "Error computing spawn event output properties", ex);
        }
      }
    }
    builder.setRemotable(Spawns.mayBeExecutedRemotely(spawn));
    Platform execPlatform = PlatformUtils.getPlatformProto(spawn, remoteOptions);
    if (execPlatform != null) {
      builder.setPlatform(buildPlatform(execPlatform));
    }
    // Status/timeout are only recorded when non-default.
    if (result.status() != SpawnResult.Status.SUCCESS) {
      builder.setStatus(result.status().toString());
    }
    if (!timeout.isZero()) {
      builder.setTimeoutMillis(timeout.toMillis());
    }
    builder.setCacheable(Spawns.mayBeCached(spawn));
    builder.setExitCode(result.exitCode());
    builder.setRemoteCacheHit(result.isCacheHit());
    builder.setRunner(result.getRunnerName());
    String progressMessage = spawn.getResourceOwner().getProgressMessage();
    if (progressMessage != null) {
      builder.setProgressMessage(progressMessage);
    }
    builder.setMnemonic(spawn.getMnemonic());
    executionLog.write(builder.build());
  }
  /** Closes the underlying execution-log stream. */
  public void close() throws IOException {
    executionLog.close();
  }
  /** Converts a remote-execution Platform proto into the log's Platform proto. */
  private static Protos.Platform buildPlatform(Platform platform) {
    Protos.Platform.Builder platformBuilder = Protos.Platform.newBuilder();
    for (Platform.Property p : platform.getPropertiesList()) {
      platformBuilder.addPropertiesBuilder().setName(p.getName()).setValue(p.getValue());
    }
    return platformBuilder.build();
  }
  /** Returns the spawn's declared outputs that actually exist on disk, keyed by path. */
  private SortedMap<Path, ActionInput> listExistingOutputs(Spawn spawn) {
    TreeMap<Path, ActionInput> result = new TreeMap<>();
    for (ActionInput output : spawn.getOutputFiles()) {
      Path outputPath = execRoot.getRelative(output.getExecPathString());
      // TODO(olaola): once symlink API proposal is implemented, report symlinks here.
      if (outputPath.exists()) {
        result.put(outputPath, output);
      }
    }
    return result;
  }
  /**
   * Recursively reports every regular file under {@code path} (sorted by name)
   * to {@code addFile}, with its digest. I/O errors are logged, not thrown.
   */
  private void listDirectoryContents(
      Path path, Consumer<File> addFile, MetadataProvider metadataProvider) {
    try {
      // TODO(olaola): once symlink API proposal is implemented, report symlinks here.
      List<Dirent> sortedDirent = new ArrayList<>(path.readdir(Symlinks.NOFOLLOW));
      sortedDirent.sort(Comparator.comparing(Dirent::getName));
      for (Dirent dirent : sortedDirent) {
        String name = dirent.getName();
        Path child = path.getRelative(name);
        if (dirent.getType() == Dirent.Type.DIRECTORY) {
          listDirectoryContents(child, addFile, metadataProvider);
        } else {
          addFile.accept(
              File.newBuilder()
                  .setPath(child.relativeTo(execRoot).toString())
                  .setDigest(computeDigest(null, child, metadataProvider))
                  .build());
        }
      }
    } catch (IOException e) {
      logger.log(Level.WARNING, "Error computing spawn event file properties", e);
    }
  }
  /**
   * Computes the digest of the given ActionInput or corresponding path. Will try to access the
   * Metadata cache first, if it is available, and fall back to digesting the contents manually.
   */
  private Digest computeDigest(
      @Nullable ActionInput input, @Nullable Path path, MetadataProvider metadataProvider)
      throws IOException {
    // At least one of input/path must be provided.
    Preconditions.checkArgument(input != null || path != null);
    DigestHashFunction hashFunction = execRoot.getFileSystem().getDigestFunction();
    Digest.Builder digest = Digest.newBuilder().setHashFunctionName(hashFunction.toString());
    if (input != null) {
      // Virtual inputs have no backing file: hash their in-memory bytes.
      if (input instanceof VirtualActionInput) {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        ((VirtualActionInput) input).writeTo(buffer);
        byte[] blob = buffer.toByteArray();
        return digest
            .setHash(hashFunction.getHashFunction().hashBytes(blob).toString())
            .setSizeBytes(blob.length)
            .build();
      }
      // Try to access the cached metadata, otherwise fall back to local computation.
      try {
        FileArtifactValue metadata = metadataProvider.getMetadata(input);
        if (metadata != null) {
          byte[] hash = metadata.getDigest();
          if (hash != null) {
            return digest
                .setHash(HashCode.fromBytes(hash).toString())
                .setSizeBytes(metadata.getSize())
                .build();
          }
        }
      } catch (IOException | IllegalStateException e) {
        // Pass through to local computation.
      }
    }
    if (path == null) {
      path = execRoot.getRelative(input.getExecPath());
    }
    // Compute digest manually.
    return digest
        .setHash(HashCode.fromBytes(path.getDigest()).toString())
        .setSizeBytes(path.getFileSize())
        .build();
  }
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.cloudformation;
import com.amazonaws.services.cloudformation.model.*;
/**
 * Abstract implementation of {@code AmazonCloudFormationAsync}. Convenient
 * method forms pass through to the corresponding overload that takes a request
 * object and an {@code AsyncHandler}, which throws an
 * {@code UnsupportedOperationException}.
 *
 * <p>Generated SDK boilerplate: every single-argument async method delegates
 * to its two-argument (request + handler) overload with a {@code null}
 * handler; the two-argument overloads are stubs for subclasses to override.</p>
 */
public class AbstractAmazonCloudFormationAsync extends
    AbstractAmazonCloudFormation implements AmazonCloudFormationAsync {
  protected AbstractAmazonCloudFormationAsync() {
  }
  @Override
  public java.util.concurrent.Future<Void> cancelUpdateStackAsync(
      CancelUpdateStackRequest request) {
    return cancelUpdateStackAsync(request, null);
  }
  @Override
  public java.util.concurrent.Future<Void> cancelUpdateStackAsync(
      CancelUpdateStackRequest request,
      com.amazonaws.handlers.AsyncHandler<CancelUpdateStackRequest, Void> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
  }
  @Override
  public java.util.concurrent.Future<CreateStackResult> createStackAsync(
      CreateStackRequest request) {
    return createStackAsync(request, null);
  }
  @Override
  public java.util.concurrent.Future<CreateStackResult> createStackAsync(
      CreateStackRequest request,
      com.amazonaws.handlers.AsyncHandler<CreateStackRequest, CreateStackResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
  }
  @Override
  public java.util.concurrent.Future<Void> deleteStackAsync(
      DeleteStackRequest request) {
    return deleteStackAsync(request, null);
  }
  @Override
  public java.util.concurrent.Future<Void> deleteStackAsync(
      DeleteStackRequest request,
      com.amazonaws.handlers.AsyncHandler<DeleteStackRequest, Void> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
  }
  @Override
  public java.util.concurrent.Future<DescribeAccountLimitsResult> describeAccountLimitsAsync(
      DescribeAccountLimitsRequest request) {
    return describeAccountLimitsAsync(request, null);
  }
  @Override
  public java.util.concurrent.Future<DescribeAccountLimitsResult> describeAccountLimitsAsync(
      DescribeAccountLimitsRequest request,
      com.amazonaws.handlers.AsyncHandler<DescribeAccountLimitsRequest, DescribeAccountLimitsResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
  }
  @Override
  public java.util.concurrent.Future<DescribeStackEventsResult> describeStackEventsAsync(
      DescribeStackEventsRequest request) {
    return describeStackEventsAsync(request, null);
  }
  @Override
  public java.util.concurrent.Future<DescribeStackEventsResult> describeStackEventsAsync(
      DescribeStackEventsRequest request,
      com.amazonaws.handlers.AsyncHandler<DescribeStackEventsRequest, DescribeStackEventsResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
  }
  @Override
  public java.util.concurrent.Future<DescribeStackResourceResult> describeStackResourceAsync(
      DescribeStackResourceRequest request) {
    return describeStackResourceAsync(request, null);
  }
  @Override
  public java.util.concurrent.Future<DescribeStackResourceResult> describeStackResourceAsync(
      DescribeStackResourceRequest request,
      com.amazonaws.handlers.AsyncHandler<DescribeStackResourceRequest, DescribeStackResourceResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
  }
  @Override
  public java.util.concurrent.Future<DescribeStackResourcesResult> describeStackResourcesAsync(
      DescribeStackResourcesRequest request) {
    return describeStackResourcesAsync(request, null);
  }
  @Override
  public java.util.concurrent.Future<DescribeStackResourcesResult> describeStackResourcesAsync(
      DescribeStackResourcesRequest request,
      com.amazonaws.handlers.AsyncHandler<DescribeStackResourcesRequest, DescribeStackResourcesResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
  }
  @Override
  public java.util.concurrent.Future<DescribeStacksResult> describeStacksAsync(
      DescribeStacksRequest request) {
    return describeStacksAsync(request, null);
  }
  @Override
  public java.util.concurrent.Future<DescribeStacksResult> describeStacksAsync(
      DescribeStacksRequest request,
      com.amazonaws.handlers.AsyncHandler<DescribeStacksRequest, DescribeStacksResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
  }
  /**
   * Simplified method form for invoking the DescribeStacks operation.
   *
   * @see #describeStacksAsync(DescribeStacksRequest)
   */
  @Override
  public java.util.concurrent.Future<DescribeStacksResult> describeStacksAsync() {
    return describeStacksAsync(new DescribeStacksRequest());
  }
  /**
   * Simplified method form for invoking the DescribeStacks operation with an
   * AsyncHandler.
   *
   * @see #describeStacksAsync(DescribeStacksRequest,
   *      com.amazonaws.handlers.AsyncHandler)
   */
  public java.util.concurrent.Future<DescribeStacksResult> describeStacksAsync(
      com.amazonaws.handlers.AsyncHandler<DescribeStacksRequest, DescribeStacksResult> asyncHandler) {
    return describeStacksAsync(new DescribeStacksRequest(), asyncHandler);
  }
  @Override
  public java.util.concurrent.Future<EstimateTemplateCostResult> estimateTemplateCostAsync(
      EstimateTemplateCostRequest request) {
    return estimateTemplateCostAsync(request, null);
  }
  @Override
  public java.util.concurrent.Future<EstimateTemplateCostResult> estimateTemplateCostAsync(
      EstimateTemplateCostRequest request,
      com.amazonaws.handlers.AsyncHandler<EstimateTemplateCostRequest, EstimateTemplateCostResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
  }
  /**
   * Simplified method form for invoking the EstimateTemplateCost operation.
   *
   * @see #estimateTemplateCostAsync(EstimateTemplateCostRequest)
   */
  @Override
  public java.util.concurrent.Future<EstimateTemplateCostResult> estimateTemplateCostAsync() {
    return estimateTemplateCostAsync(new EstimateTemplateCostRequest());
  }
  /**
   * Simplified method form for invoking the EstimateTemplateCost operation
   * with an AsyncHandler.
   *
   * @see #estimateTemplateCostAsync(EstimateTemplateCostRequest,
   *      com.amazonaws.handlers.AsyncHandler)
   */
  public java.util.concurrent.Future<EstimateTemplateCostResult> estimateTemplateCostAsync(
      com.amazonaws.handlers.AsyncHandler<EstimateTemplateCostRequest, EstimateTemplateCostResult> asyncHandler) {
    return estimateTemplateCostAsync(new EstimateTemplateCostRequest(),
        asyncHandler);
  }
  @Override
  public java.util.concurrent.Future<GetStackPolicyResult> getStackPolicyAsync(
      GetStackPolicyRequest request) {
    return getStackPolicyAsync(request, null);
  }
  @Override
  public java.util.concurrent.Future<GetStackPolicyResult> getStackPolicyAsync(
      GetStackPolicyRequest request,
      com.amazonaws.handlers.AsyncHandler<GetStackPolicyRequest, GetStackPolicyResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
  }
  @Override
  public java.util.concurrent.Future<GetTemplateResult> getTemplateAsync(
      GetTemplateRequest request) {
    return getTemplateAsync(request, null);
  }
  @Override
  public java.util.concurrent.Future<GetTemplateResult> getTemplateAsync(
      GetTemplateRequest request,
      com.amazonaws.handlers.AsyncHandler<GetTemplateRequest, GetTemplateResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
  }
  @Override
  public java.util.concurrent.Future<GetTemplateSummaryResult> getTemplateSummaryAsync(
      GetTemplateSummaryRequest request) {
    return getTemplateSummaryAsync(request, null);
  }
  @Override
  public java.util.concurrent.Future<GetTemplateSummaryResult> getTemplateSummaryAsync(
      GetTemplateSummaryRequest request,
      com.amazonaws.handlers.AsyncHandler<GetTemplateSummaryRequest, GetTemplateSummaryResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
  }
  /**
   * Simplified method form for invoking the GetTemplateSummary operation.
   *
   * @see #getTemplateSummaryAsync(GetTemplateSummaryRequest)
   */
  @Override
  public java.util.concurrent.Future<GetTemplateSummaryResult> getTemplateSummaryAsync() {
    return getTemplateSummaryAsync(new GetTemplateSummaryRequest());
  }
  /**
   * Simplified method form for invoking the GetTemplateSummary operation with
   * an AsyncHandler.
   *
   * @see #getTemplateSummaryAsync(GetTemplateSummaryRequest,
   *      com.amazonaws.handlers.AsyncHandler)
   */
  public java.util.concurrent.Future<GetTemplateSummaryResult> getTemplateSummaryAsync(
      com.amazonaws.handlers.AsyncHandler<GetTemplateSummaryRequest, GetTemplateSummaryResult> asyncHandler) {
    return getTemplateSummaryAsync(new GetTemplateSummaryRequest(),
        asyncHandler);
  }
  @Override
  public java.util.concurrent.Future<ListStackResourcesResult> listStackResourcesAsync(
      ListStackResourcesRequest request) {
    return listStackResourcesAsync(request, null);
  }
  @Override
  public java.util.concurrent.Future<ListStackResourcesResult> listStackResourcesAsync(
      ListStackResourcesRequest request,
      com.amazonaws.handlers.AsyncHandler<ListStackResourcesRequest, ListStackResourcesResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
  }
  @Override
  public java.util.concurrent.Future<ListStacksResult> listStacksAsync(
      ListStacksRequest request) {
    return listStacksAsync(request, null);
  }
  @Override
  public java.util.concurrent.Future<ListStacksResult> listStacksAsync(
      ListStacksRequest request,
      com.amazonaws.handlers.AsyncHandler<ListStacksRequest, ListStacksResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
  }
  /**
   * Simplified method form for invoking the ListStacks operation.
   *
   * @see #listStacksAsync(ListStacksRequest)
   */
  @Override
  public java.util.concurrent.Future<ListStacksResult> listStacksAsync() {
    return listStacksAsync(new ListStacksRequest());
  }
  /**
   * Simplified method form for invoking the ListStacks operation with an
   * AsyncHandler.
   *
   * @see #listStacksAsync(ListStacksRequest,
   *      com.amazonaws.handlers.AsyncHandler)
   */
  public java.util.concurrent.Future<ListStacksResult> listStacksAsync(
      com.amazonaws.handlers.AsyncHandler<ListStacksRequest, ListStacksResult> asyncHandler) {
    return listStacksAsync(new ListStacksRequest(), asyncHandler);
  }
  @Override
  public java.util.concurrent.Future<Void> setStackPolicyAsync(
      SetStackPolicyRequest request) {
    return setStackPolicyAsync(request, null);
  }
  @Override
  public java.util.concurrent.Future<Void> setStackPolicyAsync(
      SetStackPolicyRequest request,
      com.amazonaws.handlers.AsyncHandler<SetStackPolicyRequest, Void> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
  }
  @Override
  public java.util.concurrent.Future<Void> signalResourceAsync(
      SignalResourceRequest request) {
    return signalResourceAsync(request, null);
  }
  @Override
  public java.util.concurrent.Future<Void> signalResourceAsync(
      SignalResourceRequest request,
      com.amazonaws.handlers.AsyncHandler<SignalResourceRequest, Void> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
  }
  @Override
  public java.util.concurrent.Future<UpdateStackResult> updateStackAsync(
      UpdateStackRequest request) {
    return updateStackAsync(request, null);
  }
  @Override
  public java.util.concurrent.Future<UpdateStackResult> updateStackAsync(
      UpdateStackRequest request,
      com.amazonaws.handlers.AsyncHandler<UpdateStackRequest, UpdateStackResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
  }
  @Override
  public java.util.concurrent.Future<ValidateTemplateResult> validateTemplateAsync(
      ValidateTemplateRequest request) {
    return validateTemplateAsync(request, null);
  }
  @Override
  public java.util.concurrent.Future<ValidateTemplateResult> validateTemplateAsync(
      ValidateTemplateRequest request,
      com.amazonaws.handlers.AsyncHandler<ValidateTemplateRequest, ValidateTemplateResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
  }
}
| |
/**
* Copyright (c) 2007-2014 Kaazing Corporation. All rights reserved.
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.kaazing.gateway.transport.http.security.auth.challenge;
import static org.junit.Assert.assertEquals;
import static org.kaazing.gateway.resource.address.ResourceAddress.NEXT_PROTOCOL;
import static org.kaazing.gateway.resource.address.http.HttpResourceAddress.REALM_CHALLENGE_SCHEME;
import static org.kaazing.gateway.resource.address.http.HttpResourceAddress.REALM_DESCRIPTION;
import org.jmock.Expectations;
import org.jmock.Mockery;
import org.jmock.lib.legacy.ClassImposteriser;
import org.junit.Before;
import org.junit.Test;
import org.kaazing.gateway.resource.address.ResourceAddress;
import org.kaazing.gateway.transport.http.HttpStatus;
import org.kaazing.gateway.transport.http.bridge.HttpRequestMessage;
import org.kaazing.gateway.transport.http.bridge.HttpResponseMessage;
public class BasicHttpChallengeFactoryTest {
// Factory under test.
BasicHttpChallengeFactory factory;
// jMock context used to mock the (concrete) ResourceAddress class.
Mockery context;

/** Creates a fresh factory and a class-imposterising jMock context per test. */
@Before
public void setUp() throws Exception {
  factory = new BasicHttpChallengeFactory();
  context = new Mockery() {
    {
      // ResourceAddress is a class, not an interface, so class imposterisation is required.
      setImposteriser(ClassImposteriser.INSTANCE);
    }
  };
}
/** A challenge with no extra parameters carries only the scheme and realm. */
@Test
public void canBuildASimpleChallenge() throws Exception {
  final HttpRequestMessage request = new HttpRequestMessage();
  final ResourceAddress localAddress = context.mock(ResourceAddress.class);
  request.setLocalAddress(localAddress);
  context.checking(new Expectations() {
    {
      allowing(localAddress).getOption(REALM_DESCRIPTION);
      will(returnValue("Realm Description"));
      allowing(localAddress).getOption(REALM_CHALLENGE_SCHEME);
      will(returnValue("Basic"));
    }
  });
  HttpResponseMessage challenge = factory.createChallenge(request);
  context.assertIsSatisfied();
  assertEquals(HttpStatus.CLIENT_UNAUTHORIZED, challenge.getStatus());
  String expected = "Basic realm=\"Realm Description\"";
  assertEquals(expected, challenge.getHeader("WWW-Authenticate"));
}
@Test
public void canBuildASimpleChallengeWithParams() throws Exception {
final HttpRequestMessage request = new HttpRequestMessage();
final ResourceAddress address = context.mock(ResourceAddress.class);
request.setLocalAddress(address);
context.checking(new Expectations() {
{
allowing(address).getOption(REALM_DESCRIPTION);
will(returnValue("Realm Description"));
allowing(address).getOption(REALM_CHALLENGE_SCHEME);
will(returnValue("Basic"));
}
});
Object[] params = new Object[] { "foo=\"bar\"", "baz=\"quxx\"" };
HttpResponseMessage response = factory.createChallenge(request, params);
context.assertIsSatisfied();
assertEquals(HttpStatus.CLIENT_UNAUTHORIZED, response.getStatus());
String expected = "Basic realm=\"Realm Description\" foo=\"bar\" baz=\"quxx\"";
assertEquals(expected, response.getHeader("WWW-Authenticate"));
}
@Test
public void canBuildASimpleChallengeWithNullParams() throws Exception {
final HttpRequestMessage request = new HttpRequestMessage();
final ResourceAddress address = context.mock(ResourceAddress.class);
request.setLocalAddress(address);
context.checking(new Expectations() {
{
allowing(address).getOption(REALM_DESCRIPTION);
will(returnValue("Realm Description"));
allowing(address).getOption(REALM_CHALLENGE_SCHEME);
will(returnValue("Basic"));
}
});
Object[] params = null;
HttpResponseMessage response = factory.createChallenge(request, params);
context.assertIsSatisfied();
assertEquals(HttpStatus.CLIENT_UNAUTHORIZED, response.getStatus());
String expected = "Basic realm=\"Realm Description\"";
assertEquals(expected, response.getHeader("WWW-Authenticate"));
}
@Test
public void canBuildAnApplicationChallenge() throws Exception {
final HttpRequestMessage request = new HttpRequestMessage();
final ResourceAddress address = context.mock(ResourceAddress.class);
final ResourceAddress transportAddress = context.mock(ResourceAddress.class, "transportAddress");
request.setLocalAddress(address);
context.checking(new Expectations() {
{
allowing(address).getOption(REALM_DESCRIPTION);
will(returnValue("Realm Description"));
allowing(address).getOption(REALM_CHALLENGE_SCHEME);
will(returnValue("Application Basic"));
allowing(address).getOption(NEXT_PROTOCOL);
will(returnValue("ws/rfc7455"));
allowing(address).getTransport();
will(returnValue(transportAddress));
allowing(transportAddress).getOption(NEXT_PROTOCOL);
will(returnValue("http/1.1"));
}
});
HttpResponseMessage response = factory.createChallenge(request);
context.assertIsSatisfied();
assertEquals(HttpStatus.CLIENT_UNAUTHORIZED, response.getStatus());
assertEquals("Application Basic realm=\"Realm Description\"", response.getHeader("WWW-Authenticate"));
}
@Test
public void canBuildAnApplicationChallengeWithParams() throws Exception {
final HttpRequestMessage request = new HttpRequestMessage();
final ResourceAddress address = context.mock(ResourceAddress.class);
final ResourceAddress transportAddress = context.mock(ResourceAddress.class, "transportAddress");
request.setLocalAddress(address);
context.checking(new Expectations() {
{
allowing(address).getOption(REALM_DESCRIPTION);
will(returnValue("Realm Description"));
allowing(address).getOption(REALM_CHALLENGE_SCHEME);
will(returnValue("Application Basic"));
allowing(address).getOption(NEXT_PROTOCOL);
will(returnValue("ws/rfc7455"));
allowing(address).getTransport();
will(returnValue(transportAddress));
allowing(transportAddress).getOption(NEXT_PROTOCOL);
will(returnValue("http/1.1"));
}
});
Object[] params = new Object[] { "foo=\"bar\"", "baz=\"quxx\"" };
HttpResponseMessage response = factory.createChallenge(request, params);
context.assertIsSatisfied();
assertEquals(HttpStatus.CLIENT_UNAUTHORIZED, response.getStatus());
String expected = "Application Basic realm=\"Realm Description\" foo=\"bar\" baz=\"quxx\"";
assertEquals(expected, response.getHeader("WWW-Authenticate"));
}
@Test
public void canBuildAnApplicationChallengeWithNullParams() throws Exception {
final HttpRequestMessage request = new HttpRequestMessage();
final ResourceAddress address = context.mock(ResourceAddress.class);
final ResourceAddress transportAddress = context.mock(ResourceAddress.class, "transportAddress");
request.setLocalAddress(address);
context.checking(new Expectations() {
{
allowing(address).getOption(REALM_DESCRIPTION);
will(returnValue("Realm Description"));
allowing(address).getOption(REALM_CHALLENGE_SCHEME);
will(returnValue("Application Basic"));
allowing(address).getOption(NEXT_PROTOCOL);
will(returnValue("ws/rfc7455"));
allowing(address).getTransport();
will(returnValue(transportAddress));
allowing(transportAddress).getOption(NEXT_PROTOCOL);
will(returnValue("http/1.1"));
}
});
Object[] params = null;
HttpResponseMessage response = factory.createChallenge(request, params);
context.assertIsSatisfied();
assertEquals(HttpStatus.CLIENT_UNAUTHORIZED, response.getStatus());
String expected = "Application Basic realm=\"Realm Description\"";
assertEquals(expected, response.getHeader("WWW-Authenticate"));
}
@Test
public void canBuildAChallengeWhenAuthTypeIsNull() throws Exception {
final HttpRequestMessage request = new HttpRequestMessage();
final ResourceAddress address = context.mock(ResourceAddress.class);
request.setLocalAddress(address);
context.checking(new Expectations() {
{
allowing(address).getOption(REALM_DESCRIPTION);
will(returnValue("Realm Description"));
allowing(address).getOption(REALM_CHALLENGE_SCHEME);
will(returnValue(null));
}
});
HttpResponseMessage response = factory.createChallenge(request);
context.assertIsSatisfied();
assertEquals(HttpStatus.CLIENT_UNAUTHORIZED, response.getStatus());
assertEquals("Basic realm=\"Realm Description\"", response.getHeader("WWW-Authenticate"));
}
}
| |
/**
* Copyright 2011 The PlayN Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package playn.core.json;
import java.io.IOException;
import java.util.ArrayDeque;
import java.util.Collection;
import java.util.Deque;
import java.util.Map;
import java.util.Stack;

import playn.core.Json;
/**
* Internal class that handles emitting to an {@link Appendable}. Users only see the public
* subclasses, {@link JsonStringWriter} and {@link JsonAppendableWriter}.
*
* @param <SELF> A subclass of {@link JsonSink}.
*/
class JsonWriterBase<SELF extends JsonSink<SELF>> implements JsonSink<SELF> {
protected final Appendable appendable;
private Stack<Boolean> states = new Stack<Boolean>();
private boolean first = true;
private boolean inObject;
JsonWriterBase(Appendable appendable) {
this.appendable = appendable;
}
/**
* This is guaranteed to be safe as the type of "this" will always be the type of "SELF".
*/
@SuppressWarnings("unchecked")
private SELF castThis() {
return (SELF) this;
}
@Override
public SELF array(Collection<?> c) {
return array(null, c);
}
@Override
public SELF array(Json.Array c) {
return array(null, c);
}
@Override
public SELF array(String key, Collection<?> c) {
if (key == null)
array();
else
array(key);
for (Object o : c) {
value(o);
}
return end();
}
@Override
public SELF array(String key, Json.Array c) {
if (key == null)
array();
else
array(key);
c.write(this);
return end();
}
@Override
public SELF object(Map<?, ?> map) {
return object(null, map);
}
@Override
public SELF object(Json.Object map) {
return object(null, map);
}
@Override
public SELF object(String key, Map<?, ?> map) {
if (key == null)
object();
else
object(key);
for (Map.Entry<?, ?> entry : map.entrySet()) {
Object o = entry.getValue();
if (!(entry.getKey() instanceof String))
throw new JsonWriterException("Invalid key type for map: "
+ (entry.getKey() == null ? "null" : entry.getKey().getClass()));
String k = (String) entry.getKey();
value(k, o);
}
return end();
}
@Override
public SELF object(String key, Json.Object obj) {
if (key == null)
object();
else
object(key);
obj.write(this);
return end();
}
@Override
public SELF nul() {
preValue();
raw("null");
return castThis();
}
@Override
public SELF nul(String key) {
preValue(key);
raw("null");
return castThis();
}
@Override
public SELF value(Object o) {
if (o == null)
return nul();
else if (o instanceof String)
return value((String) o);
else if (o instanceof Number)
return value(((Number) o));
else if (o instanceof Boolean)
return value((boolean) (Boolean) o);
else if (o instanceof Collection)
return array((Collection<?>) o);
else if (o instanceof Map)
return object((Map<?, ?>) o);
else if (JsonTypes.isArray(o))
return array((Json.Array)o);
else if (JsonTypes.isObject(o))
return object((Json.Object)o);
// TODO(mmastrac): Implement in future playn-server
// else if (o.getClass().isArray()) {
// int length = Array.getLength(o);
// array();
// for (int i = 0; i < length; i++)
// value(Array.get(o, i));
// return end();
else
throw new JsonWriterException("Unable to handle type: " + o.getClass());
}
@Override
public SELF value(String key, Object o) {
if (o == null)
return nul(key);
else if (o instanceof String)
return value(key, (String) o);
else if (o instanceof Number)
return value(key, (Number) o);
else if (o instanceof Boolean)
return value(key, (boolean) (Boolean) o);
else if (o instanceof Collection)
return array(key, (Collection<?>) o);
else if (o instanceof Map)
return object(key, (Map<?, ?>) o);
else if (JsonTypes.isArray(o))
return array(key, (Json.Array)o);
else if (JsonTypes.isObject(o))
return object(key, (Json.Object)o);
// TODO(mmastrac): Implement in future playn-server
// else if (o.getClass().isArray()) {
// int length = Array.getLength(o);
// array(key);
// for (int i = 0; i < length; i++)
// value(Array.get(o, i));
// return end();
else
throw new JsonWriterException("Unable to handle type: " + o.getClass());
}
@Override
public SELF value(String s) {
if (s == null)
return nul();
preValue();
emitStringValue(s);
return castThis();
}
@Override
public SELF value(int i) {
preValue();
raw(Integer.toString(i));
return castThis();
}
@Override
public SELF value(boolean b) {
preValue();
raw(Boolean.toString(b));
return castThis();
}
@Override
public SELF value(double d) {
preValue();
raw(Double.toString(d));
return castThis();
}
@Override
public SELF value(float d) {
preValue();
raw(Float.toString(d));
return castThis();
}
@Override
public SELF value(Number n) {
preValue();
if (n == null)
raw("null");
else
raw(n.toString());
return castThis();
}
@Override
public SELF value(String key, String s) {
if (s == null)
return nul(key);
preValue(key);
emitStringValue(s);
return castThis();
}
@Override
public SELF value(String key, int i) {
preValue(key);
raw(Integer.toString(i));
return castThis();
}
@Override
public SELF value(String key, boolean b) {
preValue(key);
raw(Boolean.toString(b));
return castThis();
}
@Override
public SELF value(String key, double d) {
preValue(key);
raw(Double.toString(d));
return castThis();
}
@Override
public SELF value(String key, float d) {
preValue(key);
raw(Float.toString(d));
return castThis();
}
@Override
public SELF value(String key, Number n) {
if (n == null)
return nul(key);
preValue(key);
raw(n.toString());
return castThis();
}
@Override
public SELF array() {
preValue();
states.push(inObject);
inObject = false;
first = true;
raw('[');
return castThis();
}
@Override
public SELF object() {
preValue();
states.push(inObject);
inObject = true;
first = true;
raw('{');
return castThis();
}
@Override
public SELF array(String key) {
preValue(key);
states.push(inObject);
inObject = false;
first = true;
raw('[');
return castThis();
}
@Override
public SELF object(String key) {
preValue(key);
states.push(inObject);
inObject = true;
first = true;
raw('{');
return castThis();
}
@Override
public SELF end() {
if (states.size() == 0)
throw new JsonWriterException("Invalid call to end()");
if (inObject) {
raw('}');
} else {
raw(']');
}
first = false;
inObject = states.pop();
return castThis();
}
/**
* Ensures that the object is in the finished state.
*
* @throws JsonWriterException if the written JSON is not properly balanced, ie: all arrays and
* objects that were started have been properly ended.
*/
protected void doneInternal() {
if (states.size() > 0)
throw new JsonWriterException("Unclosed JSON objects and/or arrays when closing writer");
if (first)
throw new JsonWriterException("Nothing was written to the JSON writer");
}
private void raw(String s) {
try {
appendable.append(s);
} catch (IOException e) {
throw new JsonWriterException(e);
}
}
private void raw(char c) {
try {
appendable.append(c);
} catch (IOException e) {
throw new JsonWriterException(e);
}
}
private void pre() {
if (first) {
first = false;
} else {
if (states.size() == 0)
throw new JsonWriterException("Invalid call to emit a value in a finished JSON writer");
raw(',');
}
}
private void preValue() {
if (inObject)
throw new JsonWriterException("Invalid call to emit a keyless value while writing an object");
pre();
}
private void preValue(String key) {
if (!inObject)
throw new JsonWriterException("Invalid call to emit a key value while not writing an object");
pre();
emitStringValue(key);
raw(':');
}
/**
* Emits a quoted string value, escaping characters that are required to be escaped.
*/
private void emitStringValue(String s) {
raw('"');
char b = 0, c = 0;
for (int i = 0; i < s.length(); i++) {
b = c;
c = s.charAt(i);
switch (c) {
case '\\':
case '"':
raw('\\');
raw(c);
break;
case '/':
// Special case to ensure that </script> doesn't appear in JSON
// output
if (b == '<')
raw('\\');
raw(c);
break;
case '\b':
raw("\\b");
break;
case '\t':
raw("\\t");
break;
case '\n':
raw("\\n");
break;
case '\f':
raw("\\f");
break;
case '\r':
raw("\\r");
break;
default:
if (shouldBeEscaped(c)) {
String t = "000" + Integer.toHexString(c);
raw("\\u" + t.substring(t.length() - "0000".length()));
} else {
raw(c);
}
}
}
raw('"');
}
/**
* json.org spec says that all control characters must be escaped.
*/
private boolean shouldBeEscaped(char c) {
return c < ' ' || (c >= '\u0080' && c < '\u00a0') || (c >= '\u2000' && c < '\u2100');
}
/**
* Used for testing.
*/
static String escape(String s) {
String json = new JsonStringWriter().value(s).write();
return json.substring(1, json.length() - 1);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.benchmark;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import org.apache.druid.benchmark.datagen.BenchmarkColumnSchema;
import org.apache.druid.benchmark.datagen.BenchmarkSchemaInfo;
import org.apache.druid.benchmark.datagen.SegmentGenerator;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.js.JavaScriptConfig;
import org.apache.druid.query.aggregation.BufferAggregator;
import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory;
import org.apache.druid.query.aggregation.JavaScriptAggregatorFactory;
import org.apache.druid.query.expression.TestExprMacroTable;
import org.apache.druid.segment.BaseFloatColumnValueSelector;
import org.apache.druid.segment.ColumnSelectorFactory;
import org.apache.druid.segment.Cursor;
import org.apache.druid.segment.QueryableIndex;
import org.apache.druid.segment.QueryableIndexStorageAdapter;
import org.apache.druid.segment.VirtualColumns;
import org.apache.druid.segment.column.ValueType;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.LinearShardSpec;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.TearDown;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.infra.Blackhole;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
@State(Scope.Benchmark)
@Fork(value = 1)
@Warmup(iterations = 15)
@Measurement(iterations = 30)
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
public class ExpressionAggregationBenchmark
{
  @Param({"1000000"})
  private int rowsPerSegment;

  private SegmentGenerator segmentGenerator;
  private QueryableIndex index;
  private JavaScriptAggregatorFactory javaScriptAggregatorFactory;
  private DoubleSumAggregatorFactory expressionAggregatorFactory;
  // Single-slot aggregation scratch buffer; every aggregator in this benchmark
  // is positioned at offset 0 within it.
  private ByteBuffer aggregationBuffer = ByteBuffer.allocate(Double.BYTES);

  /**
   * Generates one segment with two normally-distributed float columns ("x", "y")
   * and builds the JavaScript and expression aggregator factories under test,
   * both computing: sum of (x > 0 ? x + 1 : y + 1).
   */
  @Setup(Level.Trial)
  public void setup()
  {
    final BenchmarkSchemaInfo schemaInfo = new BenchmarkSchemaInfo(
        ImmutableList.of(
            BenchmarkColumnSchema.makeNormal("x", ValueType.FLOAT, false, 1, 0d, 0d, 10000d, false),
            BenchmarkColumnSchema.makeNormal("y", ValueType.FLOAT, false, 1, 0d, 0d, 10000d, false)
        ),
        ImmutableList.of(),
        Intervals.of("2000/P1D"),
        false
    );

    final DataSegment dataSegment = DataSegment.builder()
                                               .dataSource("foo")
                                               .interval(schemaInfo.getDataInterval())
                                               .version("1")
                                               .shardSpec(new LinearShardSpec(0))
                                               .build();

    this.segmentGenerator = new SegmentGenerator();
    this.index = segmentGenerator.generate(dataSegment, schemaInfo, Granularities.NONE, rowsPerSegment);
    this.javaScriptAggregatorFactory = new JavaScriptAggregatorFactory(
        "name",
        ImmutableList.of("x", "y"),
        "function(current,x,y) { if (x > 0) { return current + x + 1 } else { return current + y + 1 } }",
        "function() { return 0 }",
        "function(a,b) { return a + b }",
        JavaScriptConfig.getEnabledInstance()
    );
    this.expressionAggregatorFactory = new DoubleSumAggregatorFactory(
        "name",
        null,
        "if(x>0,1.0+x,y+1)",
        TestExprMacroTable.INSTANCE
    );
  }

  /** Releases the generated index and its backing temp files. */
  @TearDown(Level.Trial)
  public void tearDown() throws Exception
  {
    if (index != null) {
      index.close();
      index = null;
    }

    if (segmentGenerator != null) {
      segmentGenerator.close();
      segmentGenerator = null;
    }
  }

  @Benchmark
  public void queryUsingJavaScript(Blackhole blackhole)
  {
    final Double result = compute(javaScriptAggregatorFactory::factorizeBuffered);
    blackhole.consume(result);
  }

  @Benchmark
  public void queryUsingExpression(Blackhole blackhole)
  {
    final Double result = compute(expressionAggregatorFactory::factorizeBuffered);
    blackhole.consume(result);
  }

  @Benchmark
  public void queryUsingNative(Blackhole blackhole)
  {
    final Double result = compute(
        columnSelectorFactory ->
            new NativeBufferAggregator(
                columnSelectorFactory.makeColumnValueSelector("x"),
                columnSelectorFactory.makeColumnValueSelector("y")
            )
    );
    blackhole.consume(result);
  }

  /**
   * Runs the given buffer aggregator over every row of the generated segment
   * (single cursor, position 0 of {@link #aggregationBuffer}) and returns the
   * aggregated value.
   */
  private double compute(final Function<ColumnSelectorFactory, BufferAggregator> aggregatorFactory)
  {
    final QueryableIndexStorageAdapter adapter = new QueryableIndexStorageAdapter(index);

    final Sequence<Cursor> cursors = adapter.makeCursors(
        null,
        index.getDataInterval(),
        VirtualColumns.EMPTY,
        Granularities.ALL,
        false,
        null
    );

    final List<Double> results = cursors
        .map(cursor -> {
          final BufferAggregator bufferAggregator = aggregatorFactory.apply(cursor.getColumnSelectorFactory());
          bufferAggregator.init(aggregationBuffer, 0);

          while (!cursor.isDone()) {
            bufferAggregator.aggregate(aggregationBuffer, 0);
            cursor.advance();
          }

          final Double dbl = (Double) bufferAggregator.get(aggregationBuffer, 0);
          bufferAggregator.close();
          return dbl;
        })
        .toList();

    return Iterables.getOnlyElement(results);
  }

  /**
   * Hand-written baseline aggregator computing sum of (x > 0 ? x + 1 : y + 1)
   * directly from the float selectors, without JS or expression evaluation.
   */
  private static class NativeBufferAggregator implements BufferAggregator
  {
    private final BaseFloatColumnValueSelector xSelector;
    private final BaseFloatColumnValueSelector ySelector;

    public NativeBufferAggregator(
        final BaseFloatColumnValueSelector xSelector,
        final BaseFloatColumnValueSelector ySelector
    )
    {
      this.xSelector = xSelector;
      this.ySelector = ySelector;
    }

    @Override
    public void init(final ByteBuffer buf, final int position)
    {
      // Fix: write at the supplied position rather than a hard-coded offset 0,
      // matching get() and keeping the aggregator correct for any buffer slot.
      // (The benchmark happened to work because it always uses position 0.)
      buf.putDouble(position, 0d);
    }

    @Override
    public void aggregate(final ByteBuffer buf, final int position)
    {
      final float x = xSelector.getFloat();
      final double n = x > 0 ? x + 1 : ySelector.getFloat() + 1;
      // Fix: accumulate at the supplied position instead of offset 0.
      buf.putDouble(position, buf.getDouble(position) + n);
    }

    @Override
    public Object get(final ByteBuffer buf, final int position)
    {
      return buf.getDouble(position);
    }

    @Override
    public float getFloat(final ByteBuffer buf, final int position)
    {
      throw new UnsupportedOperationException();
    }

    @Override
    public long getLong(final ByteBuffer buf, final int position)
    {
      throw new UnsupportedOperationException();
    }

    @Override
    public double getDouble(ByteBuffer buf, int position)
    {
      throw new UnsupportedOperationException();
    }

    @Override
    public void close()
    {
    }
  }
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.components.impl;
import com.intellij.ide.plugins.PluginManager;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.AccessToken;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.components.ComponentManager;
import com.intellij.openapi.components.PersistentStateComponent;
import com.intellij.openapi.components.ServiceDescriptor;
import com.intellij.openapi.components.ex.ComponentManagerEx;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.extensions.*;
import com.intellij.openapi.extensions.impl.ExtensionComponentAdapter;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.PlatformUtils;
import com.intellij.util.io.storage.HeavyProcessLatch;
import com.intellij.util.pico.AssignableToComponentAdapter;
import com.intellij.util.pico.CachingConstructorInjectionComponentAdapter;
import com.intellij.util.pico.DefaultPicoContainer;
import org.jetbrains.annotations.NotNull;
import org.picocontainer.*;
import org.picocontainer.defaults.InstanceComponentAdapter;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.function.BiPredicate;
/**
 * Registers services declared via the {@code com.intellij.applicationService} /
 * {@code com.intellij.projectService} extension points into the owning component
 * manager's pico container. Each service is wrapped in a lazy adapter so its
 * implementation class is loaded and instantiated only on first request.
 */
public class ServiceManagerImpl implements Disposable {
  private static final Logger LOG = Logger.getInstance(ServiceManagerImpl.class);

  private static final ExtensionPointName<ServiceDescriptor> APP_SERVICES = new ExtensionPointName<>("com.intellij.applicationService");
  private static final ExtensionPointName<ServiceDescriptor> PROJECT_SERVICES = new ExtensionPointName<>("com.intellij.projectService");

  // Set once by installEP; remembered so dispose() can detach the listener.
  private ExtensionPointName<ServiceDescriptor> myExtensionPointName;
  private ExtensionPointListener<ServiceDescriptor> myExtensionPointListener;

  /** Application-level service manager: installs the applicationService EP. */
  public ServiceManagerImpl() {
    installEP(APP_SERVICES, ApplicationManager.getApplication());
  }

  /** Project-level service manager: installs the projectService EP for the given project. */
  public ServiceManagerImpl(Project project) {
    installEP(PROJECT_SERVICES, project);
  }

  /** For subclasses that install an extension point themselves (or not at all). */
  protected ServiceManagerImpl(boolean ignoreInit) {
  }

  /**
   * Subscribes to the given extension point and mirrors its ServiceDescriptor
   * extensions into the component manager's pico container. May be called at
   * most once per instance.
   */
  protected void installEP(@NotNull ExtensionPointName<ServiceDescriptor> pointName, @NotNull final ComponentManager componentManager) {
    LOG.assertTrue(myExtensionPointName == null, "Already called installEP with " + myExtensionPointName);

    myExtensionPointName = pointName;
    final ExtensionPoint<ServiceDescriptor> extensionPoint = Extensions.getArea(null).getExtensionPoint(pointName);
    final MutablePicoContainer picoContainer = (MutablePicoContainer)componentManager.getPicoContainer();
    myExtensionPointListener = new ExtensionPointListener<ServiceDescriptor>() {
      @Override
      public void extensionAdded(@NotNull final ServiceDescriptor descriptor, final PluginDescriptor pluginDescriptor) {
        if (descriptor.overrides) {
          // Allow to re-define service implementations in plugins.
          // An "overriding" descriptor must actually replace something; failing fast
          // catches plugins that declare overrides="true" for an unknown interface.
          ComponentAdapter oldAdapter = picoContainer.unregisterComponent(descriptor.getInterface());
          if (oldAdapter == null) {
            throw new RuntimeException("Service: " + descriptor.getInterface() + " doesn't override anything");
          }
        }

        // Skip services restricted (via the "os" attribute) to other operating systems.
        if (!Extensions.isComponentSuitableForOs(descriptor.os)) {
          return;
        }

        // empty serviceImplementation means we want to unregister service
        if (!StringUtil.isEmpty(descriptor.getImplementation())) {
          picoContainer.registerComponent(new MyComponentAdapter(descriptor, pluginDescriptor, (ComponentManagerEx)componentManager));
        }
      }

      @Override
      public void extensionRemoved(@NotNull final ServiceDescriptor extension, final PluginDescriptor pluginDescriptor) {
        picoContainer.unregisterComponent(extension.getInterface());
      }
    };
    // Registering the listener also replays extensionAdded for already-present extensions.
    extensionPoint.addExtensionPointListener(myExtensionPointListener);
  }

  /** Returns all service descriptors currently registered on the installed extension point. */
  public List<ServiceDescriptor> getAllDescriptors() {
    ServiceDescriptor[] extensions = Extensions.getExtensions(myExtensionPointName);
    return Arrays.asList(extensions);
  }

  /**
   * Feeds every registered implementation class (with its plugin descriptor, if known)
   * to {@code processor}, without instantiating services; stops early when the
   * processor returns false for a service adapter.
   */
  public static void processAllImplementationClasses(@NotNull ComponentManagerImpl componentManager, @NotNull BiPredicate<Class<?>, PluginDescriptor> processor) {
    Collection adapters = componentManager.getPicoContainer().getComponentAdapters();
    if (adapters.isEmpty()) {
      return;
    }

    for (Object o : adapters) {
      Class aClass;
      if (o instanceof MyComponentAdapter) {
        MyComponentAdapter adapter = (MyComponentAdapter)o;
        PluginDescriptor pluginDescriptor = adapter.myPluginDescriptor;
        try {
          ComponentAdapter delegate = adapter.myDelegate;
          // avoid delegation creation & class initialization
          if (delegate == null) {
            // Load the class without initializing it (initialize=false) so merely
            // enumerating services does not run static initializers.
            ClassLoader classLoader = pluginDescriptor == null ? ServiceManagerImpl.class.getClassLoader() : pluginDescriptor.getPluginClassLoader();
            aClass = Class.forName(adapter.myDescriptor.getImplementation(), false, classLoader);
          }
          else {
            aClass = delegate.getComponentImplementation();
          }
        }
        catch (Throwable e) {
          if (PlatformUtils.isIdeaUltimate()) {
            LOG.error(e);
          }
          else {
            // well, component registered, but required jar is not added to classpath (community edition or junior IDE)
            LOG.warn(e);
          }

          continue;
        }

        if (!processor.test(aClass, pluginDescriptor)) {
          break;
        }
      }
      else if (o instanceof ComponentAdapter && !(o instanceof ExtensionComponentAdapter)) {
        // Non-service component adapters (e.g. regular components) are reported too,
        // but their result does not short-circuit the loop.
        PluginId pluginId = componentManager.getConfig((ComponentAdapter)o);
        // allow InstanceComponentAdapter without pluginId to test
        if (pluginId != null || o instanceof InstanceComponentAdapter) {
          try {
            aClass = ((ComponentAdapter)o).getComponentImplementation();
          }
          catch (Throwable e) {
            LOG.error(e);
            continue;
          }

          processor.test(aClass, pluginId == null ? null : PluginManager.getPlugin(pluginId));
        }
      }
    }
  }

  @Override
  public void dispose() {
    // Detach the listener installed by installEP so this manager stops mirroring the EP.
    final ExtensionPoint<ServiceDescriptor> extensionPoint = Extensions.getArea(null).getExtensionPoint(myExtensionPointName);
    extensionPoint.removeExtensionPointListener(myExtensionPointListener);
  }

  /**
   * Lazy pico adapter for one service: resolves the implementation class and
   * creates/initializes the instance only on the first getComponentInstance call.
   */
  private static class MyComponentAdapter implements AssignableToComponentAdapter, DefaultPicoContainer.LazyComponentAdapter {
    // Created lazily by getDelegate(); guarded by "synchronized (this)" / synchronized getDelegate().
    private ComponentAdapter myDelegate;
    private final ServiceDescriptor myDescriptor;
    private final PluginDescriptor myPluginDescriptor;
    private final ComponentManagerEx myComponentManager;
    // volatile so the double-checked locking fast path in getComponentInstance is safe.
    private volatile Object myInitializedComponentInstance;

    public MyComponentAdapter(final ServiceDescriptor descriptor, final PluginDescriptor pluginDescriptor, ComponentManagerEx componentManager) {
      myDescriptor = descriptor;
      myPluginDescriptor = pluginDescriptor;
      myComponentManager = componentManager;
      myDelegate = null;
    }

    @Override
    public String getComponentKey() {
      // Services are keyed by their interface FQN.
      return myDescriptor.getInterface();
    }

    @Override
    public Class getComponentImplementation() {
      return getDelegate().getComponentImplementation();
    }

    @Override
    public boolean isComponentInstantiated() {
      return myInitializedComponentInstance != null;
    }

    @Override
    public Object getComponentInstance(@NotNull PicoContainer container) throws PicoInitializationException, PicoIntrospectionException {
      // Lock-free fast path for the common already-created case.
      Object instance = myInitializedComponentInstance;
      if (instance != null) {
        return instance;
      }

      synchronized (this) {
        instance = myInitializedComponentInstance;
        if (instance != null) {
          // DCL is fine, field is volatile
          return instance;
        }

        ComponentAdapter delegate = getDelegate();
        if (LOG.isDebugEnabled() &&
            ApplicationManager.getApplication().isWriteAccessAllowed() &&
            !ApplicationManager.getApplication().isUnitTestMode() &&
            PersistentStateComponent.class.isAssignableFrom(delegate.getComponentImplementation())) {
          LOG.warn(new Throwable("Getting service from write-action leads to possible deadlock. Service implementation " + myDescriptor.getImplementation()));
        }

        // prevent storages from flushing and blocking FS
        AccessToken token = HeavyProcessLatch.INSTANCE.processStarted("Creating component '" + myDescriptor.getImplementation() + "'");
        try {
          instance = delegate.getComponentInstance(container);
          if (instance instanceof Disposable) {
            // Tie the service's lifetime to the owning component manager.
            Disposer.register(myComponentManager, (Disposable)instance);
          }
          myComponentManager.initializeComponent(instance, true);
          // Publish only after full initialization so other threads never see a
          // partially-initialized service.
          myInitializedComponentInstance = instance;
          return instance;
        }
        finally {
          token.finish();
        }
      }
    }

    /**
     * Lazily resolves the implementation class (initialize=true this time) through the
     * plugin's class loader and wraps it in a constructor-injection adapter.
     */
    @NotNull
    private synchronized ComponentAdapter getDelegate() {
      if (myDelegate == null) {
        Class<?> implClass;
        try {
          ClassLoader classLoader = myPluginDescriptor != null ? myPluginDescriptor.getPluginClassLoader() : getClass().getClassLoader();
          implClass = Class.forName(myDescriptor.getImplementation(), true, classLoader);
        }
        catch (ClassNotFoundException e) {
          throw new RuntimeException(e);
        }

        myDelegate = new CachingConstructorInjectionComponentAdapter(getComponentKey(), implClass, null, true);
      }
      return myDelegate;
    }

    @Override
    public void verify(final PicoContainer container) throws PicoIntrospectionException {
      getDelegate().verify(container);
    }

    @Override
    public void accept(final PicoVisitor visitor) {
      visitor.visitComponentAdapter(this);
    }

    @Override
    public String getAssignableToClassName() {
      return myDescriptor.getInterface();
    }

    @Override
    public String toString() {
      return "ServiceComponentAdapter[" + myDescriptor.getInterface() + "]: implementation=" + myDescriptor.getImplementation() + ", plugin=" + myPluginDescriptor;
    }
  }
}
| |
package org.docksidestage.postgresql.dbflute.bsentity.customize;
import java.util.List;
import java.util.ArrayList;
import org.dbflute.dbmeta.DBMeta;
import org.dbflute.dbmeta.AbstractEntity;
import org.dbflute.dbmeta.accessory.CustomizeEntity;
import org.dbflute.dbmeta.accessory.MappingValueType;
import org.docksidestage.postgresql.dbflute.allcommon.CDef;
import org.docksidestage.postgresql.dbflute.exentity.customize.*;
/**
* The entity of VendorCheckCursor. <br>
* <pre>
* [primary-key]
*
*
* [column]
* vendor_check_id, type_of_char, type_of_varchar, type_of_vc_array, type_of_text, type_of_numeric_integer, type_of_numeric_bigint, type_of_numeric_decimal, type_of_decimal, type_of_int, type_of_int_array, type_of_int4, type_of_int4_array, type_of_int8, type_of_int8_array, type_of_bigint, type_of_real, type_of_float, type_of_money, type_of_date, type_of_timestamp, type_of_time, type_of_timetz, type_of_interval, type_of_bool, type_of_bit, type_of_bytea, type_of_oid, type_of_uuid, type_of_xml, type_of_json
*
* [sequence]
*
*
* [identity]
*
*
* [version-no]
*
*
* [foreign table]
*
*
* [referrer table]
*
*
* [foreign property]
*
*
* [referrer property]
*
*
* [get/set template]
* /= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
* Long vendorCheckId = entity.getVendorCheckId();
* String typeOfChar = entity.getTypeOfChar();
* String typeOfVarchar = entity.getTypeOfVarchar();
* org.docksidestage.postgresql.mytype.MyArray typeOfVcArray = entity.getTypeOfVcArray();
* String typeOfText = entity.getTypeOfText();
* Integer typeOfNumericInteger = entity.getTypeOfNumericInteger();
* Long typeOfNumericBigint = entity.getTypeOfNumericBigint();
* java.math.BigDecimal typeOfNumericDecimal = entity.getTypeOfNumericDecimal();
* java.math.BigDecimal typeOfDecimal = entity.getTypeOfDecimal();
* Integer typeOfInt = entity.getTypeOfInt();
* org.docksidestage.postgresql.mytype.MyArray typeOfIntArray = entity.getTypeOfIntArray();
* Integer typeOfInt4 = entity.getTypeOfInt4();
* org.docksidestage.postgresql.mytype.MyArray typeOfInt4Array = entity.getTypeOfInt4Array();
* Long typeOfInt8 = entity.getTypeOfInt8();
* org.docksidestage.postgresql.mytype.MyArray typeOfInt8Array = entity.getTypeOfInt8Array();
* Long typeOfBigint = entity.getTypeOfBigint();
* java.math.BigDecimal typeOfReal = entity.getTypeOfReal();
* java.math.BigDecimal typeOfFloat = entity.getTypeOfFloat();
* java.math.BigDecimal typeOfMoney = entity.getTypeOfMoney();
* java.time.LocalDate typeOfDate = entity.getTypeOfDate();
* java.time.LocalDateTime typeOfTimestamp = entity.getTypeOfTimestamp();
* java.time.LocalTime typeOfTime = entity.getTypeOfTime();
* java.time.LocalTime typeOfTimetz = entity.getTypeOfTimetz();
* String typeOfInterval = entity.getTypeOfInterval();
* Boolean typeOfBool = entity.getTypeOfBool();
* Boolean typeOfBit = entity.getTypeOfBit();
* byte[] typeOfBytea = entity.getTypeOfBytea();
* byte[] typeOfOid = entity.getTypeOfOid();
* java.util.UUID typeOfUuid = entity.getTypeOfUuid();
* org.docksidestage.postgresql.mytype.MyXML typeOfXml = entity.getTypeOfXml();
* org.docksidestage.postgresql.mytype.MyJSON typeOfJson = entity.getTypeOfJson();
* entity.setVendorCheckId(vendorCheckId);
* entity.setTypeOfChar(typeOfChar);
* entity.setTypeOfVarchar(typeOfVarchar);
* entity.setTypeOfVcArray(typeOfVcArray);
* entity.setTypeOfText(typeOfText);
* entity.setTypeOfNumericInteger(typeOfNumericInteger);
* entity.setTypeOfNumericBigint(typeOfNumericBigint);
* entity.setTypeOfNumericDecimal(typeOfNumericDecimal);
* entity.setTypeOfDecimal(typeOfDecimal);
* entity.setTypeOfInt(typeOfInt);
* entity.setTypeOfIntArray(typeOfIntArray);
* entity.setTypeOfInt4(typeOfInt4);
* entity.setTypeOfInt4Array(typeOfInt4Array);
* entity.setTypeOfInt8(typeOfInt8);
* entity.setTypeOfInt8Array(typeOfInt8Array);
* entity.setTypeOfBigint(typeOfBigint);
* entity.setTypeOfReal(typeOfReal);
* entity.setTypeOfFloat(typeOfFloat);
* entity.setTypeOfMoney(typeOfMoney);
* entity.setTypeOfDate(typeOfDate);
* entity.setTypeOfTimestamp(typeOfTimestamp);
* entity.setTypeOfTime(typeOfTime);
* entity.setTypeOfTimetz(typeOfTimetz);
* entity.setTypeOfInterval(typeOfInterval);
* entity.setTypeOfBool(typeOfBool);
* entity.setTypeOfBit(typeOfBit);
* entity.setTypeOfBytea(typeOfBytea);
* entity.setTypeOfOid(typeOfOid);
* entity.setTypeOfUuid(typeOfUuid);
* entity.setTypeOfXml(typeOfXml);
* entity.setTypeOfJson(typeOfJson);
* = = = = = = = = = =/
* </pre>
* @author DBFlute(AutoGenerator)
*/
public abstract class BsVendorCheckCursor extends AbstractEntity implements CustomizeEntity {
    // ===================================================================================
    //                                                                          Definition
    //                                                                          ==========
    /** The serial version UID for object serialization. (Default) */
    private static final long serialVersionUID = 1L;
    // ===================================================================================
    //                                                                           Attribute
    //                                                                           =========
    // NOTE(review): attribute fields use the generator's '_' prefix and are only read or
    // written through the generated accessors below, which track property state.
    /** vendor_check_id: {numeric(16), refers to vendor_check.vendor_check_id} */
    protected Long _vendorCheckId;
    /** type_of_char: {bpchar(3), refers to vendor_check.type_of_char} */
    protected String _typeOfChar;
    /** type_of_varchar: {varchar(2147483647), refers to vendor_check.type_of_varchar} */
    protected String _typeOfVarchar;
    /** type_of_vc_array: {_varchar(2147483647), refers to vendor_check.type_of_vc_array} */
    protected org.docksidestage.postgresql.mytype.MyArray _typeOfVcArray;
    /** type_of_text: {text(2147483647), refers to vendor_check.type_of_text} */
    protected String _typeOfText;
    /** type_of_numeric_integer: {numeric(5), refers to vendor_check.type_of_numeric_integer} */
    protected Integer _typeOfNumericInteger;
    /** type_of_numeric_bigint: {numeric(12), refers to vendor_check.type_of_numeric_bigint} */
    protected Long _typeOfNumericBigint;
    /** type_of_numeric_decimal: {numeric(5, 3), refers to vendor_check.type_of_numeric_decimal} */
    protected java.math.BigDecimal _typeOfNumericDecimal;
    /** type_of_decimal: {numeric(131089), refers to vendor_check.type_of_decimal} */
    protected java.math.BigDecimal _typeOfDecimal;
    /** type_of_int: {int4(10), refers to vendor_check.type_of_int} */
    protected Integer _typeOfInt;
    /** type_of_int_array: {_int4(10), refers to vendor_check.type_of_int_array} */
    protected org.docksidestage.postgresql.mytype.MyArray _typeOfIntArray;
    /** type_of_int4: {int4(10), refers to vendor_check.type_of_int4} */
    protected Integer _typeOfInt4;
    /** type_of_int4_array: {_int4(10), refers to vendor_check.type_of_int4_array} */
    protected org.docksidestage.postgresql.mytype.MyArray _typeOfInt4Array;
    /** type_of_int8: {int8(19), refers to vendor_check.type_of_int8} */
    protected Long _typeOfInt8;
    /** type_of_int8_array: {_int8(19), refers to vendor_check.type_of_int8_array} */
    protected org.docksidestage.postgresql.mytype.MyArray _typeOfInt8Array;
    /** type_of_bigint: {int8(19), refers to vendor_check.type_of_bigint} */
    protected Long _typeOfBigint;
    /** type_of_real: {float4(8, 8), refers to vendor_check.type_of_real} */
    protected java.math.BigDecimal _typeOfReal;
    /** type_of_float: {float8(17, 17), refers to vendor_check.type_of_float} */
    protected java.math.BigDecimal _typeOfFloat;
    /** type_of_money: {money(2147483647), refers to vendor_check.type_of_money} */
    protected java.math.BigDecimal _typeOfMoney;
    /** type_of_date: {date(13), refers to vendor_check.type_of_date} */
    protected java.time.LocalDate _typeOfDate;
    /** type_of_timestamp: {timestamp(29, 6), refers to vendor_check.type_of_timestamp} */
    protected java.time.LocalDateTime _typeOfTimestamp;
    /** type_of_time: {time(15, 6), refers to vendor_check.type_of_time} */
    protected java.time.LocalTime _typeOfTime;
    /** type_of_timetz: {timetz(21, 6), refers to vendor_check.type_of_timetz} */
    protected java.time.LocalTime _typeOfTimetz;
    /** type_of_interval: {interval(49, 6), refers to vendor_check.type_of_interval} */
    protected String _typeOfInterval;
    /** type_of_bool: {bool(1), refers to vendor_check.type_of_bool, classification=TrueFalse} */
    protected Boolean _typeOfBool;
    /** type_of_bit: {bit(1), refers to vendor_check.type_of_bit} */
    protected Boolean _typeOfBit;
    /** type_of_bytea: {bytea(2147483647), refers to vendor_check.type_of_bytea} */
    protected byte[] _typeOfBytea;
    /** type_of_oid: {oid(10), refers to vendor_check.type_of_oid} */
    protected byte[] _typeOfOid;
    /** type_of_uuid: {uuid(2147483647), refers to vendor_check.type_of_uuid} */
    protected java.util.UUID _typeOfUuid;
    /** type_of_xml: {xml(2147483647), refers to vendor_check.type_of_xml} */
    protected org.docksidestage.postgresql.mytype.MyXML _typeOfXml;
    /** type_of_json: {json(2147483647), refers to vendor_check.type_of_json} */
    protected org.docksidestage.postgresql.mytype.MyJSON _typeOfJson;
// ===================================================================================
// DB Meta
// =======
    /** {@inheritDoc} */
    public DBMeta asDBMeta() {
        // Singleton meta object generated alongside this customize entity.
        return org.docksidestage.postgresql.dbflute.bsentity.customize.dbmeta.VendorCheckCursorDbm.getInstance();
    }
    /** {@inheritDoc} */
    public String asTableDbName() {
        // Pseudo table name; presumably the outside-SQL title, as this entity maps no physical table.
        return "VendorCheckCursor";
    }
// ===================================================================================
// Key Handling
// ============
    /** {@inheritDoc} */
    public boolean hasPrimaryKeyValue() {
        // This customize entity declares no primary key (see class Javadoc), so always false.
        return false;
    }
// ===================================================================================
// Classification Property
// =======================
/**
* Get the value of typeOfBool as the classification of TrueFalse. <br>
* type_of_bool: {bool(1), refers to vendor_check.type_of_bool, classification=TrueFalse} <br>
* boolean type classification
* <p>It's treated as case insensitive and if the code value is null, it returns null.</p>
* @return The instance of classification definition (as ENUM type). (NullAllowed: when the column value is null)
*/
public CDef.TrueFalse getTypeOfBoolAsTrueFalse() {
return CDef.TrueFalse.codeOf(getTypeOfBool());
}
/**
* Set the value of typeOfBool as the classification of TrueFalse. <br>
* type_of_bool: {bool(1), refers to vendor_check.type_of_bool, classification=TrueFalse} <br>
* boolean type classification
* @param cdef The instance of classification definition (as ENUM type). (NullAllowed: if null, null value is set to the column)
*/
public void setTypeOfBoolAsTrueFalse(CDef.TrueFalse cdef) {
setTypeOfBool(cdef != null ? toBoolean(cdef.code()) : null);
}
// ===================================================================================
// Classification Setting
// ======================
    /**
     * Set the value of typeOfBool as True (true). <br>
     * Yes: means valid
     */
    public void setTypeOfBool_True() {
        // Convenience shortcut for the classification setter.
        setTypeOfBoolAsTrueFalse(CDef.TrueFalse.True);
    }
    /**
     * Set the value of typeOfBool as False (false). <br>
     * No: means invalid
     */
    public void setTypeOfBool_False() {
        // Convenience shortcut for the classification setter.
        setTypeOfBoolAsTrueFalse(CDef.TrueFalse.False);
    }
// ===================================================================================
// Classification Determination
// ============================
/**
* Is the value of typeOfBool True? <br>
* Yes: means valid
* <p>It's treated as case insensitive and if the code value is null, it returns false.</p>
* @return The determination, true or false.
*/
public boolean isTypeOfBoolTrue() {
CDef.TrueFalse cdef = getTypeOfBoolAsTrueFalse();
return cdef != null ? cdef.equals(CDef.TrueFalse.True) : false;
}
/**
* Is the value of typeOfBool False? <br>
* No: means invalid
* <p>It's treated as case insensitive and if the code value is null, it returns false.</p>
* @return The determination, true or false.
*/
public boolean isTypeOfBoolFalse() {
CDef.TrueFalse cdef = getTypeOfBoolAsTrueFalse();
return cdef != null ? cdef.equals(CDef.TrueFalse.False) : false;
}
// ===================================================================================
// Classification Name/Alias
// =========================
/**
* Get the value of the column 'typeOfBool' as classification alias.
* @return The string of classification alias. (NullAllowed: when the column value is null)
*/
public String getTypeOfBoolAlias() {
CDef.TrueFalse cdef = getTypeOfBoolAsTrueFalse();
return cdef != null ? cdef.alias() : null;
}
// ===================================================================================
// Foreign Property
// ================
// ===================================================================================
// Referrer Property
// =================
    // Overridden so the generated class uses its own imported List/ArrayList types,
    // keeping referrer-list creation local to this source file.
    protected <ELEMENT> List<ELEMENT> newReferrerList() { // overriding to import
        return new ArrayList<ELEMENT>();
    }
// ===================================================================================
// Basic Override
// ==============
@Override
protected boolean doEquals(Object obj) {
if (obj instanceof BsVendorCheckCursor) {
BsVendorCheckCursor other = (BsVendorCheckCursor)obj;
if (!xSV(_vendorCheckId, other._vendorCheckId)) { return false; }
if (!xSV(_typeOfChar, other._typeOfChar)) { return false; }
if (!xSV(_typeOfVarchar, other._typeOfVarchar)) { return false; }
if (!xSV(_typeOfVcArray, other._typeOfVcArray)) { return false; }
if (!xSV(_typeOfText, other._typeOfText)) { return false; }
if (!xSV(_typeOfNumericInteger, other._typeOfNumericInteger)) { return false; }
if (!xSV(_typeOfNumericBigint, other._typeOfNumericBigint)) { return false; }
if (!xSV(_typeOfNumericDecimal, other._typeOfNumericDecimal)) { return false; }
if (!xSV(_typeOfDecimal, other._typeOfDecimal)) { return false; }
if (!xSV(_typeOfInt, other._typeOfInt)) { return false; }
if (!xSV(_typeOfIntArray, other._typeOfIntArray)) { return false; }
if (!xSV(_typeOfInt4, other._typeOfInt4)) { return false; }
if (!xSV(_typeOfInt4Array, other._typeOfInt4Array)) { return false; }
if (!xSV(_typeOfInt8, other._typeOfInt8)) { return false; }
if (!xSV(_typeOfInt8Array, other._typeOfInt8Array)) { return false; }
if (!xSV(_typeOfBigint, other._typeOfBigint)) { return false; }
if (!xSV(_typeOfReal, other._typeOfReal)) { return false; }
if (!xSV(_typeOfFloat, other._typeOfFloat)) { return false; }
if (!xSV(_typeOfMoney, other._typeOfMoney)) { return false; }
if (!xSV(_typeOfDate, other._typeOfDate)) { return false; }
if (!xSV(_typeOfTimestamp, other._typeOfTimestamp)) { return false; }
if (!xSV(_typeOfTime, other._typeOfTime)) { return false; }
if (!xSV(_typeOfTimetz, other._typeOfTimetz)) { return false; }
if (!xSV(_typeOfInterval, other._typeOfInterval)) { return false; }
if (!xSV(_typeOfBool, other._typeOfBool)) { return false; }
if (!xSV(_typeOfBit, other._typeOfBit)) { return false; }
if (!xSV(_typeOfBytea, other._typeOfBytea)) { return false; }
if (!xSV(_typeOfOid, other._typeOfOid)) { return false; }
if (!xSV(_typeOfUuid, other._typeOfUuid)) { return false; }
if (!xSV(_typeOfXml, other._typeOfXml)) { return false; }
if (!xSV(_typeOfJson, other._typeOfJson)) { return false; }
return true;
} else {
return false;
}
}
    @Override
    protected int doHashCode(int initial) {
        // Folds the table name and every column value into the hash via the
        // inherited xCH helper, in the same column order as doEquals.
        int hs = initial;
        hs = xCH(hs, asTableDbName());
        hs = xCH(hs, _vendorCheckId);
        hs = xCH(hs, _typeOfChar);
        hs = xCH(hs, _typeOfVarchar);
        hs = xCH(hs, _typeOfVcArray);
        hs = xCH(hs, _typeOfText);
        hs = xCH(hs, _typeOfNumericInteger);
        hs = xCH(hs, _typeOfNumericBigint);
        hs = xCH(hs, _typeOfNumericDecimal);
        hs = xCH(hs, _typeOfDecimal);
        hs = xCH(hs, _typeOfInt);
        hs = xCH(hs, _typeOfIntArray);
        hs = xCH(hs, _typeOfInt4);
        hs = xCH(hs, _typeOfInt4Array);
        hs = xCH(hs, _typeOfInt8);
        hs = xCH(hs, _typeOfInt8Array);
        hs = xCH(hs, _typeOfBigint);
        hs = xCH(hs, _typeOfReal);
        hs = xCH(hs, _typeOfFloat);
        hs = xCH(hs, _typeOfMoney);
        hs = xCH(hs, _typeOfDate);
        hs = xCH(hs, _typeOfTimestamp);
        hs = xCH(hs, _typeOfTime);
        hs = xCH(hs, _typeOfTimetz);
        hs = xCH(hs, _typeOfInterval);
        hs = xCH(hs, _typeOfBool);
        hs = xCH(hs, _typeOfBit);
        hs = xCH(hs, _typeOfBytea);
        hs = xCH(hs, _typeOfOid);
        hs = xCH(hs, _typeOfUuid);
        hs = xCH(hs, _typeOfXml);
        hs = xCH(hs, _typeOfJson);
        return hs;
    }
    @Override
    protected String doBuildStringWithRelation(String li) {
        // No foreign/referrer properties exist for this entity, so nothing to render.
        return "";
    }
    @Override
    protected String doBuildColumnString(String dm) {
        // Joins every column value with the delimiter 'dm'.  xfND formats an ordinary
        // value; xfBA is used for the byte[] columns (_typeOfBytea, _typeOfOid).
        StringBuilder sb = new StringBuilder();
        sb.append(dm).append(xfND(_vendorCheckId));
        sb.append(dm).append(xfND(_typeOfChar));
        sb.append(dm).append(xfND(_typeOfVarchar));
        sb.append(dm).append(xfND(_typeOfVcArray));
        sb.append(dm).append(xfND(_typeOfText));
        sb.append(dm).append(xfND(_typeOfNumericInteger));
        sb.append(dm).append(xfND(_typeOfNumericBigint));
        sb.append(dm).append(xfND(_typeOfNumericDecimal));
        sb.append(dm).append(xfND(_typeOfDecimal));
        sb.append(dm).append(xfND(_typeOfInt));
        sb.append(dm).append(xfND(_typeOfIntArray));
        sb.append(dm).append(xfND(_typeOfInt4));
        sb.append(dm).append(xfND(_typeOfInt4Array));
        sb.append(dm).append(xfND(_typeOfInt8));
        sb.append(dm).append(xfND(_typeOfInt8Array));
        sb.append(dm).append(xfND(_typeOfBigint));
        sb.append(dm).append(xfND(_typeOfReal));
        sb.append(dm).append(xfND(_typeOfFloat));
        sb.append(dm).append(xfND(_typeOfMoney));
        sb.append(dm).append(xfND(_typeOfDate));
        sb.append(dm).append(xfND(_typeOfTimestamp));
        sb.append(dm).append(xfND(_typeOfTime));
        sb.append(dm).append(xfND(_typeOfTimetz));
        sb.append(dm).append(xfND(_typeOfInterval));
        sb.append(dm).append(xfND(_typeOfBool));
        sb.append(dm).append(xfND(_typeOfBit));
        sb.append(dm).append(xfBA(_typeOfBytea));
        sb.append(dm).append(xfBA(_typeOfOid));
        sb.append(dm).append(xfND(_typeOfUuid));
        sb.append(dm).append(xfND(_typeOfXml));
        sb.append(dm).append(xfND(_typeOfJson));
        if (sb.length() > dm.length()) {
            sb.delete(0, dm.length()); // strip the leading delimiter
        }
        sb.insert(0, "{").append("}"); // wrap the joined values in braces
        return sb.toString();
    }
    @Override
    protected String doBuildRelationString(String dm) {
        // No relations defined for this entity, so nothing to render.
        return "";
    }
    @Override
    public VendorCheckCursor clone() {
        // Covariant override: narrows the inherited clone() to the concrete entity type.
        return (VendorCheckCursor)super.clone();
    }
// ===================================================================================
// Accessor
// ========
    // NOTE(review): every getter calls checkSpecifiedProperty(...) — presumably it fails
    // fast when the column was excluded from the select clause — and every setter calls
    // registerModifiedProperty(...) to record the column as modified.
    // TODO confirm the exact contract against AbstractEntity.
    /**
     * [get] vendor_check_id: {numeric(16), refers to vendor_check.vendor_check_id} <br>
     * @return The value of the column 'vendor_check_id'. (NullAllowed even if selected: for no constraint)
     */
    public Long getVendorCheckId() {
        checkSpecifiedProperty("vendorCheckId");
        return _vendorCheckId;
    }
    /**
     * [set] vendor_check_id: {numeric(16), refers to vendor_check.vendor_check_id} <br>
     * @param vendorCheckId The value of the column 'vendor_check_id'. (NullAllowed: null update allowed for no constraint)
     */
    public void setVendorCheckId(Long vendorCheckId) {
        registerModifiedProperty("vendorCheckId");
        _vendorCheckId = vendorCheckId;
    }
    /**
     * [get] type_of_char: {bpchar(3), refers to vendor_check.type_of_char} <br>
     * @return The value of the column 'type_of_char'. (NullAllowed even if selected: for no constraint)
     */
    public String getTypeOfChar() {
        checkSpecifiedProperty("typeOfChar");
        return _typeOfChar;
    }
    /**
     * [set] type_of_char: {bpchar(3), refers to vendor_check.type_of_char} <br>
     * @param typeOfChar The value of the column 'type_of_char'. (NullAllowed: null update allowed for no constraint)
     */
    public void setTypeOfChar(String typeOfChar) {
        registerModifiedProperty("typeOfChar");
        _typeOfChar = typeOfChar;
    }
    /**
     * [get] type_of_varchar: {varchar(2147483647), refers to vendor_check.type_of_varchar} <br>
     * @return The value of the column 'type_of_varchar'. (NullAllowed even if selected: for no constraint)
     */
    public String getTypeOfVarchar() {
        checkSpecifiedProperty("typeOfVarchar");
        return _typeOfVarchar;
    }
    /**
     * [set] type_of_varchar: {varchar(2147483647), refers to vendor_check.type_of_varchar} <br>
     * @param typeOfVarchar The value of the column 'type_of_varchar'. (NullAllowed: null update allowed for no constraint)
     */
    public void setTypeOfVarchar(String typeOfVarchar) {
        registerModifiedProperty("typeOfVarchar");
        _typeOfVarchar = typeOfVarchar;
    }
    /**
     * [get] type_of_vc_array: {_varchar(2147483647), refers to vendor_check.type_of_vc_array} <br>
     * @return The value of the column 'type_of_vc_array'. (NullAllowed even if selected: for no constraint)
     */
    public org.docksidestage.postgresql.mytype.MyArray getTypeOfVcArray() {
        checkSpecifiedProperty("typeOfVcArray");
        return _typeOfVcArray;
    }
    /**
     * [set] type_of_vc_array: {_varchar(2147483647), refers to vendor_check.type_of_vc_array} <br>
     * @param typeOfVcArray The value of the column 'type_of_vc_array'. (NullAllowed: null update allowed for no constraint)
     */
    public void setTypeOfVcArray(org.docksidestage.postgresql.mytype.MyArray typeOfVcArray) {
        registerModifiedProperty("typeOfVcArray");
        _typeOfVcArray = typeOfVcArray;
    }
    /**
     * [get] type_of_text: {text(2147483647), refers to vendor_check.type_of_text} <br>
     * @return The value of the column 'type_of_text'. (NullAllowed even if selected: for no constraint)
     */
    public String getTypeOfText() {
        checkSpecifiedProperty("typeOfText");
        return _typeOfText;
    }
    /**
     * [set] type_of_text: {text(2147483647), refers to vendor_check.type_of_text} <br>
     * @param typeOfText The value of the column 'type_of_text'. (NullAllowed: null update allowed for no constraint)
     */
    public void setTypeOfText(String typeOfText) {
        registerModifiedProperty("typeOfText");
        _typeOfText = typeOfText;
    }
    /**
     * [get] type_of_numeric_integer: {numeric(5), refers to vendor_check.type_of_numeric_integer} <br>
     * @return The value of the column 'type_of_numeric_integer'. (NullAllowed even if selected: for no constraint)
     */
    public Integer getTypeOfNumericInteger() {
        checkSpecifiedProperty("typeOfNumericInteger");
        return _typeOfNumericInteger;
    }
    /**
     * [set] type_of_numeric_integer: {numeric(5), refers to vendor_check.type_of_numeric_integer} <br>
     * @param typeOfNumericInteger The value of the column 'type_of_numeric_integer'. (NullAllowed: null update allowed for no constraint)
     */
    public void setTypeOfNumericInteger(Integer typeOfNumericInteger) {
        registerModifiedProperty("typeOfNumericInteger");
        _typeOfNumericInteger = typeOfNumericInteger;
    }
    /**
     * [get] type_of_numeric_bigint: {numeric(12), refers to vendor_check.type_of_numeric_bigint} <br>
     * @return The value of the column 'type_of_numeric_bigint'. (NullAllowed even if selected: for no constraint)
     */
    public Long getTypeOfNumericBigint() {
        checkSpecifiedProperty("typeOfNumericBigint");
        return _typeOfNumericBigint;
    }
    /**
     * [set] type_of_numeric_bigint: {numeric(12), refers to vendor_check.type_of_numeric_bigint} <br>
     * @param typeOfNumericBigint The value of the column 'type_of_numeric_bigint'. (NullAllowed: null update allowed for no constraint)
     */
    public void setTypeOfNumericBigint(Long typeOfNumericBigint) {
        registerModifiedProperty("typeOfNumericBigint");
        _typeOfNumericBigint = typeOfNumericBigint;
    }
    /**
     * [get] type_of_numeric_decimal: {numeric(5, 3), refers to vendor_check.type_of_numeric_decimal} <br>
     * @return The value of the column 'type_of_numeric_decimal'. (NullAllowed even if selected: for no constraint)
     */
    public java.math.BigDecimal getTypeOfNumericDecimal() {
        checkSpecifiedProperty("typeOfNumericDecimal");
        return _typeOfNumericDecimal;
    }
    /**
     * [set] type_of_numeric_decimal: {numeric(5, 3), refers to vendor_check.type_of_numeric_decimal} <br>
     * @param typeOfNumericDecimal The value of the column 'type_of_numeric_decimal'. (NullAllowed: null update allowed for no constraint)
     */
    public void setTypeOfNumericDecimal(java.math.BigDecimal typeOfNumericDecimal) {
        registerModifiedProperty("typeOfNumericDecimal");
        _typeOfNumericDecimal = typeOfNumericDecimal;
    }
    /**
     * [get] type_of_decimal: {numeric(131089), refers to vendor_check.type_of_decimal} <br>
     * @return The value of the column 'type_of_decimal'. (NullAllowed even if selected: for no constraint)
     */
    public java.math.BigDecimal getTypeOfDecimal() {
        checkSpecifiedProperty("typeOfDecimal");
        return _typeOfDecimal;
    }
    /**
     * [set] type_of_decimal: {numeric(131089), refers to vendor_check.type_of_decimal} <br>
     * @param typeOfDecimal The value of the column 'type_of_decimal'. (NullAllowed: null update allowed for no constraint)
     */
    public void setTypeOfDecimal(java.math.BigDecimal typeOfDecimal) {
        registerModifiedProperty("typeOfDecimal");
        _typeOfDecimal = typeOfDecimal;
    }
    /**
     * [get] type_of_int: {int4(10), refers to vendor_check.type_of_int} <br>
     * @return The value of the column 'type_of_int'. (NullAllowed even if selected: for no constraint)
     */
    public Integer getTypeOfInt() {
        checkSpecifiedProperty("typeOfInt");
        return _typeOfInt;
    }
    /**
     * [set] type_of_int: {int4(10), refers to vendor_check.type_of_int} <br>
     * @param typeOfInt The value of the column 'type_of_int'. (NullAllowed: null update allowed for no constraint)
     */
    public void setTypeOfInt(Integer typeOfInt) {
        registerModifiedProperty("typeOfInt");
        _typeOfInt = typeOfInt;
    }
    /**
     * [get] type_of_int_array: {_int4(10), refers to vendor_check.type_of_int_array} <br>
     * @return The value of the column 'type_of_int_array'. (NullAllowed even if selected: for no constraint)
     */
    public org.docksidestage.postgresql.mytype.MyArray getTypeOfIntArray() {
        checkSpecifiedProperty("typeOfIntArray");
        return _typeOfIntArray;
    }
    /**
     * [set] type_of_int_array: {_int4(10), refers to vendor_check.type_of_int_array} <br>
     * @param typeOfIntArray The value of the column 'type_of_int_array'. (NullAllowed: null update allowed for no constraint)
     */
    public void setTypeOfIntArray(org.docksidestage.postgresql.mytype.MyArray typeOfIntArray) {
        registerModifiedProperty("typeOfIntArray");
        _typeOfIntArray = typeOfIntArray;
    }
    /**
     * [get] type_of_int4: {int4(10), refers to vendor_check.type_of_int4} <br>
     * @return The value of the column 'type_of_int4'. (NullAllowed even if selected: for no constraint)
     */
    public Integer getTypeOfInt4() {
        checkSpecifiedProperty("typeOfInt4");
        return _typeOfInt4;
    }
    /**
     * [set] type_of_int4: {int4(10), refers to vendor_check.type_of_int4} <br>
     * @param typeOfInt4 The value of the column 'type_of_int4'. (NullAllowed: null update allowed for no constraint)
     */
    public void setTypeOfInt4(Integer typeOfInt4) {
        registerModifiedProperty("typeOfInt4");
        _typeOfInt4 = typeOfInt4;
    }
/**
* [get] type_of_int4_array: {_int4(10), refers to vendor_check.type_of_int4_array} <br>
* @return The value of the column 'type_of_int4_array'. (NullAllowed even if selected: for no constraint)
*/
public org.docksidestage.postgresql.mytype.MyArray getTypeOfInt4Array() {
checkSpecifiedProperty("typeOfInt4Array");
return _typeOfInt4Array;
}
/**
* [set] type_of_int4_array: {_int4(10), refers to vendor_check.type_of_int4_array} <br>
* @param typeOfInt4Array The value of the column 'type_of_int4_array'. (NullAllowed: null update allowed for no constraint)
*/
public void setTypeOfInt4Array(org.docksidestage.postgresql.mytype.MyArray typeOfInt4Array) {
registerModifiedProperty("typeOfInt4Array");
_typeOfInt4Array = typeOfInt4Array;
}
/**
* [get] type_of_int8: {int8(19), refers to vendor_check.type_of_int8} <br>
* @return The value of the column 'type_of_int8'. (NullAllowed even if selected: for no constraint)
*/
public Long getTypeOfInt8() {
checkSpecifiedProperty("typeOfInt8");
return _typeOfInt8;
}
/**
* [set] type_of_int8: {int8(19), refers to vendor_check.type_of_int8} <br>
* @param typeOfInt8 The value of the column 'type_of_int8'. (NullAllowed: null update allowed for no constraint)
*/
public void setTypeOfInt8(Long typeOfInt8) {
registerModifiedProperty("typeOfInt8");
_typeOfInt8 = typeOfInt8;
}
/**
* [get] type_of_int8_array: {_int8(19), refers to vendor_check.type_of_int8_array} <br>
* @return The value of the column 'type_of_int8_array'. (NullAllowed even if selected: for no constraint)
*/
public org.docksidestage.postgresql.mytype.MyArray getTypeOfInt8Array() {
checkSpecifiedProperty("typeOfInt8Array");
return _typeOfInt8Array;
}
/**
* [set] type_of_int8_array: {_int8(19), refers to vendor_check.type_of_int8_array} <br>
* @param typeOfInt8Array The value of the column 'type_of_int8_array'. (NullAllowed: null update allowed for no constraint)
*/
public void setTypeOfInt8Array(org.docksidestage.postgresql.mytype.MyArray typeOfInt8Array) {
registerModifiedProperty("typeOfInt8Array");
_typeOfInt8Array = typeOfInt8Array;
}
/**
* [get] type_of_bigint: {int8(19), refers to vendor_check.type_of_bigint} <br>
* @return The value of the column 'type_of_bigint'. (NullAllowed even if selected: for no constraint)
*/
public Long getTypeOfBigint() {
checkSpecifiedProperty("typeOfBigint");
return _typeOfBigint;
}
/**
* [set] type_of_bigint: {int8(19), refers to vendor_check.type_of_bigint} <br>
* @param typeOfBigint The value of the column 'type_of_bigint'. (NullAllowed: null update allowed for no constraint)
*/
public void setTypeOfBigint(Long typeOfBigint) {
registerModifiedProperty("typeOfBigint");
_typeOfBigint = typeOfBigint;
}
/**
* [get] type_of_real: {float4(8, 8), refers to vendor_check.type_of_real} <br>
* @return The value of the column 'type_of_real'. (NullAllowed even if selected: for no constraint)
*/
public java.math.BigDecimal getTypeOfReal() {
checkSpecifiedProperty("typeOfReal");
return _typeOfReal;
}
/**
* [set] type_of_real: {float4(8, 8), refers to vendor_check.type_of_real} <br>
* @param typeOfReal The value of the column 'type_of_real'. (NullAllowed: null update allowed for no constraint)
*/
public void setTypeOfReal(java.math.BigDecimal typeOfReal) {
registerModifiedProperty("typeOfReal");
_typeOfReal = typeOfReal;
}
/**
* [get] type_of_float: {float8(17, 17), refers to vendor_check.type_of_float} <br>
* @return The value of the column 'type_of_float'. (NullAllowed even if selected: for no constraint)
*/
public java.math.BigDecimal getTypeOfFloat() {
checkSpecifiedProperty("typeOfFloat");
return _typeOfFloat;
}
/**
* [set] type_of_float: {float8(17, 17), refers to vendor_check.type_of_float} <br>
* @param typeOfFloat The value of the column 'type_of_float'. (NullAllowed: null update allowed for no constraint)
*/
public void setTypeOfFloat(java.math.BigDecimal typeOfFloat) {
registerModifiedProperty("typeOfFloat");
_typeOfFloat = typeOfFloat;
}
/**
* [get] type_of_money: {money(2147483647), refers to vendor_check.type_of_money} <br>
* @return The value of the column 'type_of_money'. (NullAllowed even if selected: for no constraint)
*/
public java.math.BigDecimal getTypeOfMoney() {
checkSpecifiedProperty("typeOfMoney");
return _typeOfMoney;
}
/**
* [set] type_of_money: {money(2147483647), refers to vendor_check.type_of_money} <br>
* @param typeOfMoney The value of the column 'type_of_money'. (NullAllowed: null update allowed for no constraint)
*/
public void setTypeOfMoney(java.math.BigDecimal typeOfMoney) {
registerModifiedProperty("typeOfMoney");
_typeOfMoney = typeOfMoney;
}
/**
* [get] type_of_date: {date(13), refers to vendor_check.type_of_date} <br>
* @return The value of the column 'type_of_date'. (NullAllowed even if selected: for no constraint)
*/
public java.time.LocalDate getTypeOfDate() {
checkSpecifiedProperty("typeOfDate");
return _typeOfDate;
}
/**
* [set] type_of_date: {date(13), refers to vendor_check.type_of_date} <br>
* @param typeOfDate The value of the column 'type_of_date'. (NullAllowed: null update allowed for no constraint)
*/
public void setTypeOfDate(java.time.LocalDate typeOfDate) {
registerModifiedProperty("typeOfDate");
_typeOfDate = typeOfDate;
}
/**
* [get] type_of_timestamp: {timestamp(29, 6), refers to vendor_check.type_of_timestamp} <br>
* @return The value of the column 'type_of_timestamp'. (NullAllowed even if selected: for no constraint)
*/
public java.time.LocalDateTime getTypeOfTimestamp() {
checkSpecifiedProperty("typeOfTimestamp");
return _typeOfTimestamp;
}
/**
* [set] type_of_timestamp: {timestamp(29, 6), refers to vendor_check.type_of_timestamp} <br>
* @param typeOfTimestamp The value of the column 'type_of_timestamp'. (NullAllowed: null update allowed for no constraint)
*/
public void setTypeOfTimestamp(java.time.LocalDateTime typeOfTimestamp) {
registerModifiedProperty("typeOfTimestamp");
_typeOfTimestamp = typeOfTimestamp;
}
/**
* [get] type_of_time: {time(15, 6), refers to vendor_check.type_of_time} <br>
* @return The value of the column 'type_of_time'. (NullAllowed even if selected: for no constraint)
*/
public java.time.LocalTime getTypeOfTime() {
checkSpecifiedProperty("typeOfTime");
return _typeOfTime;
}
/**
* [set] type_of_time: {time(15, 6), refers to vendor_check.type_of_time} <br>
* @param typeOfTime The value of the column 'type_of_time'. (NullAllowed: null update allowed for no constraint)
*/
public void setTypeOfTime(java.time.LocalTime typeOfTime) {
registerModifiedProperty("typeOfTime");
_typeOfTime = typeOfTime;
}
    /**
     * [get] type_of_timetz: {timetz(21, 6), refers to vendor_check.type_of_timetz} <br>
     * @return The value of the column 'type_of_timetz'. (NullAllowed even if selected: for no constraint)
     */
    public java.time.LocalTime getTypeOfTimetz() {
        checkSpecifiedProperty("typeOfTimetz"); // presumably fails fast when this column was not selected -- DBFlute convention
        return _typeOfTimetz;
    }
    /**
     * [set] type_of_timetz: {timetz(21, 6), refers to vendor_check.type_of_timetz} <br>
     * @param typeOfTimetz The value of the column 'type_of_timetz'. (NullAllowed: null update allowed for no constraint)
     */
    public void setTypeOfTimetz(java.time.LocalTime typeOfTimetz) {
        registerModifiedProperty("typeOfTimetz"); // record the modification before assigning the new value
        _typeOfTimetz = typeOfTimetz;
    }
    /**
     * [get] type_of_interval: {interval(49, 6), refers to vendor_check.type_of_interval} <br>
     * @return The value of the column 'type_of_interval'. (NullAllowed even if selected: for no constraint)
     */
    public String getTypeOfInterval() {
        checkSpecifiedProperty("typeOfInterval"); // presumably fails fast when this column was not selected -- DBFlute convention
        return _typeOfInterval;
    }
    /**
     * [set] type_of_interval: {interval(49, 6), refers to vendor_check.type_of_interval} <br>
     * @param typeOfInterval The value of the column 'type_of_interval'. (NullAllowed: null update allowed for no constraint)
     */
    public void setTypeOfInterval(String typeOfInterval) {
        registerModifiedProperty("typeOfInterval"); // record the modification before assigning the new value
        _typeOfInterval = typeOfInterval;
    }
    /**
     * [get] type_of_bool: {bool(1), refers to vendor_check.type_of_bool, classification=TrueFalse} <br>
     * @return The value of the column 'type_of_bool'. (NullAllowed even if selected: for no constraint)
     */
    public Boolean getTypeOfBool() {
        checkSpecifiedProperty("typeOfBool"); // presumably fails fast when this column was not selected -- DBFlute convention
        return _typeOfBool;
    }
    /**
     * [set] type_of_bool: {bool(1), refers to vendor_check.type_of_bool, classification=TrueFalse} <br>
     * @param typeOfBool The value of the column 'type_of_bool'. (NullAllowed: null update allowed for no constraint)
     */
    public void setTypeOfBool(Boolean typeOfBool) {
        checkClassificationCode("type_of_bool", CDef.DefMeta.TrueFalse, typeOfBool); // presumably validates against the TrueFalse classification before accepting
        registerModifiedProperty("typeOfBool"); // record the modification before assigning the new value
        _typeOfBool = typeOfBool;
    }
    /**
     * [get] type_of_bit: {bit(1), refers to vendor_check.type_of_bit} <br>
     * @return The value of the column 'type_of_bit'. (NullAllowed even if selected: for no constraint)
     */
    public Boolean getTypeOfBit() {
        checkSpecifiedProperty("typeOfBit"); // presumably fails fast when this column was not selected -- DBFlute convention
        return _typeOfBit;
    }
    /**
     * [set] type_of_bit: {bit(1), refers to vendor_check.type_of_bit} <br>
     * @param typeOfBit The value of the column 'type_of_bit'. (NullAllowed: null update allowed for no constraint)
     */
    public void setTypeOfBit(Boolean typeOfBit) {
        registerModifiedProperty("typeOfBit"); // record the modification before assigning the new value
        _typeOfBit = typeOfBit;
    }
    /**
     * [get] type_of_bytea: {bytea(2147483647), refers to vendor_check.type_of_bytea} <br>
     * @return The value of the column 'type_of_bytea'. (NullAllowed even if selected: for no constraint)
     */
    @MappingValueType(keyName = "postgreSqlByteaType")
    public byte[] getTypeOfBytea() {
        checkSpecifiedProperty("typeOfBytea"); // presumably fails fast when this column was not selected -- DBFlute convention
        return _typeOfBytea;
    }
    /**
     * [set] type_of_bytea: {bytea(2147483647), refers to vendor_check.type_of_bytea} <br>
     * @param typeOfBytea The value of the column 'type_of_bytea'. (NullAllowed: null update allowed for no constraint)
     */
    public void setTypeOfBytea(byte[] typeOfBytea) {
        registerModifiedProperty("typeOfBytea"); // record the modification before assigning the new value
        _typeOfBytea = typeOfBytea;
    }
    /**
     * [get] type_of_oid: {oid(10), refers to vendor_check.type_of_oid} <br>
     * @return The value of the column 'type_of_oid'. (NullAllowed even if selected: for no constraint)
     */
    @MappingValueType(keyName = "postgreSqlOidType")
    public byte[] getTypeOfOid() {
        checkSpecifiedProperty("typeOfOid"); // presumably fails fast when this column was not selected -- DBFlute convention
        return _typeOfOid;
    }
    /**
     * [set] type_of_oid: {oid(10), refers to vendor_check.type_of_oid} <br>
     * @param typeOfOid The value of the column 'type_of_oid'. (NullAllowed: null update allowed for no constraint)
     */
    public void setTypeOfOid(byte[] typeOfOid) {
        registerModifiedProperty("typeOfOid"); // record the modification before assigning the new value
        _typeOfOid = typeOfOid;
    }
    /**
     * [get] type_of_uuid: {uuid(2147483647), refers to vendor_check.type_of_uuid} <br>
     * @return The value of the column 'type_of_uuid'. (NullAllowed even if selected: for no constraint)
     */
    public java.util.UUID getTypeOfUuid() {
        checkSpecifiedProperty("typeOfUuid"); // presumably fails fast when this column was not selected -- DBFlute convention
        return _typeOfUuid;
    }
    /**
     * [set] type_of_uuid: {uuid(2147483647), refers to vendor_check.type_of_uuid} <br>
     * @param typeOfUuid The value of the column 'type_of_uuid'. (NullAllowed: null update allowed for no constraint)
     */
    public void setTypeOfUuid(java.util.UUID typeOfUuid) {
        registerModifiedProperty("typeOfUuid"); // record the modification before assigning the new value
        _typeOfUuid = typeOfUuid;
    }
    /**
     * [get] type_of_xml: {xml(2147483647), refers to vendor_check.type_of_xml} <br>
     * @return The value of the column 'type_of_xml'. (NullAllowed even if selected: for no constraint)
     */
    public org.docksidestage.postgresql.mytype.MyXML getTypeOfXml() {
        checkSpecifiedProperty("typeOfXml"); // presumably fails fast when this column was not selected -- DBFlute convention
        return _typeOfXml;
    }
    /**
     * [set] type_of_xml: {xml(2147483647), refers to vendor_check.type_of_xml} <br>
     * @param typeOfXml The value of the column 'type_of_xml'. (NullAllowed: null update allowed for no constraint)
     */
    public void setTypeOfXml(org.docksidestage.postgresql.mytype.MyXML typeOfXml) {
        registerModifiedProperty("typeOfXml"); // record the modification before assigning the new value
        _typeOfXml = typeOfXml;
    }
    /**
     * [get] type_of_json: {json(2147483647), refers to vendor_check.type_of_json} <br>
     * @return The value of the column 'type_of_json'. (NullAllowed even if selected: for no constraint)
     */
    public org.docksidestage.postgresql.mytype.MyJSON getTypeOfJson() {
        checkSpecifiedProperty("typeOfJson"); // presumably fails fast when this column was not selected -- DBFlute convention
        return _typeOfJson;
    }
    /**
     * [set] type_of_json: {json(2147483647), refers to vendor_check.type_of_json} <br>
     * @param typeOfJson The value of the column 'type_of_json'. (NullAllowed: null update allowed for no constraint)
     */
    public void setTypeOfJson(org.docksidestage.postgresql.mytype.MyJSON typeOfJson) {
        registerModifiedProperty("typeOfJson"); // record the modification before assigning the new value
        _typeOfJson = typeOfJson;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Created on Mar 3, 2006
*
* TODO To change the template for this generated file go to
* Window - Preferences - Java - Code Style - Code Templates
*/
package com.gemstone.gemfire.internal.cache;
import com.gemstone.gemfire.cache.DiskStoreFactory;
import java.io.File;
import java.util.Arrays;
/**
* A properties object used to create persistent/overflow regions for
* testing objects
*
* @author Mitul
* @since 5.1
*
*/
/**
 * A plain properties holder used to create persistent/overflow regions for
 * testing. Every value starts at a sensible test default and is adjusted
 * through simple accessors; no validation is performed.
 *
 * @author Mitul
 * @since 5.1
 */
public class DiskRegionProperties {

    // Persistence / overflow configuration.
    private boolean isPersistBackup = false;
    private boolean isOverflow = false;
    private int overFlowCapacity = 1000;

    // Compaction / rolling configuration.
    private int compactionThreshold = 50;
    private boolean isRolling = true;
    private boolean allowForceCompaction = false;

    // Disk layout: directories, their size limits, and the oplog cap (10 GiB).
    private File[] diskDirs;
    private int[] diskDirSize;
    private long maxOplogSize = 1024 * 1024 * 1024 * 10L;

    // Write behavior: flush timing and synchronicity.
    private long timeInterval = -1L;
    private long bytesThreshold = 0L;
    private boolean isSynchronous = false;

    // Region attributes mirrored from the cache API defaults.
    private String regionName = "testRegion";
    private int concurrencyLevel = 16;
    private int initialCapacity = 16;
    private float loadFactor = 0.75f;
    private boolean statisticsEnabled = false;

    public DiskRegionProperties() {
    }

    public long getBytesThreshold() { return this.bytesThreshold; }

    public void setBytesThreshold(long bytesThreshold) { this.bytesThreshold = bytesThreshold; }

    public File[] getDiskDirs() { return this.diskDirs; }

    /**
     * Sets the disk directories; a null argument also clears the size array so
     * the two stay consistent.
     */
    public void setDiskDirs(File[] diskDirs) {
        this.diskDirs = diskDirs;
        if (diskDirs == null) {
            this.diskDirSize = null;
        }
    }

    public int[] getDiskDirSizes() { return this.diskDirSize; }

    /**
     * Sets the disk directories together with their sizes; the sizes are
     * ignored (nulled) when no directories are supplied.
     */
    public void setDiskDirsAndSizes(File[] diskDirs, int[] diskDirSize) {
        this.diskDirs = diskDirs;
        this.diskDirSize = (diskDirs == null) ? null : diskDirSize;
    }

    public boolean isOverflow() { return this.isOverflow; }

    public void setOverflow(boolean isOverflow) { this.isOverflow = isOverflow; }

    public boolean isPersistBackup() { return this.isPersistBackup; }

    public void setPersistBackup(boolean isPersistBackup) { this.isPersistBackup = isPersistBackup; }

    public boolean isRolling() { return this.isRolling; }

    public void setRolling(boolean isRolling) { this.isRolling = isRolling; }

    public boolean getAllowForceCompaction() { return this.allowForceCompaction; }

    public void setAllowForceCompaction(boolean v) { this.allowForceCompaction = v; }

    public int getCompactionThreshold() { return this.compactionThreshold; }

    public void setCompactionThreshold(int v) { this.compactionThreshold = v; }

    public boolean isSynchronous() { return this.isSynchronous; }

    public void setSynchronous(boolean isSynchronous) { this.isSynchronous = isSynchronous; }

    public long getMaxOplogSize() { return this.maxOplogSize; }

    public void setMaxOplogSize(long maxOplogSize) { this.maxOplogSize = maxOplogSize; }

    public int getOverFlowCapacity() { return this.overFlowCapacity; }

    public void setOverFlowCapacity(int overFlowCapacity) { this.overFlowCapacity = overFlowCapacity; }

    public long getTimeInterval() { return this.timeInterval; }

    public void setTimeInterval(long timeInterval) { this.timeInterval = timeInterval; }

    public String getRegionName() { return this.regionName; }

    public void setRegionName(String regionName) { this.regionName = regionName; }

    public boolean getStatisticsEnabled() { return this.statisticsEnabled; }

    public void setStatisticsEnabled(boolean v) { this.statisticsEnabled = v; }

    public int getConcurrencyLevel() { return this.concurrencyLevel; }

    public void setConcurrencyLevel(int v) { this.concurrencyLevel = v; }

    public int getInitialCapacity() { return this.initialCapacity; }

    public void setInitialCapacity(int v) { this.initialCapacity = v; }

    public float getLoadFactor() { return this.loadFactor; }

    public void setLoadFactor(float v) { this.loadFactor = v; }
}
| |
package com.cs371m.austinrecycle;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.ContentResolver;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.graphics.drawable.Drawable;
import android.location.Address;
import android.location.Geocoder;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Parcelable;
import android.provider.Settings;
import android.text.Editable;
import android.text.TextWatcher;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.animation.AnimationUtils;
import android.view.inputmethod.InputMethodManager;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.AutoCompleteTextView;
import android.widget.CheckBox;
import android.widget.CompoundButton;
import android.widget.CompoundButton.OnCheckedChangeListener;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.ListAdapter;
import android.widget.TextView;
import android.widget.Toast;
public class MainActivity<ViewGroup> extends Activity {
	private static final String TAG = "MainActivity.java";
	// Search form widgets.
	private EditText _materialEditText;               // shows selected materials; opens the picker dialog on tap
	private ImageButton _searchButton;
	private AutoCompleteTextView _locationAutoCompleteTextView; // address entry with Google Places suggestions
	private CheckBox _currentLocationCheckBox;        // "use my current location" toggle
	// Material picker data, loaded from resources in onCreate.
	private String[] _materialNames;
	private TypedArray _icons;
	private ArrayList<MaterialItem> _materialItemArray;
	// Location state.
	private Geocoder _geocoder;
	private double _currentLat;
	private double _currentLong;
	private LocationManager _locationManager;
	// Dialog / task state.
	private AlertDialog _materialListDialog;
	private PlacesTask _placesTask;                   // NOTE(review): stays null until the user types into the address box
	private boolean[] _oldSelectedItems;              // checkbox states, saved/restored across configuration changes
	private ArrayList<Integer> _seletedItems;         // indices currently checked (name typo kept: callers use it)
	private boolean _isSearching;                     // true while a NetworkRequestTask is in flight
/**
* onCreate
*/
@Override
protected void onCreate(Bundle savedInstanceState) {
Log.d(TAG, "onCreate");
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
// Initialization
_materialItemArray = new ArrayList<MaterialItem>();
_geocoder = new Geocoder(this, Locale.getDefault());
_materialNames = MainActivity.this.getResources().getStringArray(R.array.list_material_name);
_oldSelectedItems = new boolean[_materialNames.length];
for(int i = 0; i < _oldSelectedItems.length; i++) {
_oldSelectedItems[i] = false;
}
_seletedItems = new ArrayList<Integer>();
_isSearching = false;
if(savedInstanceState != null) {
_oldSelectedItems = savedInstanceState.getBooleanArray("_oldSelectedItems");
_seletedItems = savedInstanceState.getIntegerArrayList("_seletedItems");
}
// Setup _materialEditText to show MaterialDialog when clicked
_materialEditText = (EditText) MainActivity.this.findViewById(R.id.materials_editText);
_materialEditText.setKeyListener(null);
_materialEditText.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
_materialItemArray.clear();
showMaterialDialog();
}
});
// Setup actions when the search button is clicked
_searchButton = (ImageButton) MainActivity.this.findViewById(R.id.search_button);
_searchButton.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
// Users must select at least ONE material
if(_materialEditText.getText().toString().equals("")) {
Toast.makeText(MainActivity.this, "Please select at least ONE material", Toast.LENGTH_SHORT).show();
_materialEditText.requestFocus();
}
else if(_locationAutoCompleteTextView.getText().toString().equals("")) {
showLocationDialog();
}
else {
if(_isSearching == false) {
// Get the latitude and longitude of address entered
try {
String currentAddress = _locationAutoCompleteTextView.getText().toString();
List<Address> returnedAddress = _geocoder.getFromLocationName(currentAddress, 1);
_currentLat = returnedAddress.get(0).getLatitude();
_currentLong = returnedAddress.get(0).getLongitude();
}
catch(IOException e) {
Log.e(TAG, "Error occured in Geocoder: ", e);
}
// Convert to String array to pass as parameter
String[] selectedMaterialArray = _materialEditText.getText().toString().split(",");
// Trim spaces, and format material names to their database attribute names
for(int i = 0; i < selectedMaterialArray.length; ++i) {
selectedMaterialArray[i] = selectedMaterialArray[i].trim().toLowerCase().replace(' ', '_');;
}
// Needs to create a new task every time
new NetworkRequestTask().execute(selectedMaterialArray);
_isSearching = true;
}
else {
Toast.makeText(MainActivity.this, "Austin Recycling is searching for locations now. Please wait.", Toast.LENGTH_SHORT).show();
}
}
}
});
// Location AutoComplete using suggestions from Google Location API
_locationAutoCompleteTextView = (AutoCompleteTextView) MainActivity.this.findViewById(R.id.location_autoCompleteTextView);
_locationAutoCompleteTextView.addTextChangedListener(new TextWatcher() {
public void onTextChanged(CharSequence s, int start, int before, int count) {
_placesTask = new PlacesTask();
_placesTask.execute(s.toString());
}
@Override
public void afterTextChanged(Editable s) {
Log.d(TAG, "afterTextChanged: " + s.toString());
}
@Override
public void beforeTextChanged(CharSequence s, int start, int count,
int after) {
Log.d(TAG, "beforeTextChanged: " + s);
}
});
_locationAutoCompleteTextView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> adapterView, View view, int position, long id) {
_placesTask.cancel(true);
InputMethodManager imm = (InputMethodManager)MainActivity.this.getSystemService(Context.INPUT_METHOD_SERVICE);
imm.hideSoftInputFromWindow(_locationAutoCompleteTextView.getWindowToken(), 0);
_locationAutoCompleteTextView.dismissDropDown();
}
});
// Setup actions to get current location
_currentLocationCheckBox = (CheckBox)this.findViewById(R.id.current_location_checkbox);
try {
_currentLocationCheckBox.setOnCheckedChangeListener(new OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
if(isChecked) {
if(checkGpsStatus()) {
MainActivity.this.getCurrentLocation();
InputMethodManager imm = (InputMethodManager)MainActivity.this.getSystemService(Context.INPUT_METHOD_SERVICE);
imm.hideSoftInputFromWindow(_locationAutoCompleteTextView.getWindowToken(), 0);
_placesTask.cancel(true);
}
else {
showGpsDialog();
_currentLocationCheckBox.setChecked(false);
}
}
else {
_locationAutoCompleteTextView.setText("");
}
}
});
}
catch(Exception e) {
showErrorDialog("Error getting current location.");
Log.e(TAG, "Error getting current location", e);
}
}
protected void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
outState.putBooleanArray("_oldSelectedItems", _oldSelectedItems);
outState.putIntegerArrayList("_seletedItems", _seletedItems);
}
	/**
	 * onResume() is called after onCreate(); no extra work is needed here --
	 * the listeners wired in onCreate stay active.
	 */
	@Override
	protected void onResume() {
		super.onResume();
	}
	/**
	 * onStop() will be called when the orientation is changed; dismisses the
	 * material dialog (avoids leaking its window) and resets the search flag.
	 */
	@Override
	protected void onStop() {
		super.onStop();
		Log.d(TAG, "onStop");
		if(_materialListDialog != null) {
			_materialListDialog.dismiss();
		}
		// NOTE(review): this only guarantees later cancel() calls have a non-null
		// target; the task created here is never executed -- confirm intent.
		if(_placesTask == null) {
			_placesTask = new PlacesTask();
		}
		_isSearching = false;
	}
	/**
	 * onPause: nothing beyond the framework default is required.
	 */
	@Override
	protected void onPause() {
		super.onPause();
	}
	/**
	 * Inflates the action-bar menu (About / Reset entries).
	 */
	@Override
	public boolean onCreateOptionsMenu(Menu menu) {
		// Inflate the menu; this adds items to the action bar if it is present.
		getMenuInflater().inflate(R.menu.main, menu);
		return true;
	}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch(item.getItemId()) {
case(R.id.action_about):
showAbout();
return true;
case(R.id.reset):
_materialEditText.setText("");
_locationAutoCompleteTextView.setText("");
_currentLocationCheckBox.setChecked(false);
_oldSelectedItems = new boolean[_materialNames.length];
return true;
}
return false;
}
	// View-holder for a row of the material list dialog: caches the row's
	// widgets so getView() can recycle views without repeated findViewById calls.
	static class ViewHolder {
		public TextView text;
		public CheckBox checkbox;
	}
/**
* private methods in this class
*/
private void showAbout() {
AlertDialog.Builder aboutDialogBuilder = new AlertDialog.Builder(MainActivity.this);
aboutDialogBuilder.setTitle("About Austin Recycling");
aboutDialogBuilder.setMessage("Developed by: David, Mike and Alex\n\n" +
"Advised by: Mike Scott\n\n" +
"Most location related features are powered by Google.\n\n" +
"Recycle Drop Off Locations from https://data.austintexas.gov");
aboutDialogBuilder.setNeutralButton("Done", new AlertDialog.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
}
});
AlertDialog aboutDialog = aboutDialogBuilder.create();
aboutDialog.show();
}
	/**
	 * Displays the material-picker dialog: a checkbox list of all materials.
	 * OK commits the checked set into _materialEditText; Cancel rolls back any
	 * changes made while the dialog was open.
	 */
	private void showMaterialDialog() {
		Log.d(TAG, "entering popChooseMaterialDialog");
		final AlertDialog.Builder materialDialogBuilder = new AlertDialog.Builder(this)
			.setTitle("Please select materials");
		_icons = MainActivity.this.getResources().obtainTypedArray(R.array.list_material_icon);
		// Store MaterialItem into ArrayList
		for(int i=0; i<_materialNames.length; ++i) {
			_materialItemArray.add(new MaterialItem(_icons.getResourceId(i, 0), _materialNames[i]));
		}
		_icons.recycle();
		final CharSequence[] items = new CharSequence[_materialNames.length];
		// Per-row change log for this dialog session: 0 = untouched,
		// 2 = newly checked, 1 = newly unchecked. Used by Cancel to undo.
		final int[] localSelectedItems = new int[_materialNames.length];
		for(int i=0; i<_materialNames.length; ++i) {
			items[i] = _materialNames[i];
			localSelectedItems[i] = 0;
		}
		ListAdapter adapter = new ArrayAdapter<MaterialItem>(this, R.layout.checkboxes, R.id.textView1, _materialItemArray){
			// android.view.ViewGroup is spelled out because the enclosing class
			// declares a type parameter named ViewGroup that shadows the import.
			public View getView(final int position, View convertView, android.view.ViewGroup parent) {
				ViewHolder viewHolder = null;
				if(convertView == null){
					// First use of this row: inflate and cache its widgets.
					LayoutInflater inflator = getLayoutInflater();
					convertView = inflator.inflate(R.layout.checkboxes, null);
					viewHolder = new ViewHolder();
					viewHolder.text = (TextView) convertView.findViewById(R.id.textView1);
					viewHolder.checkbox = (CheckBox) convertView.findViewById(R.id.checkBox1);
					convertView.setTag(viewHolder);
					viewHolder.checkbox.setTag(_oldSelectedItems[position]);
				}
				else {
					// Recycled row: refresh the tag for the new position.
					((ViewHolder) convertView.getTag()).checkbox.setTag(_oldSelectedItems[position]);
				}
				viewHolder = (ViewHolder) convertView.getTag();
				// Put the image on the TextView
				final Drawable image;
				Resources res = getResources();
				image = res.getDrawable(_materialItemArray.get(position).getIcon());
				int dp25 = (int) (25 * getResources().getDisplayMetrics().density + 0.5f);
				image.setBounds(0, 0, dp25, dp25);
				viewHolder.text.setCompoundDrawables(image, null, null, null);
				viewHolder.text.setText(_materialItemArray.get(position).getName());
				//Add margin between image and text (support various screen densities)
				int dp5 = (int) (5 * getResources().getDisplayMetrics().density + 0.5f);
				viewHolder.text.setCompoundDrawablePadding(dp5);
				//Ensure no other setOnCheckedChangeListener is attached before you manually change its state.
				viewHolder.checkbox.setOnCheckedChangeListener(null);
				if(_oldSelectedItems[position]) viewHolder.checkbox.setChecked(true);
				else viewHolder.checkbox.setChecked(false);
				viewHolder.checkbox.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
					public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
						if (isChecked) {
							// If the user checked the item, add it to the selected items
							Log.d("true","true");
							_seletedItems.add(position);
							_oldSelectedItems[position] = true;
							localSelectedItems[position] = 2;
						}
						else if (_seletedItems.contains(position)) {
							Log.d("false","false");
							// Else, if the item is already in the array, remove it
							_seletedItems.remove(Integer.valueOf(position));
							_oldSelectedItems[position] = false;
							localSelectedItems[position] = 1;
						}
					}
				});
				return convertView;
			}
		};
		materialDialogBuilder.setAdapter(adapter, null);
		// Set action for OK buttons
		materialDialogBuilder.setPositiveButton("OK", new DialogInterface.OnClickListener() {
			@Override
			public void onClick(DialogInterface dialog, int id) {
				// Build a comma-separated list from the committed selections.
				String oldString = "";
				for(int i=0; i<_materialNames.length; ++i) {
					String clickedMaterial = items[i].toString();
					if(_oldSelectedItems[i])
						oldString = oldString.equals("") ? clickedMaterial : oldString + ", " + clickedMaterial;
				}
				_materialEditText.setText(oldString);
				dialog.dismiss();
			}
		});
		// Set action for Cancel buttons
		materialDialogBuilder.setNegativeButton("Cancel", new DialogInterface.OnClickListener() {
			@Override
			public void onClick(DialogInterface dialog, int id) {
				// Roll back this session's changes using localSelectedItems,
				// then rebuild the text from the restored selections.
				String oldString = "";
				for(int i=0; i<_materialNames.length; ++i) {
					String clickedMaterial = items[i].toString();
					if(localSelectedItems[i] == 2)
						_oldSelectedItems[i] = false;
					if(localSelectedItems[i] == 1)
						_oldSelectedItems[i] = true;
					if(_oldSelectedItems[i])
						oldString = oldString.equals("") ? clickedMaterial : oldString + ", " + clickedMaterial;
				}
				_materialEditText.setText(oldString);
				dialog.dismiss();
			}
		});
		_materialListDialog = materialDialogBuilder.create();
		_materialListDialog.show();
	}
/**
* Check GPS status
* @return true if GPS is on
* @return false if GPS is off
*/
private boolean checkGpsStatus() {
ContentResolver contentResolver = MainActivity.this.getBaseContext().getContentResolver();
boolean gpsStatus = Settings.Secure.isLocationProviderEnabled(contentResolver, LocationManager.GPS_PROVIDER);
return gpsStatus;
}
/**
* Check mobile status
* @return true if mobile is on
* @return false if mobile is off
*/
private boolean checkMobileStatus() {
ContentResolver contentResolver = MainActivity.this.getBaseContext().getContentResolver();
boolean mobileStatus = Settings.Secure.isLocationProviderEnabled(contentResolver, LocationManager.NETWORK_PROVIDER);
return mobileStatus;
}
/**
* Get current location using GPS and display the address to _locationAutoCompleteTextView
*/
private void getCurrentLocation() {
_locationManager = (LocationManager)this.getSystemService(LOCATION_SERVICE);
Location location = null;
if (checkMobileStatus()) {
_locationManager.requestLocationUpdates(LocationManager.NETWORK_PROVIDER, 15000, 10, new LocationListener() {
@Override
public void onStatusChanged(String provider, int status, Bundle extras) {
Log.d(TAG, "begin onStatusChanged");
Log.d(TAG, "provider: " + provider);
Log.d(TAG, "status: " + status);
Log.d(TAG, "extras: " + extras.describeContents());
Log.d(TAG, "end onStatusChanged");
}
@Override
public void onProviderEnabled(String provider) {
Log.d(TAG, "begin onProviderEnabled");
Log.d(TAG, "provider: " + provider);
Log.d(TAG, "end onProviderEnabled");
}
@Override
public void onProviderDisabled(String provider) {
Log.d(TAG, "begin onProviderDisabled");
Log.d(TAG, "provider: " + provider);
Log.d(TAG, "end onProviderDisabled");
}
@Override
public void onLocationChanged(Location location) {
Log.d(TAG, "begin onLocationChanged");
Log.d(TAG, "provider: " + location.describeContents());
_currentLat = location.getLatitude();
_currentLong = location.getLongitude();
Log.d(TAG, "end onLocationChanged");
}
});
Log.d("Network", "Network Enabled");
if (_locationManager != null) {
location = _locationManager.getLastKnownLocation(LocationManager.NETWORK_PROVIDER);
if (location != null) {
_currentLat = location.getLatitude();
_currentLong = location.getLongitude();
}
}
}
if (checkGpsStatus()) {
if (location == null) {
_locationManager.requestLocationUpdates(LocationManager.GPS_PROVIDER, 15000, 10, new LocationListener() {
@Override
public void onStatusChanged(String provider, int status, Bundle extras) {
Log.d(TAG, "begin onStatusChanged");
Log.d(TAG, "provider: " + provider);
Log.d(TAG, "status: " + status);
Log.d(TAG, "extras: " + extras.describeContents());
Log.d(TAG, "end onStatusChanged");
}
@Override
public void onProviderEnabled(String provider) {
Log.d(TAG, "begin onProviderEnabled");
Log.d(TAG, "provider: " + provider);
Log.d(TAG, "end onProviderEnabled");
}
@Override
public void onProviderDisabled(String provider) {
Log.d(TAG, "begin onProviderDisabled");
Log.d(TAG, "provider: " + provider);
Log.d(TAG, "end onProviderDisabled");
}
@Override
public void onLocationChanged(Location location) {
Log.d(TAG, "begin onLocationChanged");
Log.d(TAG, "provider: " + location.describeContents());
_currentLat = location.getLatitude();
_currentLong = location.getLongitude();
Log.d(TAG, "end onLocationChanged");
}
});
Log.d(TAG, "GPS Enabled");
if (_locationManager != null) {
location = _locationManager.getLastKnownLocation(LocationManager.GPS_PROVIDER);
if (location != null) {
_currentLat = location.getLatitude();
_currentLong = location.getLongitude();
}
}
}
}
// Convert coordinates to address
try {
List<Address> returnedAddress = _geocoder.getFromLocation(_currentLat, _currentLong, 5);
for(Address addr : returnedAddress) {
Log.d(TAG, "returnedAddress: " + addr.getAddressLine(0) + ", "
+ addr.getAddressLine(1) + ", "
+ addr.getAddressLine(2));
}
Address currentAddress = returnedAddress.get(0);
_locationAutoCompleteTextView.setText(currentAddress.getAddressLine(0) + ", "
+ currentAddress.getAddressLine(1) + ", "
+ currentAddress.getAddressLine(2));
}
catch (IOException e) {
showErrorDialog("Error getting current location.");
Log.e(TAG, "Error getting current location", e);
}
}
/**
* show error dialog when exception occurs
*/
private void showErrorDialog(String errorMessage) {
Log.d(TAG, "called");
final AlertDialog.Builder errorDialogBuilder = new AlertDialog.Builder(MainActivity.this);
errorDialogBuilder.setTitle("Error");
errorDialogBuilder.setMessage(errorMessage + "\nPlease try again.");
errorDialogBuilder.setNeutralButton("Ok", new AlertDialog.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
}
});
final AlertDialog errorDialog = errorDialogBuilder.create();
errorDialog.show();
}
/**
* Show locationDialog
*/
private void showLocationDialog() {
final AlertDialog.Builder locationDialogBuilder = new AlertDialog.Builder(MainActivity.this);
locationDialogBuilder.setTitle("Use current location");
locationDialogBuilder.setMessage("You did not enter an address. Would you like to use your current location?");
locationDialogBuilder.setNegativeButton("No", new AlertDialog.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
}
});
locationDialogBuilder.setPositiveButton("Yes", new AlertDialog.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
if(checkGpsStatus()) {
MainActivity.this.getCurrentLocation();
_currentLocationCheckBox.setChecked(true);
}
else {
dialog.dismiss();
showGpsDialog();
_currentLocationCheckBox.setChecked(false);
}
}
});
final AlertDialog locationDialog = locationDialogBuilder.create();
locationDialog.show();
}
/**
* Show GPS Settings dialog
*/
private void showGpsDialog() {
final AlertDialog.Builder gpsDialogBuilder = new AlertDialog.Builder(MainActivity.this);
gpsDialogBuilder.setTitle("Turn on GPS");
gpsDialogBuilder.setMessage("Please turn on your GPS and try again.");
gpsDialogBuilder.setNegativeButton("Try again", new AlertDialog.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
_currentLocationCheckBox.setChecked(false);
}
});
gpsDialogBuilder.setPositiveButton("Go to Settings", new AlertDialog.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
Log.d(TAG, "GPS Settings");
startActivity(new Intent(Settings.ACTION_LOCATION_SOURCE_SETTINGS));
MainActivity.this.recreate();
}
});
final AlertDialog gpsDialog = gpsDialogBuilder.create();
gpsDialog.show();
}
/**
* Class to run HTTP network requests in a worker thread. Necessary to
* keep the UI interactive.
*
* Types specified are <Argument Type, Progress Update Type, Return Type>
*/
private class NetworkRequestTask extends AsyncTask<String, Float, ArrayList<FacilityItem>> {
private static final String TAG = "MainActivity.NetworkRequestTask";
@Override
protected ArrayList<FacilityItem> doInBackground(String... materials) {
Log.d(TAG, "begin doInBackground");
Model m = new Model(_currentLat, _currentLong);
Log.d(TAG, "end doInBackground");
return m.getFacilities(materials);
}
@Override
protected void onPreExecute() {
_searchButton.startAnimation(AnimationUtils.loadAnimation(MainActivity.this, R.anim.rotate));
}
/**
* Invoked in asynchronously in MainActivity when the network request
* has finished and doInBackground returns its result.
*/
@Override
protected void onPostExecute(ArrayList<FacilityItem> facilities) {
Log.d(TAG, "begin onPostExecute");
// Start the ResultListActivity
Intent resultIntent = new Intent(MainActivity.this, ResultListActivity.class);
resultIntent.putParcelableArrayListExtra("RETURNED_RESULT", (ArrayList<? extends Parcelable>) facilities);
resultIntent.putExtra("CURRENT_LAT", _currentLat);
resultIntent.putExtra("CURRENT_LONG", _currentLong);
MainActivity.this.startActivity(resultIntent);
Log.d(TAG, "end onPostExecute");
}
}
/**
* Asynchronously get place suggestions from Google
*/
private class PlacesTask extends AsyncTask<String, Void, ArrayList<String>> {
		private static final String TAG = "MainActivity.PlacesTask";
		// Google Places web-service endpoint pieces, concatenated per request.
		private static final String PLACES_API_BASE = "https://maps.googleapis.com/maps/api/place";
		private static final String TYPE_AUTOCOMPLETE = "/autocomplete";
		private static final String OUT_JSON = "/json";
		// SECURITY NOTE(review): API key is hard-coded in source; consider moving
		// it out of the repository and restricting it in the Google console.
		private static final String API_KEY = "AIzaSyCnLUmKZNvy5P7R2p1RJw2fd4VGNRbcJBU";
protected ArrayList<String> doInBackground(String... input) {
Log.d(TAG, "Async PlacesTask doInBackground(): ");
ArrayList<String> resultList = null;
HttpURLConnection conn = null;
StringBuilder jsonResults = new StringBuilder();
try {
StringBuilder sb = new StringBuilder(PLACES_API_BASE + TYPE_AUTOCOMPLETE + OUT_JSON);
sb.append("?sensor=true");
sb.append("&components=country:us");
sb.append("&input=" + URLEncoder.encode(input[0], "utf8"));
sb.append("&country=austin");
sb.append("&types=geocode");
sb.append("&key=" + API_KEY);
URL url = new URL(sb.toString());
conn = (HttpURLConnection) url.openConnection();
InputStreamReader in = new InputStreamReader(conn.getInputStream());
// Load the results into a StringBuilder
int read;
char[] buff = new char[1024];
while ((read = in.read(buff)) != -1) {
jsonResults.append(buff, 0, read);
}
}
catch (MalformedURLException e) {
showErrorDialog("Error connecting to Google.");
Log.e(TAG, "Error processing Places API URL", e);
}
catch (IOException e) {
showErrorDialog("Error connecting to Google.");
Log.e(TAG, "Error connecting to Places API", e);
}
catch(Exception e) {
showErrorDialog("Error connecting to Google.");
Log.e(TAG, "Error connecting to Places API", e);
}
finally {
if (conn != null) {
conn.disconnect();
}
}
try {
// Create a JSON object hierarchy from the results
JSONObject jsonObj = new JSONObject(jsonResults.toString());
JSONArray predsJsonArray = jsonObj.getJSONArray("predictions");
// Extract the Place descriptions from the results
resultList = new ArrayList<String>(predsJsonArray.length());
for (int i = 0; i < predsJsonArray.length(); i++) {
resultList.add(predsJsonArray.getJSONObject(i).getString("description"));
}
}
catch (JSONException e) {
showErrorDialog("Error connecting to Google.");
Log.e(TAG, "Cannot process JSON results", e);
}
return resultList;
}
protected void onPostExecute(ArrayList<String> resultList) {
Log.d(TAG, "Async PlacesTask onPostExecute(): ");
LocationAutoCompleteAdapter locAdapter = new LocationAutoCompleteAdapter(MainActivity.this, R.layout.location_list_item, resultList);
_locationAutoCompleteTextView.setAdapter(locAdapter);
_locationAutoCompleteTextView.showDropDown();
}
}
}
| |
/*
* Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.flakeidgen.impl;
import com.hazelcast.cluster.Address;
import com.hazelcast.cluster.impl.MemberImpl;
import com.hazelcast.config.Config;
import com.hazelcast.config.FlakeIdGeneratorConfig;
import com.hazelcast.core.HazelcastException;
import com.hazelcast.flakeidgen.impl.FlakeIdGeneratorProxy.IdBatchAndWaitTime;
import com.hazelcast.internal.cluster.ClusterService;
import com.hazelcast.logging.ILogger;
import com.hazelcast.logging.Logger;
import com.hazelcast.spi.impl.NodeEngine;
import com.hazelcast.test.HazelcastSerialClassRunner;
import com.hazelcast.test.annotation.ParallelJVMTest;
import com.hazelcast.test.annotation.QuickTest;
import com.hazelcast.version.MemberVersion;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import java.net.UnknownHostException;
import java.util.Date;
import java.util.Iterator;
import java.util.UUID;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicInteger;
import static com.hazelcast.config.FlakeIdGeneratorConfig.DEFAULT_ALLOWED_FUTURE_MILLIS;
import static com.hazelcast.config.FlakeIdGeneratorConfig.DEFAULT_BITS_NODE_ID;
import static com.hazelcast.config.FlakeIdGeneratorConfig.DEFAULT_BITS_SEQUENCE;
import static com.hazelcast.config.FlakeIdGeneratorConfig.DEFAULT_EPOCH_START;
import static com.hazelcast.flakeidgen.impl.FlakeIdGeneratorProxy.NODE_ID_UPDATE_INTERVAL_NS;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@link FlakeIdGeneratorProxy}: node-id caching and refresh,
 * id bit layout (timestamp / sequence / node-id fields), allowed-time-range
 * checks, and wait-time (back-pressure) computation for large batches.
 */
@RunWith(HazelcastSerialClassRunner.class)
@Category({QuickTest.class, ParallelJVMTest.class})
public class FlakeIdGeneratorProxyTest {

    /** Available number of IDs per second from a single member. */
    private static final int IDS_PER_SECOND = 1 << DEFAULT_BITS_SEQUENCE;

    private static final int MAX_BIT_LENGTH = 63;

    /** Bits left for the timestamp once node-id and sequence bits are allocated. */
    private static final long DEFAULT_BITS_TIMESTAMP = MAX_BIT_LENGTH - (DEFAULT_BITS_NODE_ID + DEFAULT_BITS_SEQUENCE);

    private static final ILogger LOG = Logger.getLogger(FlakeIdGeneratorProxyTest.class);

    @Rule
    public ExpectedException exception = ExpectedException.none();

    private FlakeIdGeneratorProxy gen;
    private ClusterService clusterService;

    @Before
    public void before() {
        initialize(new FlakeIdGeneratorConfig());
    }

    /**
     * (Re)creates {@link #gen} backed by mocked node/cluster services using
     * the given generator config (registered under the name "foo").
     */
    public void initialize(FlakeIdGeneratorConfig config) {
        ILogger logger = mock(ILogger.class);
        clusterService = mock(ClusterService.class);
        NodeEngine nodeEngine = mock(NodeEngine.class);
        FlakeIdGeneratorService service = mock(FlakeIdGeneratorService.class);
        when(nodeEngine.getLogger(FlakeIdGeneratorProxy.class)).thenReturn(logger);
        when(nodeEngine.isRunning()).thenReturn(true);
        config.setName("foo");
        when(nodeEngine.getConfig()).thenReturn(new Config().addFlakeIdGeneratorConfig(config));
        when(nodeEngine.getClusterService()).thenReturn(clusterService);
        Address address;
        try {
            address = new Address("127.0.0.1", 5701);
        } catch (UnknownHostException e) {
            // A literal loopback address should never fail to resolve; fail
            // fast instead of continuing with a null address (previously the
            // exception was swallowed, deferring failure to an obscure NPE).
            throw new IllegalStateException("could not create loopback address", e);
        }
        when(nodeEngine.getLocalMember()).thenReturn(new MemberImpl(address, MemberVersion.UNKNOWN, true, UUID.randomUUID()));
        UUID source = nodeEngine.getLocalMember().getUuid();
        gen = new FlakeIdGeneratorProxy("foo", nodeEngine, service, source);
    }

    /** Cached node id is only refreshed after NODE_ID_UPDATE_INTERVAL_NS. */
    @Test
    public void when_nodeIdUpdated_then_pickedUpAfterUpdateInterval() {
        when(clusterService.getMemberListJoinVersion()).thenReturn(20);
        assertEquals(20, gen.getNodeId(0));
        when(clusterService.getMemberListJoinVersion()).thenReturn(30);
        assertEquals(20, gen.getNodeId(0));
        assertEquals(20, gen.getNodeId(NODE_ID_UPDATE_INTERVAL_NS - 1));
        assertEquals(30, gen.getNodeId(NODE_ID_UPDATE_INTERVAL_NS));
    }

    /** Concurrent first reads must never observe the "not yet set" sentinel. */
    @Test
    public void givenNodeIdUninitialized_whenNodeIdRequestedConcurrently_thenItNeverReturnUninitializedId() throws Exception {
        when(clusterService.getMemberListJoinVersion()).thenReturn(20);
        int threadCount = 20;
        int iterationCount = 5_000;
        ExecutorService executorService = Executors.newFixedThreadPool(threadCount);
        AtomicInteger errorCounter = new AtomicInteger();
        FlakeIdGeneratorConfig genConfig = new FlakeIdGeneratorConfig();
        for (int i = 0; i < iterationCount; i++) {
            initialize(genConfig);
            FlakeIdGeneratorProxy localGen = gen;
            Runnable getNodeId = () -> {
                if (localGen.getNodeId(0) == -1) { // see FlakeIdGeneratorProxy#NODE_ID_NOT_YET_SET
                    errorCounter.incrementAndGet();
                }
            };
            for (int z = 0; z < threadCount; z++) {
                executorService.submit(getNodeId);
            }
        }
        executorService.shutdown();
        // Assert the pool actually drained; otherwise errorCounter could be
        // read before all tasks ran (return value was previously ignored).
        assertTrue("executor did not terminate in time", executorService.awaitTermination(30, SECONDS));
        assertEquals(0, errorCounter.get());
    }

    /** At exactly the epoch start the timestamp field is 0 -> id == node id. */
    @Test
    public void test_timeLowPositiveEdge() {
        long id = gen.newIdBaseLocal(DEFAULT_EPOCH_START, 1234, 10).idBatch.base();
        assertEquals(1234L, id);
    }

    /** A mid-range timestamp produces the expected composed id. */
    @Test
    public void test_timeMiddle() {
        long id = gen.newIdBaseLocal(1516028439000L, 1234, 10).idBatch.base();
        assertEquals(5300086112257234L, id);
    }

    /** Highest representable timestamp still yields a positive id. */
    @Test
    public void test_timeHighEdge() {
        IdBatchAndWaitTime result = gen.newIdBaseLocal(DEFAULT_EPOCH_START + (1L << DEFAULT_BITS_TIMESTAMP) - 1L, 1234, 10);
        assertEquals(9223372036850582738L, result.idBatch.base());
    }

    /** A timestamp just before the epoch start produces a negative id. */
    @Test
    public void test_negativeId() {
        long id = gen.newIdBaseLocal(DEFAULT_EPOCH_START - 1, 1234, 10).idBatch.base();
        assertEquals((-1 << DEFAULT_BITS_SEQUENCE + DEFAULT_BITS_NODE_ID) + 1234, id);
    }

    /** The lowest representable timestamp maps to the bottom of the long range. */
    @Test
    public void test_lowNegativeEdge() {
        long timestamp = -(1L << DEFAULT_BITS_TIMESTAMP);
        long id = gen.newIdBaseLocal(DEFAULT_EPOCH_START + timestamp, 1234, 10).idBatch.base();
        assertEquals(Long.MIN_VALUE + 1234, id);
    }

    /** Ids generated at increasing times must be strictly increasing. */
    @Test
    public void test_idsOrdered() {
        long lastId = -1;
        for (long now = DEFAULT_EPOCH_START;
             now < DEFAULT_EPOCH_START + (1L << DEFAULT_BITS_TIMESTAMP);
             now += 365L * 24L * 60L * 60L * 1000L) {
            long base = gen.newIdBaseLocal(now, 1234, 1).idBatch.base();
            LOG.info("at " + new Date(now) + ", id=" + base);
            assertTrue("lastId=" + lastId + ", newId=" + base, lastId < base);
            lastId = base;
        }
    }

    /** One tick below the allowed range must be rejected. */
    @Test
    public void when_currentTimeBeforeAllowedRange_then_fail() {
        long lowestGoodTimestamp = DEFAULT_EPOCH_START - (1L << DEFAULT_BITS_TIMESTAMP);
        gen.newIdBaseLocal(lowestGoodTimestamp, 0, 1);
        exception.expect(HazelcastException.class);
        exception.expectMessage("Current time out of allowed range");
        gen.newIdBaseLocal(lowestGoodTimestamp - 1, 0, 1);
    }

    /** One tick above the allowed range must be rejected. */
    @Test
    public void when_currentTimeAfterAllowedRange_then_fail() {
        gen.newIdBaseLocal(DEFAULT_EPOCH_START + (1L << DEFAULT_BITS_TIMESTAMP) - 1, 0, 1);
        exception.expect(HazelcastException.class);
        exception.expectMessage("Current time out of allowed range");
        gen.newIdBaseLocal(DEFAULT_EPOCH_START + (1L << DEFAULT_BITS_TIMESTAMP), 0, 1);
    }

    /** Same millisecond -> second id differs only in the sequence field. */
    @Test
    public void when_twoIdsAtTheSameMoment_then_higherSeq() {
        long id1 = gen.newIdBaseLocal(1516028439000L, 1234, 1).idBatch.base();
        long id2 = gen.newIdBaseLocal(1516028439000L, 1234, 1).idBatch.base();
        assertEquals(5300086112257234L, id1);
        assertEquals(id1 + (1 << DEFAULT_BITS_NODE_ID), id2);
    }

    /** Configured node-id offset is added to the member-list join version. */
    @Test
    public void test_positiveNodeIdOffset() {
        int nodeIdOffset = 5;
        int memberListJoinVersion = 20;
        initialize(new FlakeIdGeneratorConfig().setNodeIdOffset(nodeIdOffset));
        when(clusterService.getMemberListJoinVersion()).thenReturn(memberListJoinVersion);
        assertEquals((memberListJoinVersion + nodeIdOffset), gen.getNodeId(0));
    }

    /** Custom sequence/node-id bit widths are reflected in the id layout. */
    @Test
    public void when_customBits_then_used() {
        int bitsSequence = 10;
        int bitsNodeId = 11;
        initialize(new FlakeIdGeneratorConfig()
                .setBitsSequence(bitsSequence)
                .setBitsNodeId(bitsNodeId)
                .setEpochStart(0));
        Iterator<Long> result = gen.newIdBaseLocal(1, 1234, 2).idBatch.iterator();
        long expected = (1L << bitsSequence + bitsNodeId) + 1234;
        assertEquals(expected, result.next().longValue());
        expected += 1L << bitsNodeId;
        assertEquals(expected, result.next().longValue());
    }

    /** A configured epoch start shifts the timestamp field accordingly. */
    @Test
    public void when_epochStart_then_used() {
        int epochStart = 456;
        int timeSinceEpochStart = 1;
        initialize(new FlakeIdGeneratorConfig().setEpochStart(epochStart));
        long id = gen.newIdBaseLocal(epochStart + timeSinceEpochStart, 1234, 10).idBatch.base();
        assertEquals((timeSinceEpochStart << DEFAULT_BITS_SEQUENCE + DEFAULT_BITS_NODE_ID) + 1234, id);
    }

    // #### Tests pertaining to wait time ####

    /** Small batches never require the caller to wait. */
    @Test
    public void when_fewIds_then_noWaitTime() {
        assertEquals(0, gen.newIdBaseLocal(1516028439000L, 1234, 100).waitTimeMillis);
    }

    /** A batch at exactly the allowed-future limit incurs no wait. */
    @Test
    public void when_maximumAllowedFuture_then_noWaitTime() {
        IdBatchAndWaitTime result = gen.newIdBaseLocal(1516028439000L, 1234, (int) (IDS_PER_SECOND * DEFAULT_ALLOWED_FUTURE_MILLIS));
        assertEquals(0, result.waitTimeMillis);
    }

    /** One second's worth beyond the limit incurs exactly 1 ms of wait. */
    @Test
    public void when_maximumAllowedFuturePlusOne_then_1msWaitTime() {
        int batchSize = (int) (IDS_PER_SECOND * DEFAULT_ALLOWED_FUTURE_MILLIS) + IDS_PER_SECOND;
        IdBatchAndWaitTime result = gen.newIdBaseLocal(1516028439000L, 1234, batchSize);
        assertEquals(1, result.waitTimeMillis);
    }

    /** One oversized batch is charged its full wait time at once. */
    @Test
    public void when_10mIdsInOneBatch_then_wait() {
        int batchSize = 10_000_000;
        IdBatchAndWaitTime result = gen.newIdBaseLocal(1516028439000L, 1234, batchSize);
        assertEquals(batchSize / IDS_PER_SECOND - DEFAULT_ALLOWED_FUTURE_MILLIS, result.waitTimeMillis);
    }

    /** Many small batches accumulate the same wait as one big batch. */
    @Test
    public void when_10mIdsInSmallBatches_then_wait() {
        int batchSize = 1000;
        for (int numIds = 0; numIds < 10_000_000; numIds += batchSize) {
            IdBatchAndWaitTime result = gen.newIdBaseLocal(1516028439000L, 1234, batchSize);
            assertEquals(Math.max(0, (numIds + batchSize) / IDS_PER_SECOND - DEFAULT_ALLOWED_FUTURE_MILLIS), result.waitTimeMillis);
        }
    }
}
| |
// ----------------------------------------------------------------------------
// Copyright 2006-2010, GeoTelematic Solutions, Inc.
// All rights reserved
// ----------------------------------------------------------------------------
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// ----------------------------------------------------------------------------
// Change History:
// 2008/08/08 Martin D. Flynn
// -Initial release
// 2008/08/17 Martin D. Flynn
// -Added "Distance" title line (below "Cursor Location")
// -Fix display of View/Edit buttons on creation of first user.
// 2008/09/01 Martin D. Flynn
// -Added delete confirmation
// 2008/10/16 Martin D. Flynn
// -Update with new ACL usage
// 2008/12/01 Martin D. Flynn
// -Added ability to display multiple points
// 2009/08/23 Martin D. Flynn
// -Convert new entered IDs to lowercase
// 2010/04/11 Martin D. Flynn
// -Added support for drawing polygons and corridors, however, MapProvider
// support for these features is also required (and in the case of corridors,
// may also require add-on module support to use the corridor geozones
// properly).
// ----------------------------------------------------------------------------
package org.opengts.war.track.page;
import java.util.*;
import java.io.*;
import javax.servlet.*;
import javax.servlet.http.*;
import org.opengts.util.*;
import org.opengts.dbtools.*;
import org.opengts.db.*;
import org.opengts.db.tables.*;
import org.opengts.geocoder.GeocodeProvider;
import org.opengts.war.tools.*;
import org.opengts.war.track.*;
public class ZoneInfo
extends WebPageAdaptor
implements Constants
{
// ------------------------------------------------------------------------
// 'private.xml' properties
// PrivateLabel.PROP_ZoneInfo_mapControlLocation
// property values that place the map controls on the left side of the page
private static final String CONTROLS_ON_LEFT[] = new String[] { "left", "true" };
// ------------------------------------------------------------------------
// Geozone radius bounds (meters) and the default radius for a new zone
private static final double MIN_RADIUS_METERS = Geozone.MIN_RADIUS_METERS;
private static final double MAX_RADIUS_METERS = Geozone.MAX_RADIUS_METERS;
private static final double DEFAULT_ZONE_RADIUS = 20000.0;
// ------------------------------------------------------------------------
// selectable overlap-priority values (combo-box choices, lowest first)
private static final String OVERLAP_PRIORITY[] = new String[] { "0", "1", "2", "3", "4", "5" };
// ------------------------------------------------------------------------
// Parameters
// HTML form names
public static final String FORM_ZONE_SELECT = "ZoneInfoSelect";
public static final String FORM_ZONE_EDIT = "ZoneInfoEdit";
public static final String FORM_ZONE_NEW = "ZoneInfoNew";
// page commands (see writePage command dispatch)
public static final String COMMAND_INFO_UPDATE = "update";
public static final String COMMAND_INFO_SELECT = "select";
public static final String COMMAND_INFO_NEW = "new";
// submit-button request parameters
public static final String PARM_SUBMIT_EDIT = "z_subedit";
public static final String PARM_SUBMIT_VIEW = "z_subview";
public static final String PARM_SUBMIT_CHG = "z_subchg";
public static final String PARM_SUBMIT_DEL = "z_subdel";
public static final String PARM_SUBMIT_NEW = "z_subnew";
// navigation-button request parameters
public static final String PARM_BUTTON_CANCEL = "u_btncan";
public static final String PARM_BUTTON_BACK = "u_btnbak";
// "new Geozone" form parameters
public static final String PARM_NEW_ID = "z_newid";
public static final String PARM_NEW_TYPE = "z_newtype";
// Geozone select/edit form parameters
public static final String PARM_ZONE_SELECT = "z_zone";
public static final String PARM_PRIORITY = "z_priority";
public static final String PARM_REV_GEOCODE = "z_revgeo";
public static final String PARM_ARRIVE_NOTIFY = "z_arrive";
public static final String PARM_DEPART_NOTIFY = "z_depart";
public static final String PARM_CLIENT_UPLOAD = "z_upload";
public static final String PARM_CLIENT_ID = "z_clntid";
public static final String PARM_ZONE_DESC = "z_desc";
public static final String PARM_ZONE_RADIUS = "z_radius";
public static final String PARM_ZONE_INDEX = "z_index";
public static final String PARM_ZONE_COLOR = "z_color";
// prefix for per-vertex latitude parameters ("z_lat0", "z_lat1", ...)
public static final String PARM_ZONE_LATITUDE_ = "z_lat";
/**
 * Returns the request-parameter name for the latitude of geozone vertex
 * {@code ndx} (e.g. "z_lat0").
 * Widened from private to public for consistency with its
 * PARM_ZONE_LONGITUDE(int) counterpart (backward compatible).
 */
public static String PARM_ZONE_LATITUDE(int ndx)
{
    return PARM_ZONE_LATITUDE_ + ndx;
}
// prefix for per-vertex longitude parameters ("z_lon0", "z_lon1", ...)
public static final String PARM_ZONE_LONGITUDE_ = "z_lon";
/**
 * Returns the request-parameter name for the longitude of geozone vertex
 * {@code ndx} (e.g. "z_lon0").
 * (Dropped the redundant {@code final} modifier: static methods cannot be
 * overridden, so {@code final} had no effect.)
 */
public static String PARM_ZONE_LONGITUDE(int ndx)
{
    return PARM_ZONE_LONGITUDE_ + ndx;
}
// Geozone sort ID used for lookups (this page edits only sortID 0)
private static final int DEFAULT_SORT_ID = 0;
// default geozone vertex index when a single point is implied
private static final int DEFAULT_POINT_INDEX = 0;
// ------------------------------------------------------------------------
// WebPage interface
/**
 * Default constructor: registers this page under PAGE_ZONE_INFO on the
 * standard track base URI, sets the login/menu navigation path, and marks
 * the page as requiring a logged-in user.
 */
public ZoneInfo()
{
    this.setBaseURI(Track.BASE_URI());
    this.setPageName(PAGE_ZONE_INFO);
    this.setPageNavigation(new String[] { PAGE_LOGIN, PAGE_MENU_TOP });
    this.setLoginRequired(true);
}
// ------------------------------------------------------------------------
/** Returns the menu group this page appears under (the Admin menu). */
public String getMenuName(RequestProperties reqState)
{
    return MenuBar.MENU_ADMIN;
}
/** Returns the localized description shown for this page in the menu. */
public String getMenuDescription(RequestProperties reqState, String parentMenuName)
{
    I18N i18n = reqState.getPrivateLabel().getI18N(ZoneInfo.class);
    String desc = i18n.getString("ZoneInfo.editMenuDesc","View/Edit Geozone Information");
    return super._getMenuDescription(reqState, desc);
}
/** Returns the localized help text shown for this page in the menu. */
public String getMenuHelp(RequestProperties reqState, String parentMenuName)
{
    I18N i18n = reqState.getPrivateLabel().getI18N(ZoneInfo.class);
    String help = i18n.getString("ZoneInfo.editMenuHelp","View and Edit Geozone information");
    return super._getMenuHelp(reqState, help);
}
// ------------------------------------------------------------------------
/** Returns the localized description used in the navigation breadcrumb. */
public String getNavigationDescription(RequestProperties reqState)
{
    I18N i18n = reqState.getPrivateLabel().getI18N(ZoneInfo.class);
    String navDesc = i18n.getString("ZoneInfo.navDesc","Geozone Admin");
    return super._getNavigationDescription(reqState, navDesc);
}
/** Returns the localized label for this page's navigation tab. */
public String getNavigationTab(RequestProperties reqState)
{
    I18N i18n = reqState.getPrivateLabel().getI18N(ZoneInfo.class);
    String navTab = i18n.getString("ZoneInfo.navTab","Geozone Admin");
    return super._getNavigationTab(reqState, navTab);
}
// ------------------------------------------------------------------------
/**
 * Decodes the PROP_ZoneInfo_showClientUploadZone property value:
 * 0 = do not show, 1 = show checkbox only, 2 = show id field only.
 */
private static int parseClientUploadFlag(PrivateLabel privLabel)
{
    String flag = privLabel.getStringProperty(PrivateLabel.PROP_ZoneInfo_showClientUploadZone,"");
    if (flag.equalsIgnoreCase("true") || flag.equalsIgnoreCase("checkbox")) {
        return 1; // show checkbox only
    } else
    if (flag.equalsIgnoreCase("id")) {
        return 2; // show id field only
    } else {
        return 0; // blank, "false", or unrecognized: do not show
    }
}
/**
 * Builds the localized shape-color combo-box choices. The first entry is
 * the blank "Default" value; the remaining entries are fixed colors in
 * display order (order is preserved as the drop-down ordering).
 */
private static ComboMap GetColorComboMap(I18N i18n)
{
    ComboMap colorCombo = new ComboMap();
    colorCombo.add("", i18n.getString("ZoneInfo.color.default","Default"));
    colorCombo.add(ColorTools.BLACK.toString(true), i18n.getString("ZoneInfo.color.black" ,"Black" ));
    colorCombo.add(ColorTools.BROWN.toString(true), i18n.getString("ZoneInfo.color.brown" ,"Brown" ));
    colorCombo.add(ColorTools.RED.toString(true), i18n.getString("ZoneInfo.color.red" ,"Red" ));
    colorCombo.add(ColorTools.ORANGE.toString(true), i18n.getString("ZoneInfo.color.orange" ,"Orange" ));
    colorCombo.add(ColorTools.YELLOW.toString(true), i18n.getString("ZoneInfo.color.yellow" ,"Yellow" ));
    colorCombo.add(ColorTools.GREEN.toString(true), i18n.getString("ZoneInfo.color.green" ,"Green" ));
    colorCombo.add(ColorTools.BLUE.toString(true), i18n.getString("ZoneInfo.color.blue" ,"Blue" ));
    colorCombo.add(ColorTools.PURPLE.toString(true), i18n.getString("ZoneInfo.color.purple" ,"Purple" ));
    colorCombo.add(ColorTools.DARK_GRAY.toString(true), i18n.getString("ZoneInfo.color.gray" ,"Gray" ));
    colorCombo.add(ColorTools.WHITE.toString(true), i18n.getString("ZoneInfo.color.white" ,"White" ));
    colorCombo.add(ColorTools.CYAN.toString(true), i18n.getString("ZoneInfo.color.cyan" ,"Cyan" ));
    colorCombo.add(ColorTools.PINK.toString(true), i18n.getString("ZoneInfo.color.pink" ,"Pink" ));
    return colorCombo;
}
public void writePage(
final RequestProperties reqState,
String pageMsg)
throws IOException
{
final HttpServletRequest request = reqState.getHttpServletRequest();
final PrivateLabel privLabel = reqState.getPrivateLabel(); // never null
final I18N i18n = privLabel.getI18N(ZoneInfo.class);
final Locale locale = reqState.getLocale();
final Account currAcct = reqState.getCurrentAccount(); // never null
final User currUser = reqState.getCurrentUser(); // may be null
final String pageName = this.getPageName();
final boolean showOverlapPriority = Geozone.supportsPriority() && privLabel.getBooleanProperty(PrivateLabel.PROP_ZoneInfo_showOverlapPriority,false);
final boolean showArriveDepartZone = Device.hasRuleFactory() || privLabel.getBooleanProperty(PrivateLabel.PROP_ZoneInfo_showArriveDepartZone,false);
final int showClientUploadZone = parseClientUploadFlag(privLabel);
String m = pageMsg;
boolean error = false;
/* list of geozones */
String zoneList[] = null;
try {
zoneList = Geozone.getGeozoneIDsForAccount(currAcct.getAccountID());
} catch (DBException dbe) {
zoneList = new String[0];
}
/* selected geozone */
String selZoneID = AttributeTools.getRequestString(reqState.getHttpServletRequest(), PARM_ZONE_SELECT, "");
if (StringTools.isBlank(selZoneID)) {
if ((zoneList.length > 0) && (zoneList[0] != null)) {
selZoneID = zoneList[0];
} else {
selZoneID = "";
}
//Print.logWarn("No Zone selected, choosing first zone: %s", selZoneID);
}
if (zoneList.length == 0) {
zoneList = new String[] { selZoneID };
}
/* Geozone db */
Geozone selZone = null;
try {
selZone = !selZoneID.equals("")? Geozone.getGeozone(currAcct,selZoneID,DEFAULT_SORT_ID,false) : null;
} catch (DBException dbe) {
// ignore
}
/* ACL */
boolean allowNew = privLabel.hasAllAccess(currUser, this.getAclName());
boolean allowDelete = allowNew;
boolean allowEdit = allowNew || privLabel.hasWriteAccess(currUser, this.getAclName());
boolean allowView = allowEdit || privLabel.hasReadAccess(currUser, this.getAclName());
/* command */
String zoneCmd = reqState.getCommandName();
boolean listZones = false;
boolean updateZone = zoneCmd.equals(COMMAND_INFO_UPDATE);
boolean selectZone = zoneCmd.equals(COMMAND_INFO_SELECT);
boolean newZone = zoneCmd.equals(COMMAND_INFO_NEW);
boolean deleteZone = false;
boolean editZone = false;
boolean viewZone = false;
/* submit buttons */
String submitEdit = AttributeTools.getRequestString(request, PARM_SUBMIT_EDIT, "");
String submitView = AttributeTools.getRequestString(request, PARM_SUBMIT_VIEW, "");
String submitChange = AttributeTools.getRequestString(request, PARM_SUBMIT_CHG , "");
String submitNew = AttributeTools.getRequestString(request, PARM_SUBMIT_NEW , "");
String submitDelete = AttributeTools.getRequestString(request, PARM_SUBMIT_DEL , "");
/* MapProvider support */
final MapProvider mapProvider = reqState.getMapProvider(); // check below to make sure this is not null
final boolean mapSupportsCursorLocation = ((mapProvider != null) && mapProvider.isFeatureSupported(MapProvider.FEATURE_LATLON_DISPLAY));
final boolean mapSupportsDistanceRuler = ((mapProvider != null) && mapProvider.isFeatureSupported(MapProvider.FEATURE_DISTANCE_RULER));
final boolean mapSupportsGeozones = ((mapProvider != null) && mapProvider.isFeatureSupported(MapProvider.FEATURE_GEOZONES));
/* sub-command */
String newZoneID = null;
int newZoneType = Geozone.GeozoneType.POINT_RADIUS.getIntValue(); // default
if (newZone) {
if (!allowNew) {
// not authorized to create new Geozones
Print.logInfo("Not authorized to create a new Geozone ...");
newZone = false;
} else {
HttpServletRequest httpReq = reqState.getHttpServletRequest();
newZoneID = AttributeTools.getRequestString(httpReq,PARM_NEW_ID,"").trim().toLowerCase();
newZoneType = AttributeTools.getRequestInt(httpReq,PARM_NEW_TYPE, newZoneType);
if (StringTools.isBlank(newZoneID)) {
m = i18n.getString("ZoneInfo.enterNewZone","Please enter a new Geozone name.");
error = true;
newZone = false;
} else
if (!WebPageAdaptor.isValidID(reqState, PrivateLabel.PROP_ZoneInfo_validateNewIDs, newZoneID)) {
m = i18n.getString("ZoneInfo.invalidIDChar","ID contains invalid characters");
error = true;
newZone = false;
}
}
} else
if (updateZone) {
if (!allowEdit) {
// not authorized to update Geozones
updateZone = false;
} else
if (!SubmitMatch(submitChange,i18n.getString("ZoneInfo.change","Change"))) {
updateZone = false;
} else
if (selZone == null) {
// should not occur
m = i18n.getString("ZoneInfo.unableToUpdate","Unable to update Geozone, ID not found");
error = true;
updateZone = false;
}
} else
if (selectZone) {
if (SubmitMatch(submitDelete,i18n.getString("ZoneInfo.delete","Delete"))) {
if (allowDelete) {
deleteZone = true;
}
} else
if (SubmitMatch(submitEdit,i18n.getString("ZoneInfo.edit","Edit"))) {
if (allowEdit) {
if (selZone == null) {
m = i18n.getString("ZoneInfo.pleaseSelectGeozone","Please select a Geozone");
error = true;
listZones = true;
} else {
editZone = true;
viewZone = true;
}
}
} else
if (SubmitMatch(submitView,i18n.getString("ZoneInfo.view","View"))) {
if (allowView) {
if (selZone == null) {
m = i18n.getString("ZoneInfo.pleaseSelectGeozone","Please select a Geozone");
error = true;
listZones = true;
} else {
viewZone = true;
}
}
} else {
listZones = true;
}
} else {
listZones = true;
}
/* delete Geozone? */
if (deleteZone) {
if (selZone == null) {
m = i18n.getString("ZoneInfo.pleaseSelectGeozone","Please select a Geozone");
error = true;
} else {
try {
Geozone.Key zoneKey = (Geozone.Key)selZone.getRecordKey();
Print.logWarn("Deleting Geozone: " + zoneKey);
zoneKey.delete(true); // will also delete dependencies
selZoneID = "";
selZone = null;
zoneList = Geozone.getGeozoneIDsForAccount(currAcct.getAccountID());
if ((zoneList != null) && (zoneList.length > 0)) {
selZoneID = zoneList[0];
try {
selZone = !selZoneID.equals("")?Geozone.getGeozone(currAcct,selZoneID,DEFAULT_SORT_ID,false):null;
} catch (DBException dbe) {
// ignore
}
}
} catch (DBException dbe) {
Print.logException("Deleting Geozone", dbe);
m = i18n.getString("ZoneInfo.errorDelete","Internal error deleting Geozone");
error = true;
}
}
listZones = true;
}
/* new Geozone? */
if (newZone) {
boolean createZoneOK = true;
//Print.logInfo("Creating new Geozone: %s", newZoneID);
for (int u = 0; u < zoneList.length; u++) {
if (newZoneID.equalsIgnoreCase(zoneList[u])) {
m = i18n.getString("ZoneInfo.alreadyExists","This Geozone already exists");
error = true;
createZoneOK = false;
break;
}
}
if (createZoneOK) {
try {
Geozone zone = Geozone.getGeozone(currAcct, newZoneID, DEFAULT_SORT_ID, true); // create
zone.setZoneType(newZoneType);
zone.setDefaultRadius(); // based on zone type
zone.save(); // needs to be saved to be created
zoneList = Geozone.getGeozoneIDsForAccount(currAcct.getAccountID());
selZone = zone;
selZoneID = selZone.getGeozoneID();
m = i18n.getString("ZoneInfo.createdZone","New Geozone has been created");
} catch (DBException dbe) {
Print.logException("Error Creating Geozone", dbe);
m = i18n.getString("ZoneInfo.errorCreate","Internal error creating Geozone");
error = true;
}
}
listZones = true;
}
/* change/update the Geozone info? */
if (updateZone) {
int zonePriority = StringTools.parseInt(AttributeTools.getRequestString(request,PARM_PRIORITY,null),0);
boolean zoneRevGeocode = !StringTools.isBlank(AttributeTools.getRequestString(request,PARM_REV_GEOCODE,null));
boolean zoneArrNotify = !StringTools.isBlank(AttributeTools.getRequestString(request,PARM_ARRIVE_NOTIFY,null));
boolean zoneDepNotify = !StringTools.isBlank(AttributeTools.getRequestString(request,PARM_DEPART_NOTIFY,null));
boolean zoneClientUpld = !StringTools.isBlank(AttributeTools.getRequestString(request,PARM_CLIENT_UPLOAD,null));
int zoneClientID = StringTools.parseInt(AttributeTools.getRequestString(request,PARM_CLIENT_ID,null),0);
long zoneRadius = StringTools.parseLong(AttributeTools.getRequestString(request,PARM_ZONE_RADIUS,null),100L);
String zoneColor = AttributeTools.getRequestString(request,PARM_ZONE_COLOR,null);
String zoneDesc = AttributeTools.getRequestString(request,PARM_ZONE_DESC,"");
//Print.logInfo("Updating Zone: %s - %s", selZoneID, zoneDesc);
try {
if (selZone != null) {
boolean saveOK = true;
// Overlap priority
if (showOverlapPriority) {
selZone.setPriority(zonePriority);
}
// ReverseGeocode
selZone.setReverseGeocode(zoneRevGeocode);
// Arrive/Depart notification
if (showArriveDepartZone) {
selZone.setArrivalZone(zoneArrNotify);
selZone.setDepartureZone(zoneDepNotify);
}
// Client upload zone
if (showClientUploadZone != 0) {
if (zoneClientID > 0) {
selZone.setClientUpload(true);
selZone.setClientID(zoneClientID);
} else
if (zoneClientUpld) {
selZone.setClientUpload(true);
selZone.setClientID(1);
} else {
selZone.setClientUpload(false);
selZone.setClientID(0);
}
}
// Radius (meters)
if (zoneRadius > 0L) {
selZone.setRadius((int)zoneRadius);
}
// description
if (!StringTools.isBlank(zoneColor)) {
selZone.setShapeColor(zoneColor);
}
// GeoPoints
selZone.clearGeoPoints();
int pointCount = (mapProvider != null)? mapProvider.getGeozoneSupportedPointCount(selZone.getZoneType()) : 0;
for (int z = 0, p = 0; z < pointCount; z++) {
double zoneLat = StringTools.parseDouble(AttributeTools.getRequestString(request,PARM_ZONE_LATITUDE (z),null),0.0);
double zoneLon = StringTools.parseDouble(AttributeTools.getRequestString(request,PARM_ZONE_LONGITUDE(z),null),0.0);
if (GeoPoint.isValid(zoneLat,zoneLon)) {
selZone.setGeoPoint(p++, zoneLat, zoneLon);
}
}
// description
if (!StringTools.isBlank(zoneDesc)) {
selZone.setDescription(zoneDesc);
}
// save
if (saveOK) {
selZone.save();
m = i18n.getString("ZoneInfo.zoneUpdated","Geozone information updated");
} else {
// error occurred, should stay on this page
editZone = true;
}
} else {
m = i18n.getString("ZoneInfo.noZones","There are currently no defined Geozones for this Account.");
}
} catch (Throwable t) {
Print.logException("Updating Geozone", t);
m = i18n.getString("ZoneInfo.errorUpdate","Internal error updating Geozone");
error = true;
}
listZones = true;
}
/* final vars */
final String _selZoneID = selZoneID;
final Geozone _selZone = selZone;
final String _zoneList[] = zoneList;
final boolean _allowEdit = allowEdit;
final boolean _allowView = allowView;
final boolean _allowNew = allowNew;
final boolean _allowDelete = allowDelete;
final boolean _editZone = _allowEdit && editZone;
final boolean _viewZone = _editZone || viewZone;
final boolean _listZones = listZones || (!_editZone && !_viewZone);
/* Style */
HTMLOutput HTML_CSS = new HTMLOutput() {
public void write(PrintWriter out) throws IOException {
String cssDir = ZoneInfo.this.getCssDirectory();
WebPageAdaptor.writeCssLink(out, reqState, "ZoneInfo.css", cssDir);
}
};
/* JavaScript */
HTMLOutput HTML_JS = new HTMLOutput() {
public void write(PrintWriter out) throws IOException {
MenuBar.writeJavaScript(out, pageName, reqState);
JavaScriptTools.writeJSInclude(out, JavaScriptTools.qualifyJSFileRef(SORTTABLE_JS));
if (!_listZones && mapSupportsGeozones) {
// MapProvider JavaScript
if (mapProvider != null) {
mapProvider.writeJavaScript(out, reqState);
}
/* start JavaScript */
JavaScriptTools.writeStartJavaScript(out);
// Geozone Javascript
double radiusMeters = DEFAULT_ZONE_RADIUS;
int zoneTypeInt = Geozone.GeozoneType.POINT_RADIUS.getIntValue(); // default
String zoneColor = "";
if (_selZone != null) {
zoneTypeInt = _selZone.getZoneType();
zoneColor = _selZone.getShapeColor();
radiusMeters = _selZone.getRadiusMeters(MIN_RADIUS_METERS,MAX_RADIUS_METERS);
}
MapDimension mapDim = (mapProvider != null)? mapProvider.getZoneDimension() : new MapDimension(0,0);
out.println("// Geozone vars");
out.println("jsvGeozoneMode = true;");
out.println("MAP_WIDTH = " + mapDim.getWidth() + ";");
out.println("MAP_HEIGHT = " + mapDim.getHeight() + ";");
JavaScriptTools.writeJSVar(out, "DEFAULT_ZONE_RADIUS", DEFAULT_ZONE_RADIUS);
JavaScriptTools.writeJSVar(out, "jsvZoneEditable" , _editZone);
JavaScriptTools.writeJSVar(out, "jsvShowVertices" , true);
JavaScriptTools.writeJSVar(out, "jsvZoneType" , zoneTypeInt);
JavaScriptTools.writeJSVar(out, "jsvZoneRadiusMeters", radiusMeters);
JavaScriptTools.writeJSVar(out, "jsvZoneColor" , zoneColor);
int pointCount = (mapProvider != null)? mapProvider.getGeozoneSupportedPointCount(zoneTypeInt) : 0;
out.write("// Geozone points\n");
JavaScriptTools.writeJSVar(out, "jsvZoneCount" , pointCount);
JavaScriptTools.writeJSVar(out, "jsvZoneIndex" , DEFAULT_POINT_INDEX);
out.write("var jsvZoneList = new Array(\n"); // consistent with JSMapPoint
for (int z = 0; z < pointCount; z++) {
GeoPoint gp = (_selZone != null)? _selZone.getGeoPoint(z) : null;
if (gp == null) { gp = GeoPoint.INVALID_GEOPOINT; }
out.write(" { lat:" + gp.getLatitude() + ", lon:" + gp.getLongitude() + " }");
if ((z+1) < pointCount) { out.write(","); }
out.write("\n");
}
out.write(" );\n");
/* end JavaScript */
JavaScriptTools.writeEndJavaScript(out);
/* Geozone.js */
JavaScriptTools.writeJSInclude(out, JavaScriptTools.qualifyJSFileRef("Geozone.js"));
}
}
};
/* Content */
final boolean mapControlsOnLeft =
ListTools.containsIgnoreCase(CONTROLS_ON_LEFT,privLabel.getStringProperty(PrivateLabel.PROP_ZoneInfo_mapControlLocation,""));
HTMLOutput HTML_CONTENT = new HTMLOutput(CommonServlet.CSS_CONTENT_FRAME, m) {
public void write(PrintWriter out) throws IOException {
String pageName = ZoneInfo.this.getPageName();
// frame header
//String menuURL = EncodeMakeURL(reqState,Track.BASE_URI(),PAGE_MENU_TOP);
String menuURL = privLabel.getWebPageURL(reqState, PAGE_MENU_TOP);
String editURL = ZoneInfo.this.encodePageURL(reqState);//,Track.BASE_URI());
String selectURL = ZoneInfo.this.encodePageURL(reqState);//,Track.BASE_URI());
String newURL = ZoneInfo.this.encodePageURL(reqState);//,Track.BASE_URI());
if (_listZones) {
// Geozone selection table (Select, Geozone ID, Zone Name)
String frameTitle = _allowEdit?
i18n.getString("ZoneInfo.list.viewEditZone","View/Edit Geozone Information") :
i18n.getString("ZoneInfo.list.viewZone","View Geozone Information");
out.write("<span class='"+CommonServlet.CSS_MENU_TITLE+"'>"+frameTitle+"</span><br/>\n");
out.write("<hr>\n");
// Geozone selection table (Select, Zone ID, Zone Name)
out.write("<h1 class='"+CommonServlet.CSS_ADMIN_SELECT_TITLE+"'>"+FilterText(i18n.getString("ZoneInfo.list.selectZone","Select a Geozone"))+":</h1>\n");
out.write("<div style='margin-left:25px;'>\n");
out.write("<form name='"+FORM_ZONE_SELECT+"' method='post' action='"+selectURL+"' target='_self'>"); // target='_top'
out.write("<input type='hidden' name='"+PARM_COMMAND+"' value='"+COMMAND_INFO_SELECT+"'/>");
out.write("<table class='"+CommonServlet.CSS_ADMIN_SELECT_TABLE+"' cellspacing=0 cellpadding=0 border=0>\n");
out.write(" <thead>\n");
out.write(" <tr class='"+CommonServlet.CSS_ADMIN_TABLE_HEADER_ROW+"'>\n");
out.write(" <th class='"+CommonServlet.CSS_ADMIN_TABLE_HEADER_COL_SEL+"'>"+FilterText(i18n.getString("ZoneInfo.list.select","Select"))+"</th>\n");
out.write(" <th class='"+CommonServlet.CSS_ADMIN_TABLE_HEADER_COL +"'>"+FilterText(i18n.getString("ZoneInfo.list.zoneID","Geozone ID"))+"</th>\n");
out.write(" <th class='"+CommonServlet.CSS_ADMIN_TABLE_HEADER_COL +"'>"+FilterText(i18n.getString("ZoneInfo.list.description","Description\n(Address)"))+"</th>\n");
if (showOverlapPriority) {
out.write(" <th class='"+CommonServlet.CSS_ADMIN_TABLE_HEADER_COL +"'>"+FilterText(i18n.getString("ZoneInfo.list.overlapPriority","Overlap\nPriority"))+"</th>\n");
}
out.write(" <th class='"+CommonServlet.CSS_ADMIN_TABLE_HEADER_COL +"'>"+FilterText(i18n.getString("ZoneInfo.list.zoneType","Zone\nType"))+"</th>\n");
out.write(" <th class='"+CommonServlet.CSS_ADMIN_TABLE_HEADER_COL +"'>"+FilterText(i18n.getString("ZoneInfo.list.revGeocode","Reverse\nGeocode"))+"</th>\n");
if (showArriveDepartZone) {
out.write(" <th class='"+CommonServlet.CSS_ADMIN_TABLE_HEADER_COL +"'>"+FilterText(i18n.getString("ZoneInfo.list.arriveZone","Arrival\nZone"))+"</th>\n");
out.write(" <th class='"+CommonServlet.CSS_ADMIN_TABLE_HEADER_COL +"'>"+FilterText(i18n.getString("ZoneInfo.list.departZone","Departure\nZone"))+"</th>\n");
}
if (showClientUploadZone == 1) {
out.write(" <th class='"+CommonServlet.CSS_ADMIN_TABLE_HEADER_COL +"'>"+FilterText(i18n.getString("ZoneInfo.list.clientUpload","Client\nUpload"))+"</th>\n");
} else
if (showClientUploadZone == 2) {
out.write(" <th class='"+CommonServlet.CSS_ADMIN_TABLE_HEADER_COL +"'>"+FilterText(i18n.getString("ZoneInfo.list.clientUploadID","Client\nUpload ID"))+"</th>\n");
}
out.write(" <th class='"+CommonServlet.CSS_ADMIN_TABLE_HEADER_COL +"'>"+FilterText(i18n.getString("ZoneInfo.list.radiusMeters","Radius\n(meters)"))+"</th>\n");
out.write(" <th class='"+CommonServlet.CSS_ADMIN_TABLE_HEADER_COL +"'>"+FilterText(i18n.getString("ZoneInfo.list.centerPoint","Center\nLatitude/Longitude"))+"</th>\n");
out.write(" </tr>\n");
out.write(" </thead>\n");
/* geozone list */
out.write(" <tbody>\n");
int pointRadiusType = Geozone.GeozoneType.POINT_RADIUS.getIntValue();
int polygonType = Geozone.GeozoneType.POLYGON.getIntValue();
int corridorType = Geozone.GeozoneType.SWEPT_POINT_RADIUS.getIntValue();
for (int z = 0, r = 0; z < _zoneList.length; z++) {
/* get Geozone */
Geozone zone = null;
try {
zone = Geozone.getGeozone(currAcct, _zoneList[z], DEFAULT_SORT_ID, false);
} catch (DBException dbe) {
// error
}
if (zone == null) {
continue; // skip
}
/* geozone vars */
int zoneTypeInt = zone.getZoneType();
String zoneID = FilterText(zone.getGeozoneID());
String zoneDesc = FilterText(zone.getDescription());
String zoneTypeStr = FilterText(zone.getZoneTypeDescription(locale));
String zoneRevGeo = FilterText(ComboOption.getYesNoText(locale,zone.getReverseGeocode()));
String zoneRadius = zone.hasRadius()? String.valueOf(zone.getRadius()) : "--";
GeoPoint centerPt = zone.getGeoPoint(DEFAULT_POINT_INDEX); // may be null if invalid
if (centerPt == null) { centerPt = new GeoPoint(0.0, 0.0); }
String zoneCenter = centerPt.getLatitudeString("5",null) + " "+GeoPoint.PointSeparator+" " + centerPt.getLongitudeString("5",null);
String checked = _selZoneID.equals(zone.getGeozoneID())? "checked" : "";
String styleClass = ((r++ & 1) == 0)? CommonServlet.CSS_ADMIN_TABLE_BODY_ROW_ODD : CommonServlet.CSS_ADMIN_TABLE_BODY_ROW_EVEN;
int pointCount = (mapProvider != null)? mapProvider.getGeozoneSupportedPointCount(zoneTypeInt) : 0;
String typeColor = (pointCount > 0)? "black" : "red";
out.write(" <tr class='" + styleClass + "'>\n");
out.write(" <td class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_COL_SEL+"' "+SORTTABLE_SORTKEY+"='"+z+"'>");
if (pointCount <= 0) {
out.write(" "); // not supported
} else
if ((zoneTypeInt == pointRadiusType) || (zoneTypeInt == polygonType) || (zoneTypeInt == corridorType)) {
out.write("<input type='radio' name='"+PARM_ZONE_SELECT+"' id='"+zoneID+"' value='"+zoneID+"' "+checked+">");
} else {
out.write(" "); // unrecognized type
}
out.write( "</td>\n");
out.write(" <td class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_COL +"' nowrap><label for='"+zoneID+"'>"+zoneID+"</label></td>\n");
out.write(" <td class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_COL +"' nowrap>"+zoneDesc+"</td>\n");
if (showOverlapPriority) {
String zonePriority = FilterText(String.valueOf(zone.getPriority()));
out.write(" <td class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_COL +"' nowrap>"+zonePriority+"</td>\n");
}
out.write(" <td class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_COL +"' nowrap style='color:"+typeColor+"'>"+zoneTypeStr+"</td>\n");
out.write(" <td class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_COL +"' nowrap>"+zoneRevGeo+"</td>\n");
if (showArriveDepartZone) {
String zoneArrNtfy = FilterText(ComboOption.getYesNoText(locale,zone.getArrivalZone()));
String zoneDepNtfy = FilterText(ComboOption.getYesNoText(locale,zone.getDepartureZone()));
out.write(" <td class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_COL +"' nowrap>"+zoneArrNtfy+"</td>\n");
out.write(" <td class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_COL +"' nowrap>"+zoneDepNtfy+"</td>\n");
}
if (showClientUploadZone == 1) {
String zoneUpload = FilterText(ComboOption.getYesNoText(locale,zone.getClientUpload()||(zone.getClientID() > 0)));
out.write(" <td class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_COL +"' nowrap>"+zoneUpload+"</td>\n");
} else
if (showClientUploadZone == 2) {
String zoneUpldID = (zone.getClientID() > 0)? String.valueOf(zone.getClientID()) : "--";
out.write(" <td class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_COL +"' nowrap>"+zoneUpldID+"</td>\n");
}
out.write(" <td class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_COL +"' nowrap>"+zoneRadius+"</td>\n");
out.write(" <td class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_COL +"' nowrap>"+zoneCenter+"</td>\n");
out.write(" </tr>\n");
}
out.write(" </tbody>\n");
out.write("</table>\n");
out.write("<table cellpadding='0' cellspacing='0' border='0' style='width:95%; margin-top:5px; margin-left:5px; margin-bottom:5px;'>\n");
out.write("<tr>\n");
if (_allowView ) {
out.write("<td style='padding-left:5px;'>");
out.write("<input type='submit' name='"+PARM_SUBMIT_VIEW+"' value='"+i18n.getString("ZoneInfo.list.view","View")+"'>");
out.write("</td>\n");
}
if (_allowEdit ) {
out.write("<td style='padding-left:5px;'>");
out.write("<input type='submit' name='"+PARM_SUBMIT_EDIT+"' value='"+i18n.getString("ZoneInfo.list.edit","Edit")+"'>");
out.write("</td>\n");
}
out.write("<td style='width:100%; text-align:right; padding-right:10px;'>");
if (_allowDelete) {
out.write("<input type='submit' name='"+PARM_SUBMIT_DEL+"' value='"+i18n.getString("ZoneInfo.list.delete","Delete")+"' "+Onclick_ConfirmDelete(locale)+">");
} else {
out.write(" ");
}
out.write("</td>\n");
out.write("</tr>\n");
out.write("</table>\n");
out.write("</form>\n");
out.write("</div>\n");
out.write("<hr>\n");
/* new Geozone */
if (_allowNew) {
out.write("<h1 class='"+CommonServlet.CSS_ADMIN_SELECT_TITLE+"'>"+FilterText(i18n.getString("ZoneInfo.list.createNewZone","Create a new Geozone"))+":</h1>\n");
out.write("<div style='margin-top:5px; margin-left:5px; margin-bottom:5px;'>\n");
out.write("<form name='"+FORM_ZONE_NEW+"' method='post' action='"+newURL+"' target='_self'>"); // target='_top'
out.write(" <input type='hidden' name='"+PARM_COMMAND+"' value='"+COMMAND_INFO_NEW+"'/>");
out.write(FilterText(i18n.getString("ZoneInfo.list.zoneID","Geozone ID"))+": <input type='text' name='"+PARM_NEW_ID+"' value='' size='32' maxlength='32'>");
if ((mapProvider != null) &&
((mapProvider.getGeozoneSupportedPointCount(polygonType) > 0) ||
(mapProvider.getGeozoneSupportedPointCount(corridorType) > 0) ) ) {
ComboMap zoneTypeList = new ComboMap();
out.write(" ");
zoneTypeList.add(String.valueOf(pointRadiusType) , Geozone.GeozoneType.POINT_RADIUS.toString(locale));
if (mapProvider.getGeozoneSupportedPointCount(polygonType) > 0) {
zoneTypeList.add(String.valueOf(polygonType) , Geozone.GeozoneType.POLYGON.toString(locale));
}
if (mapProvider.getGeozoneSupportedPointCount(corridorType) > 0) {
zoneTypeList.add(String.valueOf(corridorType), Geozone.GeozoneType.SWEPT_POINT_RADIUS.toString(locale));
}
out.print(Form_ComboBox(PARM_NEW_TYPE,PARM_NEW_TYPE,true,zoneTypeList,"","", -1));
} else {
// only POINT_RADIUS supported
}
out.write("<br>\n");
out.write(" <input type='submit' name='"+PARM_SUBMIT_NEW+"' value='"+i18n.getString("ZoneInfo.list.new","New")+"' style='margin-top:5px; margin-left:10px;'>\n");
out.write("</form>\n");
out.write("</div>\n");
out.write("<hr>\n");
}
} else {
// view/edit
int selZoneType = (_selZone != null)? _selZone.getZoneType() : Geozone.GeozoneType.POINT_RADIUS.getIntValue();
// begin form
out.println("<form name='"+FORM_ZONE_EDIT+"' method='post' action='"+editURL+"' target='_self'>"); // target='_top'
// Geozone view/edit form
out.write("<table cellspacing='0' cellpadding='0' border='0'><tr>\n");
out.write("<td nowrap>");
String frameTitle = _editZone?
i18n.getString("ZoneInfo.map.editZone","Edit Geozone") :
i18n.getString("ZoneInfo.map.viewZone","View Geozone");
out.print ("<span style='font-size:9pt; font-weight:bold;'>"+frameTitle+" </span>");
out.print (Form_TextField(PARM_ZONE_SELECT, false, _selZoneID, 16, 20));
out.write("</td>");
out.write("<td nowrap style=\"width:100%; text-align:right;\">");
//out.println("<span style='width:100%;'> </span>"); <-- causes IE to NOT display the following description
String i18nAddressTooltip = i18n.getString("ZoneInfo.map.description.tooltip", "This description is used for custom reverse-geocoding");
out.print ("<span class='zoneDescription' style='width:100%;' title=\""+i18nAddressTooltip+"\">");
out.print ("<b>"+i18n.getString("ZoneInfo.map.description","Description (Address)")+"</b>: ");
out.print (Form_TextField(PARM_ZONE_DESC, _editZone, (_selZone!=null)?_selZone.getDescription():"", 30, 64));
out.println("</span>");
out.write("</td>");
out.write("</tr></table>");
//out.println("<br/>");
out.println("<input type='hidden' name='"+PARM_COMMAND+"' value='"+COMMAND_INFO_UPDATE+"'/>");
out.println("<table border='0' cellpadding='0' cellspacing='0' style='padding-top:3px'>"); // {
out.println("<tr>");
/* map (controls on right) */
MapDimension mapDim = (mapProvider != null)? mapProvider.getZoneDimension() : new MapDimension(0,0);
if (!mapControlsOnLeft) {
if (mapSupportsGeozones) {
out.println("<td style='width:"+mapDim.getWidth()+"px; height:"+mapDim.getHeight()+"px; padding-right:5px;'>");
out.println("<!-- Begin Map -->");
mapProvider.writeMapCell(out, reqState, mapDim);
out.println("<!-- End Map -->");
out.println("</td>");
} else {
out.println("<td style='width:"+mapDim.getWidth()+"px; height:"+mapDim.getHeight()+"px; padding-right:5px; border: 1px solid black;'>");
out.println("<!-- Geozones not yet supported for this MapProvider -->");
out.println("<center>");
out.println("<span style='font-size:12pt;'>");
out.println(i18n.getString("ZoneInfo.map.notSupported","Geozone map not yet supported for this MapProvider"));
out.println(" </span>");
out.println("</center>");
out.println("</td>");
}
}
/* Geozone fields */
out.println("<td valign='top' style='border-top: solid #CCCCCC 1px;'>");
// overlap priority
if (showOverlapPriority) {
String i18nPriorityTooltip = i18n.getString("ZoneInfo.map.overlapPriority.tooltip", "Priority used when multiple Geozones overlap");
out.println("<div class='zonePrioritySelect' title=\""+i18nPriorityTooltip+"\">");
int pri = (_selZone != null)? _selZone.getPriority() : 0;
if (pri < 0) {
pri = 0;
} else
if (pri >= OVERLAP_PRIORITY.length) {
pri = OVERLAP_PRIORITY.length - 1;
}
ComboMap priCombo = new ComboMap(OVERLAP_PRIORITY);
String priSel = OVERLAP_PRIORITY[pri];
out.println("<b><label for='"+PARM_PRIORITY+"'>"+i18n.getString("ZoneInfo.map.overlapPriority","Overlap Priority")+": </label></b>");
out.println(Form_ComboBox(PARM_PRIORITY, PARM_PRIORITY, _editZone, priCombo, priSel, null, 6));
out.println("</div>");
}
// reverse-geocode zone
String i18nRevGeoTooltip = i18n.getString("ZoneInfo.map.reverseGeocode.tooltip", "Select to use this zone for custom reverse-geocoding");
out.println("<div class='zoneCheckSelect' title=\""+i18nRevGeoTooltip+"\">");
out.println(Form_CheckBox(PARM_REV_GEOCODE, PARM_REV_GEOCODE, _editZone, ((_selZone!=null) && _selZone.getReverseGeocode()),null,null));
out.println("<b><label for='"+PARM_REV_GEOCODE+"'>"+i18n.getString("ZoneInfo.map.reverseGeocode","Reverse Geocode")+"</label></b>");
out.println("</div>");
// arrival zone
if (showArriveDepartZone) {
String i18nArriveTooltip = i18n.getString("ZoneInfo.map.arrivalZone.tooltip", "Select to use this zone for 'Arrival' checking");
out.println("<div class='zoneCheckSelect' title=\""+i18nArriveTooltip+"\">");
out.println(Form_CheckBox(PARM_ARRIVE_NOTIFY, PARM_ARRIVE_NOTIFY, _editZone, ((_selZone!=null) && _selZone.getArrivalZone()),null,null));
out.println("<b><label for='"+PARM_ARRIVE_NOTIFY+"'>"+i18n.getString("ZoneInfo.map.arrivalZone","Arrival Zone")+"</label></b>");
out.println("</div>");
}
// departure zone
if (showArriveDepartZone) {
String i18nDepartTooltip = i18n.getString("ZoneInfo.map.departureZone.tooltip", "Select to use this zone for 'Departure' checking");
out.println("<div class='zoneCheckSelect' title=\""+i18nDepartTooltip+"\">");
out.println(Form_CheckBox(PARM_DEPART_NOTIFY, PARM_DEPART_NOTIFY, _editZone, ((_selZone!=null) && _selZone.getDepartureZone()),null,null));
out.println("<b><label for='"+PARM_DEPART_NOTIFY+"'>"+i18n.getString("ZoneInfo.map.departureZone","Departure Zone")+"</label></b>");
out.println("</div>");
}
// Client Upload ID
if (showClientUploadZone != 0) {
String i18nUploadTooltip = i18n.getString("ZoneInfo.map.clientUpload.tooltip", "Select to use for client-side geofence");
out.println("<div class='zoneCheckSelect' title=\""+i18nUploadTooltip+"\">");
if (showClientUploadZone == 1) {
out.println(Form_CheckBox(PARM_CLIENT_UPLOAD, PARM_CLIENT_UPLOAD, _editZone, ((_selZone!=null) && _selZone.getClientUpload()),null,null));
out.println("<b><label for='"+PARM_CLIENT_UPLOAD+"'>"+i18n.getString("ZoneInfo.map.clientUpload","Client Upload")+":</label></b> ");
} else
if (showClientUploadZone == 2) {
out.println("<b>"+i18n.getString("ZoneInfo.map.clientUploadID","Client Upload ID")+":</b> ");
out.println(Form_TextField(PARM_CLIENT_ID, PARM_CLIENT_ID, _editZone, (_selZone!=null)?String.valueOf(_selZone.getClientID()):"", 5, 5));
}
out.println("</div>");
}
// geozone points section
out.println("<hr>");
if (_editZone && mapSupportsGeozones) {
out.println("<div class='zoneNotesBasic'>");
out.println("<i>"+i18n.getString("ZoneInfo.map.notes.basic", "The Geozone loc/size may be changed here, click 'RESET' to update.")+"</i>");
out.println("</div>");
}
/* shape color */
if (privLabel.getBooleanProperty(PrivateLabel.PROP_ZoneInfo_showShapeColor,false)) {
ComboMap colorCombo = GetColorComboMap(i18n);
String color = (_selZone != null)? _selZone.getShapeColor() : "";
String onchange = _editZone? "javascript:jsvZoneColor=document."+FORM_ZONE_EDIT+"."+PARM_ZONE_COLOR+".value;_zoneReset();" : null;
out.println("<div class='zoneColorSelect' title=\""+""+"\">");
out.println("<b><label for='"+PARM_ZONE_COLOR+"'>"+i18n.getString("ZoneInfo.map.shapeColor","Zone Color")+": </label></b>");
out.println(Form_ComboBox(PARM_ZONE_COLOR, PARM_ZONE_COLOR, _editZone, colorCombo, color, onchange, 10));
out.println("</div>");
}
/* radius */
Geozone.GeozoneType gzt = Geozone.getGeozoneType(_selZone);
if (gzt.hasRadius()) {
String i18nRadiusTooltip = i18n.getString("ZoneInfo.map.radius.tooltip", "Radius may be between {0} and {1} meters",
String.valueOf((long)MIN_RADIUS_METERS), String.valueOf((long)MAX_RADIUS_METERS));
out.println("<div class='zoneRadius' title=\""+i18nRadiusTooltip+"\">");
out.print ("<b>"+i18n.getString("ZoneInfo.map.radiusMeters","Radius (meters)")+":</b> ");
out.println(Form_TextField(MapProvider.ID_ZONE_RADIUS_M, PARM_ZONE_RADIUS, _editZone, (_selZone!=null)?String.valueOf(_selZone.getRadius()):"", 7, 7));
out.println("</div>");
} else {
out.println("<input type='hidden' id='"+MapProvider.ID_ZONE_RADIUS_M+"' name='"+PARM_ZONE_RADIUS+"' value='0'/>");
}
out.println("<div class='zoneLatLon'>");
out.println("<b>"+i18n.getString("ZoneInfo.map.latLon","Lat/Lon")+"</b>: ");
if (_editZone && mapSupportsGeozones) {
String i18nResetBtn = i18n.getString("ZoneInfo.map.reset","Reset Map");
String i18nResetTooltip = i18n.getString("ZoneInfo.map.reset.tooltip", "Click to update the map with the specified radius/latitude/longitude");
out.print("<input class='formButton' type='button' name='reset' value='"+i18nResetBtn+"' title=\""+i18nResetTooltip+"\" onclick=\"javascript:_zoneReset();\">");
}
out.println("<br>");
int pointCount = (mapProvider != null)? mapProvider.getGeozoneSupportedPointCount(selZoneType) : 0;
for (int z = 0; z < pointCount; z++) {
String latStr = (_selZone != null)? String.valueOf(_selZone.getLatitude(z) ) : "";
String lonStr = (_selZone != null)? String.valueOf(_selZone.getLongitude(z)) : "";
// id='"+PARM_ZONE_INDEX+"'
if (pointCount > 1) {
String chk = (z == 0)? " checked" : "";
out.println("<input type='radio' name='"+PARM_ZONE_INDEX+"' value='" + z + "' "+chk+" onchange=\"javascript:_zonePointSelectionChanged("+z+")\"/> ");
} else {
out.println("<input type='hidden' name='"+PARM_ZONE_INDEX+"' value='" + z + "'/>");
}
out.println(Form_TextField(MapProviderAdapter.ID_ZONE_LATITUDE (z), PARM_ZONE_LATITUDE (z), _editZone, latStr, 9, 9));
out.println(Form_TextField(MapProviderAdapter.ID_ZONE_LONGITUDE(z), PARM_ZONE_LONGITUDE(z), _editZone, lonStr, 10, 10));
if ((z+1) < pointCount) { out.println("<br>"); }
}
if (_editZone && mapSupportsGeozones) {
// "ZipCode" button
if (privLabel.getBooleanProperty(PrivateLabel.PROP_ZoneInfo_enableGeocode,false)) {
GeocodeProvider gcp = privLabel.getGeocodeProvider();
String i18nZipBtn = "";
if ((gcp == null) || gcp.getName().startsWith("geonames")) {
i18nZipBtn = i18n.getString("ZoneInfo.map.geocodeZip","Center On City/ZipCode");
} else {
i18nZipBtn = i18n.getString("ZoneInfo.map.geocodeAddress","Center On Address", gcp.getName());
}
String i18nZipTooltip = i18n.getString("ZoneInfo.map.geocode.tooltip", "Click to reset Geozone to spcified Address/ZipCode");
String rgZipCode_text = "rgZipCode";
out.print("<hr>\n");
//out.print("<br>");
out.print("<input class='formButton' type='button' name='tozip' value='"+i18nZipBtn+"' title=\""+i18nZipTooltip+"\" onclick=\"javascript:_zoneGotoAddr(jsmGetIDValue('"+rgZipCode_text+"'),'US');\">");
out.print("<br>");
out.println(Form_TextField(rgZipCode_text, rgZipCode_text, _editZone, "", 27, 60));
}
}
out.println("</div>");
out.println("<hr>");
out.println("<div class='zoneInstructions'>");
out.println("<b>"+i18n.getString("ZoneInfo.map.notes.header","Geozone Notes/Instructions")+":</b><br>");
if (_editZone && mapSupportsGeozones) {
String instr[] = mapProvider.getGeozoneInstructions(selZoneType, locale);
if ((instr != null) && (instr.length > 0)) {
for (int i = 0; i < instr.length; i++) {
if (!StringTools.isBlank(instr[i])) {
out.println("- " + FilterText(instr[i]) + "<br>");
}
}
}
}
out.println("- " + i18n.getString("ZoneInfo.map.notes.lengthInMeters", "Distances are always in meters.") + "<br>");
out.println("<hr>");
if (mapSupportsCursorLocation || mapSupportsDistanceRuler) {
if (mapSupportsCursorLocation) {
out.println("<b>"+i18n.getString("ZoneInfo.map.cursorLoc","Cursor")+"</b>:");
out.println("<span id='"+MapProvider.ID_LAT_LON_DISPLAY +"' style='margin-left:6px; margin-bottom:3px;'>0.00000, 0.00000</span>");
}
if (mapSupportsDistanceRuler) {
out.println("<b>"+i18n.getString("ZoneInfo.map.distanceRuler","Distance")+"</b>:");
out.println("<span id='"+MapProvider.ID_DISTANCE_DISPLAY+"' style='margin-left:6px;'>0 "+GeoPoint.DistanceUnits.METERS.toString(locale)+"</span>");
}
out.println("<hr>");
}
out.println("</div>");
out.write("<div width='100%'>\n");
out.write("<span style='padding-left:10px'> </span>\n");
if (_editZone) {
out.write("<input type='submit' name='"+PARM_SUBMIT_CHG+"' value='"+i18n.getString("ZoneInfo.map.change","Change")+"'>\n");
out.write("<span style='padding-left:10px'> </span>\n");
out.write("<input type='button' name='"+PARM_BUTTON_CANCEL+"' value='"+i18n.getString("ZoneInfo.map.cancel","Cancel")+"' onclick=\"javascript:openURL('"+editURL+"','_self');\">\n"); // target='_top'
} else {
out.write("<input type='button' name='"+PARM_BUTTON_BACK+"' value='"+i18n.getString("ZoneInfo.map.back","Back")+"' onclick=\"javascript:openURL('"+editURL+"','_self');\">\n"); // target='_top'
}
out.write("</div>\n");
out.println("<div width='100%' height='100%'>");
out.println(" ");
out.println("</div>");
out.println("</td>");
/* map (controls on left) */
if (mapControlsOnLeft) {
if (mapSupportsGeozones) {
out.println("<td style='width:"+mapDim.getWidth()+"px; height:"+mapDim.getHeight()+"px; padding-left:5px;'>");
out.println("<!-- Begin Map -->");
mapProvider.writeMapCell(out, reqState, mapDim);
out.println("<!-- End Map -->");
out.println("</td>");
} else {
out.println("<td style='width:"+mapDim.getWidth()+"px; height:"+mapDim.getHeight()+"px; padding-left:5px; border: 1px solid black;'>");
out.println("<!-- Geozones not yet supported for this MapProvider -->");
out.println("<center>");
out.println("<span style='font-size:12pt;'>");
out.println(i18n.getString("ZoneInfo.map.notSupported","Geozone map not yet supported for this MapProvider"));
out.println(" </span>");
out.println("</center>");
out.println("</td>");
}
}
/* end of form */
out.println("</tr>");
out.println("</table>"); // }
out.println("</form>");
}
}
};
/* map load? */
String mapOnLoad = _listZones? "" : "javascript:_zoneMapOnLoad();";
String mapOnUnload = _listZones? "" : "javascript:_zoneMapOnUnload();";
/* write frame */
String onload = error? (mapOnLoad + JS_alert(false,m)) : mapOnLoad;
CommonServlet.writePageFrame(
reqState,
onload,mapOnUnload, // onLoad/onUnload
HTML_CSS, // Style sheets
HTML_JS, // Javascript
null, // Navigation
HTML_CONTENT); // Content
}
// ------------------------------------------------------------------------
}
| |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package Gui2D.SpriteController.SingleSprite;
import Gui2D.SpriteController.Sprite;
import javafx.geometry.Rectangle2D;
import javafx.scene.canvas.GraphicsContext;
/**
 * A player sprite that can move based on a sprite sheet. This class works
 * with every sprite sheet generated by
 * http://gaurav.munjal.us/Universal-LPC-Spritesheet-Character-Generator/
 *
 * @author jonas
 */
public class PlayerSprite extends AbstractSprite {
// Directions variable for keep controll over which direction the sprite is moving
private Direction direction;
/**
* Animation counter and delay for rendering purposes. animationCounter is
* used for controlling which sprite animation we have come to.
* animationDelay is to delay when to change the sprite animation.
*/
private int animationCounter;
private int animationDelay;
/**
* Constructor for player
*/
public PlayerSprite() {
//set position to 0 as a start
super.setPosition(0, 0);
//set velocity to 0 (we dont need to move on create)
super.setVelocity(0, 0);
//set our direction we are facing to standstill
direction = Direction.STANDSTILL;
//animation counter for rendering
animationCounter = 1;
animationDelay = 1;
}
/**
* Our direction the player is walking.
*/
public enum Direction {
STANDSTILL, WALK_UP, WALK_LEFT, WALK_RIGHT, WALK_DOWN;
}
/**
* Our render function which draws our image on the canvas's GraphicsContext
*
* @param gc used to render on.
*/
public void render(GraphicsContext gc) {
//check which direction this object is facing.
if (getDirection().equals(Direction.STANDSTILL)) {
//draw the animation using standstill sprite.
gc.drawImage(getImage(), 8 + (64 * 0), 8 + (64 * 2), super.getSprite_width(), super.getSprite_height(), getPositionX(), getPositionY(), super.getWidth(), super.getHeight());
} else if (getDirection().equals(Direction.WALK_DOWN)) {
//draw the sprite using walk_down animation.
gc.drawImage(getImage(), 8 + (64 * animationCounter), 8 + (64 * 10), super.getSprite_width(), super.getSprite_height(), getPositionX(), getPositionY(), super.getWidth(), super.getHeight());
} else if (getDirection().equals(Direction.WALK_LEFT)) {
//draw the sprite using walk_left animation.
gc.drawImage(getImage(), 8 + (64 * animationCounter), 8 + (64 * 9), super.getSprite_width(), super.getSprite_height(), getPositionX(), getPositionY(), super.getWidth(), super.getHeight());
} else if (getDirection().equals(Direction.WALK_UP)) {
//draw the sprite using walk_up animation.
gc.drawImage(getImage(), 8 + (64 * animationCounter), 8 + (64 * 8), super.getSprite_width(), super.getSprite_height(), getPositionX(), getPositionY(), super.getWidth(), super.getHeight());
} else if (getDirection().equals(Direction.WALK_RIGHT)) {
//draw the sprite using walk_right animation.
gc.drawImage(getImage(), 8 + (64 * animationCounter), 8 + (64 * 11), super.getSprite_width(), super.getSprite_height(), getPositionX(), getPositionY(), super.getWidth(), super.getHeight());
}
//If the animationCounter is 8 reset back to 1 since there are no more animations after 8 sprites.
if (animationCounter != 8) {
//only update the animationCounter each 4th times the render function is called.
if (animationDelay == 4) {
animationCounter++;
animationDelay = 0;
} else {
animationDelay++;
}
} else {
animationCounter = 0;
}
}
/**
* checks if this intersects with anything.
*
* @param s sprite
* @return boolean
*/
public boolean intersect(Sprite s) {
//Since the player sprite is not 100 % accurate we have to tweek its boundaries so it will look natural.
Rectangle2D player_boundary = new Rectangle2D(getPositionX(), getPositionY(), getWidth(), getHeight());
return player_boundary.intersects(s.getBoundary());
}
/**
* Check if the player is intersecting N into another sprite.
*
* @param s sprite
* @return boolean
*/
public boolean intersects_top(Sprite s) {
//Since the player sprite is not 100 % accurate we have to tweek its boundaries so it will look natural.
Rectangle2D player_boundary = new Rectangle2D(getPositionX() + 15, getPositionY() + getHeight() - 22, getWidth() - 47, 5);
return player_boundary.intersects(s.getBoundary());
}
/**
* Check if the player is intersecting S into another sprite.
*
* @param s sprite
* @return boolean
*/
public boolean intersects_bottom(Sprite s) {
//Since the player sprite is not 100 % accurate we have to tweek its boundaries so it will look natural.
Rectangle2D player_boundary = new Rectangle2D(getPositionX() + 15, getPositionY() + getHeight() - 13, getWidth() - 47, 5);
return player_boundary.intersects(s.getBoundary());
}
/**
* Check if the player is intersecting W into another sprite.
*
* @param s sprite
* @return boolean
*/
public boolean intersects_left(Sprite s) {
//Since the player sprite is not 100 % accurate we have to tweek its boundaries so it will look natural.
Rectangle2D player_boundary = new Rectangle2D(getPositionX() + 11, getPositionY() + getWidth() - 20, 5, 10);
return player_boundary.intersects(s.getBoundary());
}
/**
* Check if the player is intersecting E into another sprite.
*
* @param s sprite
* @return boolean
*/
public boolean intersects_right(Sprite s) {
//Since the player sprite is not 100 % accurate we have to tweek its boundaries so it will look natural.
Rectangle2D player_boundary = new Rectangle2D(getPositionX() + getWidth() - 32, getPositionY() + getWidth() - 20, 5, 10);
return player_boundary.intersects(s.getBoundary());
}
/**
* Check if the player is intersecting N into a Rectangle2D.
*
* @param worldBoundary Rectangle2D
* @return boolean
*/
public boolean intersects_top(Rectangle2D worldBoundary) {
//Since the player sprite is not 100 % accurate we have to tweek its boundaries so it will look natural.
Rectangle2D player_boundary = new Rectangle2D(getPositionX(), getPositionY() - 20, getWidth(), getHeight());
return player_boundary.intersects(worldBoundary);
}
/**
* Check if the player is intersecting S into a Rectangle2D.
*
* @param worldBoundary Rectangle2D
* @return boolean
*/
public boolean intersects_bottom(Rectangle2D worldBoundary) {
//Since the player sprite is not 100 % accurate we have to tweek its boundaries so it will look natural.
Rectangle2D player_boundary = new Rectangle2D(getPositionX(), getPositionY(), getWidth(), getHeight() - 20);
return player_boundary.intersects(worldBoundary);
}
/**
* Check if the player is intersecting W into a Rectangle2D.
*
* @param worldBoundary Rectangle2D
* @return boolean
*/
public boolean intersects_left(Rectangle2D worldBoundary) {
//Since the player sprite is not 100 % accurate we have to tweek its boundaries so it will look natural.
Rectangle2D player_boundary = new Rectangle2D(getPositionX() - 25, getPositionY(), getWidth(), getHeight());
return player_boundary.intersects(worldBoundary);
}
/**
* Check if the player is intersecting E into a Rectangle2D.
*
* @param worldBoundary Rectangle2D
* @return boolean
*/
public boolean intersects_right(Rectangle2D worldBoundary) {
//Since the player sprite is not 100 % accurate we have to tweek its boundaries so it will look natural.
Rectangle2D player_boundary = new Rectangle2D(getPositionX(), getPositionY(), getWidth() - 25, getHeight());
return player_boundary.intersects(worldBoundary);
}
/**
* Return position and velocity of the class
*
* @return String
*/
@Override
public String toString() {
return " Position: [" + getPositionX() + "," + getPositionY() + "]"
+ " Velocity: [" + getVelocityX() + "," + getVelocityY() + "]";
}
/**
* @return the direction
*/
public Direction getDirection() {
return direction;
}
    /**
     * Updates the facing direction of this entity.
     *
     * @param direction the direction to set
     */
    public void setDirection(Direction direction) {
        this.direction = direction;
    }
}
| |
package com.eason.core;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;
/**
* Created by fengyingsheng on 2017-9-14.
*/
public class HashSetTest {
public static void main(String[] args) {
testDate();
Set<String> s1 = new HashSet<String>();
s1.add("a");
s1.add("b");
s1.add("c");
Set<String> s2 = new HashSet<String>();
s2.add("a");
s2.add("b");
s2.add("d");
s1.retainAll(s2);
Set<String> s3 = new HashSet<String>();
s3.add("a");
s1.removeAll(s3);
System.out.println(s1);
}
private static void testDate() {
Calendar calendar = Calendar.getInstance();
calendar.set(Calendar.HOUR_OF_DAY, 0);
calendar.set(Calendar.MINUTE, 0);
calendar.set(Calendar.SECOND, 0);
System.out.println(calendar.getTime());
long lastHour = calendar.getTimeInMillis() - (1 * 3600 * 1000);
System.out.println(new Date(lastHour));
String a = "iVBORw0KGgoAAAANSUhEUgAAAMgAAAB/CAYAAACql41TAAAAAXNSR0IArs4c6QAAAARnQU1BAACx\n" +
"jwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAHtiSURBVHhe7f11WF1bmvUNv39/1ytPS1V16XE/\n" +
"8Zy4kqAJEDS4BwnEnbg78UBwd3d3d4cEAoEEiHuCbMj4xlwbUtTprnR1dZ1TTz29J9e45rJta9+/\n" +
"ed9jrbUX/xcUTdEU7c82BSCKpmgfaApAFE3RPtAUgCiaon2gKQBRNEX7QFMAomiK9oGmAETRFO0D\n" +
"TQGIoinaB5oCEEVTtA80BSCKpmgfaApAFE3RPtAUgPwDtjG8w7sxucbejUE2OooRTsvGxjA6NkqN\n" +
"YYySjXKbYU4PySCTyZe/e8ft+Hg+cPzZFO1DTQHIP2AbY5ALOEYY8C9GR/Ccwf9WwDEyBNnICIYI\n" +
"yaBsiPMEY3gYI89fYejpSwyOECQCNSbAUgDyFzUFIP+ATQKEEiA8GB1G5eAwql4P4r5MhqGRtxh+\n" +
"9hh307LRefI6uvdcxhOPOAwnlEPW9QBDo2MgWQRFAchf0hSA/AO20cklliipRofwcugtmmtqkbf7\n" +
"GFLnrUHtNHXcU7HEkP1pDO32hOxgCGTe2Rh+8RZjhESmAOQvagpA/gGa8BNDLJ1ey0bwcoQlE0up\n" +
"Ec6/ev0a3V09iPAOgquWMXZ+NhvXP/4ByV8r4fZCc8gM92LM/ARkdmcgc7kA2WZ3yHKbMEiwhB9R\n" +
"tP+8KQD5B2jDzBIPh99igF6j9uUgbt/rRUdtHaIuXIfdEnUY//o7uFAnfz0NoR8vQPVsPbzS2oJ3\n" +
"xgcwZnoU78yPY9T4MIYN9uPt8WCMDsowPCobf3ZF+1BTAPIP0F5xxG+kz6jr7UNxZi5i952C2wpD\n" +
"7P5oNrb/fhoO/H463Jg5wr9aipq5Bnis7oIRnZ0YM3TFu7UHMUYNau/Ck+Xr8HItoRl4ChmhU7T/\n" +
"vCkA+RmbMNej1ITJlh+KlXGePuKdDLKhQYw9eYmxnkeQUaNDI9LRpsG3wxho68Wt0ByUHvBC1QY3\n" +
"NLq4oXrdEeSZ7UD+mg2oWbkOXUoOeKy2AW+1t2FozQ68pUbW7MGQ1nY8IzQ9C4zxWtkZox336Fv+\n" +
"voC8Y4X3jl7onfjc3A9jz4cwfOchRut6Iau+g5Ge+5ANvpEOYYv9JHyT/LD2z+udFID8jE0ChF/2\n" +
"qDh3Ib50aoQj+djLIYy29mMktQ4yv0KMXcnF0OVMvKGplhW343VOLV5djsHrg4EYcg3A2N4gyA6E\n" +
"4O3hMLw8HoWXhyPw4lAQBnf7YMjxHN6s3Ys3qzbhrfpGvNXYiJcqTuhfbo1bs3TxarE1ZB19GGW5\n" +
"9vds7/jZRZknBgjZwGu8zG3H82vZuOkWj1r3BHTdSMaLzGoMvngl7StxSHtkHJafsykA+RmblDU4\n" +
"Ekoj4viXPvb4FUZiSiE7Fo2RvSEY3UII1vthZFMwhneG4vl2b7xdfw0P9LajTkML9Yar0Wijj1sO\n" +
"Fui2sUPPuk14uPkEhnbdwCiBGTwVC9mpOIzs9MdLqzN4rLwejxZao3eGAdq+1sCz5TaQ9T3GGE3+\n" +
"37O94yAxQkhl/S/xNqYaD08GIT44G1da+nGs7RHc2gaQnFmFx0FpkD3iPuJ+GxLZ9mfOfApAfsYm\n" +
"yivpRB2/7NEXb/C26S5GQwswtOkihq2PYsjyGN4YHMAbs5N4a8VltlcxbOeJ5zbXcN96D3pCN6Cr\n" +
"xha9zfbob3dBf9smdFTYoynWFHXnTdHubInHzocwut8bw4dCIDsdh9fHwnDXYBdapmmh5TsCou2M\n" +
"IfqZd39nDzI2xCza8xLDiY0YvZKG3J0B2Nt1Hxs7n2FL5ws43XqOjV1P4dt0H48DMjH04IUEyM99\n" +
"9E0ByE/cRmVDBIJegiOfqPtHRhgYd1/hTWQ9xi7noVB7PY5+NAWRGtboszyMfhU7DCjZ4bnmZgzq\n" +
"7scb3YN4prETAys34aaFJVpi16K13gw3W63R1myJmy3WuN1qi55b63Hn5nq0ZTui+bAx7livx+sd\n" +
"V/B2rz/enozBA5ZfnfQqD1wvYFj2VjqrLhOwUnIv8NPX9u9kct81JgL9wVuMpndj6FIiHti74WB0\n" +
"KdZ3vYRTy2PYNN6DTUOfJLumAZy99RB3Q/Iw2NmL4beyn+W9TjQFID9xezfyTroOSmiYZc3IkAyD\n" +
"hR0YPZeDoTPZeOuRgYfH/fFitzvumm1Enr4asszVUamthzsrLdC32BK3l1qhYa4JKmatQeUhQ7RW\n" +
"W6O92QatTdZoabRCc501oVlHOXDeDi23NqG1aANar5nh9g4nPN58Cq9338DQiTC8TSjF4EuaX74P\n" +
"6dosEbCi/xmCTpzgFCcohSEfrWSwB5RgZH8oYl0uYUvzPcJwHw61/bCouQNzGnWzqm6pd6zvwbWG\n" +
"frSn1OHtnX7pvf5ckCgA+YmbTGSMN2/xuLEFD7JL8LSsBW/DyzFyMAGD5+Nxb885NFrbonW9GWqO\n" +
"66AmVhcFKWoo8FJH8a5VKNZdjaLlq5A1aynilyxByY21aG+wR6sAocEG9TXmqKowQm2VGWWB2kpz\n" +
"NFQRqFo71NU7ojqV06dN0LnOHi/sWMYdDsDbuBLIuh5K12oJOIQJ/lkAEUfrxMWTHc8gi6rByKkY\n" +
"9Ky/gKNeabBu6odlbS9MK7pgVHoLhiU3YVDcLvWmBGVD/QA8bvajhxKZWAHIP2gT5lM6JMkSX7qY\n" +
"8PETdHiHosHlGLpcb+DBiWgMnkiiT0jA0xOeyN+uh9yrqugqsUdPkwNuMyN0tNigpc4cdcUWKIzU\n" +
"QeY1ZWSd0kBRgCFaqkT2sEUDwairNiUUzCzlBigrNUBJkT5KiwxRTJUUGqKy2BjVpSaoKjJDmbch\n" +
"6lyM0G+0AcMObhh2T8BQWTtGn9AHiAwnzq4z5thJh1N/iibKzLEngxjN6YDschZGdvkiyfEUHLMb\n" +
"YFHZA+Py29ApboNeQSt08puhW9ACvcJWrCUkllVd2NH2APEPBvFieITvVxj2nx5sBSB/4yaNyByZ\n" +
"R2QyPKmoR9Xe04iZp4kW0wN4dZxg7InG2NZwjFJvdwSiY/tJFByzQ4a3Fto56t9stkJ7jRna6i3R\n" +
"WG2C5loLtDUQmlYbaV1zLQ15tTGqyg2ptahkL8AoLtRDQd4a5OfqojBPX1JJ4VoUFxihssQU1SXG\n" +
"KEtci/Kz+qgxMMQr06N4u8cHQxH5GL51l6XfCP3SGEdn4RN+IkCGCGLrA4wGlmL0SCRerL+My2dC\n" +
"YVHSCaOyDgkGAcZkiWV6hS0wLL0Ju5penOh8iuaXg4R6GGMEWwHIP1iTjlTJRvC4vR1l1nuQMlsP\n" +
"Ocss8NDlKoa3BGHU6ipGzS5h1PY63jh4Y9juGm7rbEXKGn0EbFqBzDAdNNVao6bUmIBYoK7CFOUM\n" +
"/rZGSy43RUM1y6lKBny5CSrLGPRF8swh4MjL0ZIAycvWQXaGFiExkAApKzRCTYkJGkosUFFCSCIM\n" +
"kW+/Gj266/DG/iyGz8ZhJLsOskfP8UacjKOZ/ina2MO3GElvw9iFNAzv9EGD/XlsjSpi8HdJpZR+\n" +
"EbOHBESrlD0mpJfPZcU3YVLehQ0sxZIev8YrZpFRvtefutJSAPI3buISjqE3r9B22hedDhfxdGsw\n" +
"nhGCyOlaiJqvh/varnhleATPqRdrdmNgxSb0KbmgbpEZKrTtkGNkiWAXNWTHmaCuzIF+w4GwGKGq\n" +
"mGIWqK+wIBxmKOd8BedLCw0IhTYK83WkPidTS4IjK10T+Tm6lAG34WOLuH25KWoIXCM9S0MxAfPS\n" +
"R6WpNp4Zb8XwdoJ7NRFjtT0Ypm/6Kdpo/X2M+JUxe0RjaKMHwvd5wj6rFcY1t1leybPIBCgCjDV5\n" +
"TZL0829Ct7gD+iW3YVPdh8vdT9AzOCwd+FBkkP8Nm8gS4my4+PXe0Bjr4XfikhCWD6MyDLK0ul/f\n" +
"jPZj3ni1JwYjzBpDm7zRZ3YUpSusED11BWqVbNCvth13V7qgYdZaVEzTRtUcAzywOCipS287olas\n" +
"xiW9BYi9sQY1ZZaop/fIzViD0hIR6CyxGOgCiuJCllOirKKK8uhnstYgI5WmnoDkZDKrZOtxWpug\n" +
"iDKMGYWqKDOhubdAhzgCVm6L8pP6qDfQwXPj7ZDt8MFbv0wMtvZB9naIpYw4RC2MvPjFIkdszn+o\n" +
"vRO/YqSEkR7l/hihpB9z9b3CcEQLXp9JwtvN3qi3d8MhryTYlvXCrKYfJhW3sZbmfAIOUV5NACIk\n" +
"5oVpN+d2O5seo/L5MIZHRxSA/O/YhIkd5pc+yBFs9Nkghh6/wOjjN5DdfoThmru4fTUBd7dcx+sN\n" +
"DLaN1/DE/jR1Ah3621Gpsg7J0zRRPNcATbMNUT5DF7FfqaBZZzMemR/EgPFe9OjtRI/GFtzWYuml\n" +
"boDQdVpI9NJBQY4RqosskM2Az83UQVG+IXIIhABDAJKfvUbKIHI4tKU+I1UT6SmrJUgEPIXctqSI\n" +
"ULLkEtmotcEUt9qt0VJoiZKDqrhpZsqy6xgG9/tiNKYYMvoT2cvXkq+SrosiLB9q4pyKOL8iDucO\n" +
"ch+NvBnGaNcjjEVXY+hEEoY3e6HX6TKunAqAaWkrLGvvw7xGGPTOPwuIdm6jNC/WmdLIO9f2IWrg\n" +
"FV4oMsj/nk0y4kzxox2PMBLXhMEbeZDF1GHYpxQj57IxeCAWQ9uCaEI98cjuBAasDhOQU3jC6T7T\n" +
"fWjUXI/iRYYoWmiM2pV2uEco+qhOAtShuxVtWpvQpO2M5lUb0LzaBSUaVghVX40AF2UkBRsgL9MI\n" +
"JQXmBMKIgc/SimDkEoy8LHmJlZkmgBBgrHoPiFBa8ipJmWlaUnYponepL6U3KbdAW40JbrXZoT7N\n" +
"CvXH9HDHxQGPHQ5jcJ8vZGF5GK3thOzxS+l6qA81sW+GxwiTKIHuPcNIfjuGfPn4w5F4vcULt8yP\n" +
"4YqrBxwzamBeexeWLJlEiSXKKwGIyBLCg0yAMSExL5YblXbCrv4uTnc/Q+/bYQUgf68m9rtc4gLD\n" +
"8atvxTRHRxEAw7cfYiSoArITGXjs6IGRS1kYORAH2dZwyLaFYnBDAF7a38CDdafxwvEcntiewiPr\n" +
"YxggID1rd6HXYAd6jQ5gwGQ/ug124qbeVjSs2YgmC1dUG2xEncZ6VGo4om6VI2rV7FCjYofMFcYI\n" +
"1V6FwF0qSAlmyVRgicwUXeRk6CItiRlD+A8pg8izSFqKOlIS1ZAcr0YwCEjSas6vIizcLp1QZYjS\n" +
"S5RpeqgsNUBtuR5aGkzQ1myD+lQTNJ8zRe82J/Tb7cCjrWcxFJiBkYoOjN1+gLEnLzDGzPLuzSDG\n" +
"BlmGiUOvQxw0nr7G8K1+jJTcwlBwKYZOJUG2OwRvna+h2vw4zu+9DjsGvGXDACwY6CJ7mFeJcx/0\n" +
"H4TDYJL/mAyIkA4BMSjpwLr6TlzuakP36yf8TuRZRGDyU6CiAOTPNAGDJMIxKhM1tfyMs7hoTvxs\n" +
"dSSzFSMn0iHbF4cKpXVotTyEYSeCYkc53oBsvRdGrK7iodERPDY+hD6D3fQh+5gtXAnIbvRSN9fu\n" +
"wU2DrWjV2YQW4z1oPuaF6rgs1AXFoGLDEZSqWqNSzQYV6vaoUrVDmaotirgsdpkB/PSUEXB4BRIi\n" +
"GOjZaxn0ugx6HWSkyMsqkUVSEpUJhzISY5l5YtWQFK8uSUCSmsTskqKBTOFXuG1OpgZLNi2UFeui\n" +
"ptIILbUWuFlniZZMM7T5W6LjjB3u7NiIHidXPD3Iz+gej9FgZoaoEozFV2AstRay5CoMhxdg+FIC\n" +
"RvYHY2S7N147XUKnyXHEscw8dDUKNiWtcGx6AJvxE4PiTLlpZReMCYhBYRv0C1qhm/+nJZaY1hbA\n" +
"FLZAl1nmaFsJkvqyUPvsDkboAcW5JwmSn4AQBSB/pgmfIeroIe58ccJv9O0ohrufYKSqByNpzRg+\n" +
"n4nhHVHMGMGo5wjfoOGEJmUXvDY6jmHTUxgxOYnHGntwf7kzmuZaoF3NGX3mB3DHeDe6jXajkxmk\n" +
"1WA7WrQ2omHvZeQn5eN4Ui6co9NwvqgSNdXNaLwRgkI1W1SuckCFpgNKCUuxshVyVxghe6k5Qpdo\n" +
"wctEGeEezArpBkhN00dctDqzhRalgYTYFUiKW4mE6JWIi2Qfo4rEODUuZ0+JdYncJiVBhdlFhZlo\n" +
"laTiAvqUkrWoLV6LliortNWx9KqkoS+ywe1EK3T5OOH2ASt0brRDj60L+s22YMBsBx5a7sVj04Mc\n" +
"FPag32g7cg02wWPTMZzzjMX2rFq41PfBueURM8A9ZpAeWIjsQUCEQf9LAdEuacPl1mTUPAlA/dMa\n" +
"vJK9lTyPGLzY/c2bApA/0+S/QaAJF3W3KCtiGjESWgOZP8sqjyKMnsrC6N54jGz0xeN1Zxjwu5G+\n" +
"jMZ3kYUExYPFTrgzl6XKNF0kfrIMbTThXcwSXQSkw3AHmrUJht1+3IrNQnBRDVwyy2Bd3Aqz4g7Y\n" +
"FrXAJa0Cqa130JKUjSJHVxQLONRtka9ihcKVlihQtkDGMmMkLdaH98ql8Nq+AulJxswEhswKugjy\n" +
"n8fsIUBgliEg8VHKiBOKVkZ8jAphUZHWCcVHL+f8ck4rERRVlm0ayEqm0c/RY1bRQ1XpWtSUi0PM\n" +
"JmisNEFrLf1KrRV66pxwt8oFd0o2oD3VAa0JjmiLNEdbhCVqw8xQGL8OJ0JdsSktE2b5lXBovQvH\n" +
"hl7C0Qerhv7311v9VwDRKenElqYaFPd7IWWgDH2vH0uDmLiubExcCvA3bv+jARF2U9xEbeJXfgII\n" +
"cTsccbTm7agMQ7ce47VHAV5u8cErlyuQXcjE2MksyFxjMbYrDmPbo/DA7CAeOLmh2+II7lofQaGS\n" +
"FVKmayLuK2UEf7YEsVNU0GbginsWp9BlsBe36TdaDbei80oQchvbsSG2EBY5zXBkPW7HssOq7g77\n" +
"XtiX3YIO13k1daCjoxOlx64gS9WKGcQG+SuskL/MgqBYIX2FGdKWrIXfvJW4ZLIUkb5aKMgyQVKM\n" +
"BqIilBEZuoKgaBAKJYIgh0WeUVYgJpxwRAlICBAhiYtaJklMZ6aqS8rJEOWXODqmg4JcXZQWG6Ci\n" +
"lNCUG6Cexr6BsDTVm0pqbjRnaSauArBAB31Mc6MlKiqsccLfFo5JcTDOq4ZxwS1YV/XCqrqLBp3l\n" +
"VXknTCSDfhOGJfQgxcKDNBES+o6cOujkNUC7oA5aBY3QEoBwO7PKW7h6Kxe5jzMR8uQZXvB7G34n\n" +
"LuGXf69/y/Y/GhBRRomrWcXoI5VRslGMyTgtLuLre47h4EoMHUlB0QpnnJq1HC/Nz2B4WyCzRjBG\n" +
"N7O8Wh+MZ6an8WDjZbzc4YFnGy7grtVBNOltQp2WM9pNdqLXfB9Lqj3oMHFFu/V+1LteQXFGOdzK\n" +
"G2CWUQTnmw9hxlHVvK4LFrUsN1iPG3I0Vc2qgH16NTakZ6KmvQO3KipR6rwXeao05ivNkbLUBOmL\n" +
"jZE1zwjpc42RNtcEQbPUcXnNAoSdV2cZRQ8Su4aZQR1hQUsZ+CqEYQIOJUmxESsQG7kcMRFLqSWS\n" +
"YiPFtsukXmQVUX5lSp6GSl+NvBxtZGeoswxbg5JCXUoP5SzFKkqMpMPNFUU0/AKiIn1UFumgvkib\n" +
"vSmuhRrCIdgD5rkZMCyqh1HhTRgxW8p1Cwb5zB6UXh4zSB6zRw5NOY256PVzCUxuC/QKCFFxJ4wq\n" +
"+mFa0w/b9lfY3fMGLS/F98Zsz+/wb93+RwMiN+EiNXMHD8sw/HoIo2+G8VacIMtvh+x4BkboM26v\n" +
"2ok+LZrpRTZ4ZHMOY07+GHP0w6iDDwbNLuK1wyXctz+Dh46nWYsfwN21e9FrvBd3DWm8dbeg02Iv\n" +
"6necR2VEJoLLmrAurRTm5R2wqadRrbsHq5peaRQV1xvpZddCP7Ucdgk52JmSheT8MpRHJyPCahNi\n" +
"lVhC6Tgh28gFaSsskLZgLeIX6CFxsS7iF1JztBE5TRuey1bA21UJUeE05XH0IjGrCIM6EmNo1GMF\n" +
"KH8su6LDliI6fAmiwhZJig5fLCkqdDEfs1zKOolxKjT29DbiqFiSMlJp/icyjJA4xJyfrc1pDeSx\n" +
"z83SpMQyXRRm66GAy4roa64Hm2Kzz1FYREbDOLUA+mmVMKI3EdLNroF2ZiW0Miqxmr1aejlWZVVB\n" +
"PaMCWlnV0OR6zTxmFWYYg4pOGNPg2zU/hPPtN4h+OsSML6qAv/2PwP7HASIdsqWke9rKZJA9fQNZ\n" +
"71PImgcwWH4bI9RwYy9GAsog25fMjBGOtyZn8NaGAFgewYDBIQzaXMawuRuGjE7hxep96F+9A4/s\n" +
"TqLP4jB6jfahx/QAbpuynKJhvbnxDGqvhyO6oAoHK5tgWdos/d7BulEOhwlLDZPK29ArbGaQVMOc\n" +
"Rn1Hcha8MvORG5eOhKNn4aNiBP/lekjZdASx/jGIS01F2tHLSFpth+i52kiaq0dADJA0X0gbUbPX\n" +
"wGuxCnzsVyLKnaY8ZjWiI5lVojQQyywSG6WEhPEsEkMIYsKXEY7FLMcICWGRZxSRWZYzk3A7AVOM\n" +
"MkFRRlKCMp9P6b1/SYoTJl+u5PgV9DDikLIaMgQ8BCozWUM6WpaRrIrUOFUEhKnhoJ8jHIMuwyYu\n" +
"FpZJmTBJzoN5WhmMk4pglFgIvbg8Kh+GCXnQjkqFcWIG9GLTYJCcDwPCosfMsra6h/vwATZ0vMD5\n" +
"+69xm5l/bOxvfyuj/3GAyIYJhjhUK47Zt/bjrThWH1mPdyHNGPUuw8iVXMgu0WfsjcHojgi8cPLA\n" +
"C8fLeGolADmBLp0d6FbZgAcrNuPeQnvUfaeLNiVH9DkeR4/ZXnSbuqKDRvyW40k0uscgLLUEh4qr\n" +
"4VJ3G/Zi1GsZkEoqU4JhVtkD/aJWrM2vhwGDYVNSFtwzcxloKYg9eBpB2pbwWaKLaLudiLoRjDMp\n" +
"ebCNz4d1Yh7OlVQhyTcCiet2IIrbJDCTxC/QRwpBSVyoh1hmlMB5q+CprYzQY+qIDVuFhAQqRhMR\n" +
"QfQfoUulMkrKIGFKzBrLEcGsER4iIFn6XjGiBKNfiY7gNpQw+RMlmrxMYzkmSZRkK7hMZCdq3PQn\n" +
"E6qUBFWkJzObRakhjsuDgpfhsqcajrqvxX4fF+wM2ImdYYewK/yw1O8MOcJlh7DDbye1Cft8nLHb\n" +
"zxk7/bdjU8gpGMdGE5J6WNZ2w779IQ7eGUTWG1YA9CJ/6/Y/DhDJfIufm3Y+hsy/HHfX38DA2UQM\n" +
"elfQgGdi9HASBvdH442zJ946XsUD69N4sfkKBp2v4bntGfoJV+Sz7s/+nuZ1qhYqlluh1+QQblse\n" +
"xF2Tveigz2i/HMKRNhf7cgqxsaobllWdsCcYtrV3pexhWtONtTSkhuKSiuRCWMdn4HBaNqIzsxBz\n" +
"/CJCDB3gM1cDgTr2SPGNwQWOoI5JeTDgCGuQXg+ztEboxuZiZ24RwghN5r7ziFMxReIiA6QuMkTS\n" +
"Qn0kLTZA7Pw1CJ+7Gn5KavDbqI64wFXISdZFVCCDOW4lIoJFKbVEgkTAIRQWvFCCREjMC0iiRTZh\n" +
"4EdJEBEcPmZCEcEEanxaDhthClvG7DMuZiexXKyPDec6lm8RYQsRGrQQvl5z4XF9Dq5cnoPLV5fg\n" +
"8pUluOa+HO43luLGjQXw9JwDP5+58POag6CABQji+/YNVMUBH2uWaQH0LNwX9Xex/fZrhD15ixf/\n" +
"yWUwf037HwfI8KPnGKzrgSyijjCkMeBPoXndKQztj2XGoLZGYHhzCN6su4a36wnGJne83eqJNxvZ\n" +
"r7+I+1ZH0UATXq21Hu0GO9FjfQx3rA6hc60rOrdfQCMD+TrLKaf8Ojg03IVt6wPYNPbBqp4+g1nD\n" +
"oFJcsdqOtZkNLB2ysS05HVFFRUgLCkOQ6Xr4L9VDyEpDZF7yhF96EdYlFsM6qwFrc2jq6VOMyzqx\n" +
"lqZ2bUEr9GLyYRSSiuDSWhR4ByJB10YCQwCSttgQKYQlfp4+y7A1CFqshutrGehemohhaZQSpoXQ\n" +
"AAEIg5cBGyGVV0slQCLDGPCcngAkSpRghEMoUgS/mB+HQjw+MoTbBgtAlNhzu1D6H2k79qHLERa4\n" +
"iK9F8AhFVCizDbeL5PYhgXMR6DsTAQTF33M6gnxnSwoPnI9ovnZMyEIkMTPFRcxHavwipCcsk0q1\n" +
"JJZwZ+hnLCODCEkrnOofwb3nOfrf/u3v1PJ/PCDSVbf0GuJXc0OtjzHiX4WRMzkYOZQK2c44PDY8\n" +
"iZKl1njr4I7RdX6QuQRieEMgRuy9MGR3Fa/WX8UbQvLc6RKerDtPr3EO/ZZH0Wt3GPfsjuKO2X40\n" +
"Gm9Ge3Aqkus6sCG9AvYVXbCnp7Fo7oNlcz+Mq7qki/H0C9tgmt8CfdbXtnHJ8MwsQGFpGfw37IDP\n" +
"Ih1ELNdH5u7TSM4sgU1kNmxzamGS3wg7lmbW0tGtdugVNROWRqjGFcC8oBFWmRU4kZ2H5pZbaErN\n" +
"QYquA+KW6iNqqS5il7DkWmyE5MVrETtnDUJmacBtOUfhI+qI5Ggey3IpMGA5/P3mMvjlkIQzKIP8\n" +
"50uQTGSVCC6LCFkg+RRJ43CIgA8L5DoGu4BEgCCWiQwi1ovlcjEb8TnE+hD/hQj2WyABE+I/D0F+\n" +
"AoqZCPOfQ/2A8IC5hGc+4ViAuNAFSAyfj6ToBUhPFOdolKUTmrmJKkhJXomNF6yg4RsFq/gyDhKd\n" +
"eNzzFEPP30hHI8WV1n+L67T+jwdEumPHKEuqu08wGsKscaEQI3viINsSDtnmYIys98CA2SHc092H\n" +
"YZPzGLW6giFTN7zROoRnWq54aHaEcJzHUyc3PHE8j/t2p9BtfRi3rQ6gU5RUx26gpbUXR4uqoJ1a\n" +
"CCeCsa6pH9ZM/VZ1zBqVd+W/ccipg2lGOQyik3AoKR25hSVIveiFK3PUETx/NSKstyA9OgsX8yqh\n" +
"n8TMUFCHdfXdsGVJZlF+G4aFtwhYC1Ynl0EnrQprU0uxNacMJ9PyUFhRg+L0dIRabkLCElMCYc5S\n" +
"yxyJc1hqzSckC5hJ5uoiUoJEHRcXz4H3LjVCooGIWJEhVBAWwIBnyTVRWglNQBIpgSE/yhURvICB\n" +
"/keJMm0ig4QH8fEEZEJiWYi/2G4+QpktBBChLJWC/eZxer70eLE80HeGBEio3w8IYQYJ8yOgLKci\n" +
"CZY4YJBCmJMIXBKBSSA88Z4qyL6oh6y9jsjb64EnlzLx/HAURk6FY6zspvRbe3FpkAKQv6BJP2Aa\n" +
"HsIog0x2qQAjp7MxvDMcg1t9WTr54PmGa3hhfRK1y2zRq7kVA8obcX+5CzrnWqJ5gQX6Cc8z54vo\n" +
"tzsuqc/mKA35adw6EYjipFK4VdAsJpfSZ9yCQ8cTWDWKi/BowukzxGFbHZZC2hllMIlOw8HETEQm\n" +
"ZyL2/DXcWGUCn3maiDPdhNgbYbiWXwWHzHKYsJRaV9YDi6rbMCm/BSPCZVjQBv3MeujQg5gnFWAH\n" +
"S6+TabmIyylAfkIKYvYeg7uSHgK0bJB15gryigqRmZaBEMsNiFhmiLh5NO0/rEEsIYmevgpB01Vw\n" +
"Y7EyPG3pKdxVGYQaNNfqHM0FGAKYCTDkfWjgPIQFzZOySGSoPPgjQ+QwiAwiwBDZIZiBHRowX1ou\n" +
"FOQ7D36es9j/gGACIBTiL4CYLcEizyBc7zcTQd4zEewzSy7fWcwksxAVMBuxfkpI9FVDykVtFJyy\n" +
"ROVeF7Rt3I9OZu4O9S3oXG6P1sXWuL16A55sPouhrHIMvxVZ5H8wIPIL0/5UExcXSrexmTQ/+vI1\n" +
"hlt7MOZbhKEDSZC5xrGECsAbp+t4u9kDb7bewJD9BenoVAlH37rFFqiZa4zqBebo0N2BBw7nMGB/\n" +
"Hj02h3Bn3WHc2nsFFf5J8MuvxraCeuinVcP55mNmjgewE2VV4z2YcdQ3KGuHRloZjCi7kHi4E45Y\n" +
"7zCEOOyA7zIG6tr1iDziBp+ELGzLq4Vxdh2sWYpZVHZTnTCmTxEZQyerjn6lDuZppdiYVoCTWYUI\n" +
"LShGWkwCUk7z8UZ28FhlCv8tBxARlQiP/DI4xaXClRAFx+ch9dB5JKyxQ+J8PUoHcQvXIHQ2s9ZU\n" +
"NfjOUcE1w6UIuqBMCFYyUBcRgKUI8Bsvk6TMITKLKLEY9FJPMDiyh3L0DyEM8rJKDojIFnJouI7B\n" +
"H+D9g6RAnx/43OwFCD4zEeg9gzBxmfAcnA/wpv/wnIGgG5x2n4XAK3MR7aaE1GNayN1liTJbZzRa\n" +
"b0WDpguqFtuiepEV6jh41SlZopUDW4e6M+6t2Y63dqcxGpaB0afPpSNaIi7+u+0fHhDpRN+Y/N+R\n" +
"iZ0indsYkmHs5VuM9D/BSF49ZEH5GDmZCNm2SIxsDMCw/Q05IMJ8r7+MF7bn8NDyJG7qbUeT5kbc\n" +
"0t+Bu5ZHpJN//Syp+mxP4g59R497OGqK63CQJnxNTB5MCtslr2Hb2C/Jqk6UVfeYPXpYWnH0z6jg\n" +
"qJ+LCzklKM4vhrfVRgTqrUPS8SsIiUzGkYIyGCaVSNchibt2WNO7mIpb3hS3QSu9Dnosy9Yk5sIp\n" +
"qxRHMwoQlleCzNhkJDJL+OhZ4aqyHiJ2H2G9H4urWcVwSS2GUXIx1MPSJJ9jFpuLa5nFSPcMRqbJ\n" +
"ZsIhPwwcO0sTkbM1Ef7DagTPUcUVraXwPqHM8koNISyXgn2XICZsJbMEQWE2kR/VEplkCbWU5pr+\n" +
"YVxhQXIwhKcQkEzAISTA8PeaRQBmM5PMhI/nFAR7zYQ/QQjkcjEdcGMGAq5Mg+8lepzTKkjbq4ss\n" +
"ZyMUmNuiTMsBtSscUbLAFHkLjFG00AxFc4zQvNIBTcvXoUPVGd2qG9CrsRkD2jvwyuooZN7xGH7A\n" +
"751ltQKQcYlLDEaHWXe+GsHQM4JR04nh1ErIIgowei0ZYxcSMbjNG2PWV/FW/xherz6Ax/qH8cL+\n" +
"HJ5ZncRjs6N4zDLrvtVhPLQ+ikc2x3DfhuUUwbhrfVySOMfRdNANNYXlOJldDIesSlgXtcGq+o4E\n" +
"hnX9PenybSELCZQeONCDWBa3YHN+DW6UViExhmVPVDoOJhfAMbcWFoVNMC8nGDXcTpwdLm6Hbm4j\n" +
"VtFjaDLQbdOKsC8lG/5FZUhLzUT0sQsIXOuIq0tWI9LFFSkBMbiYkY0tLOHM6EmMs+thwrLOtOwW\n" +
"TAiZTnolzFKKsY/lWHxMCrL3nEbsMhOkzdFDArNJPLNKwlwtBM3UxDVlVQQcUkZY5Ar4Bqxk2SWA\n" +
"EUevliGMviOEPiIkcDGzhjj6RKMtvAQVQjBEaSUgEQqm4Q/ynSNJACLgEJD4EgR/dwJzYyoCPKfC\n" +
"79o0+F+ai4iTqvQShii0tUWBgSWKVC1QuNgMhYSicJ4xiueZoGIRs/oKOzQSjMaltrhJMG6prMdt\n" +
"VRfcUduIHvVN6NPchpfmhzDiEYPh+0+kc13/owGRLm9mL/7T0mDPfQxmNWA4qQmj+XcwGlsL2fUs\n" +
"yC4QjjMJLKuCMaC5G/eU1qP+Ox1UzTLCI+ODeGF1Ak/YPzQ5iAGLw3iw7jgGbI+hjyb8rtUR9Fod\n" +
"k9TD7NFl5IoiFUtkW+5ATmgiQsvq4FrcABcGtvglnLgyVciCgIgfAYlMYl3XD/NGTpd0wia/AZaR\n" +
"abBhwFoWNMOymsuqumFVKa5kpYlnSaXFADfIpElPyIFzcho88oqRTzjiPbwRaOwAz3la7NchwycM\n" +
"PpklcGDJZRSZCfPCNj5HO0wqxCHkNujlNzH7NEA3swYaUTlQ9YnF0bQcVFfVoODAeSTSkyTPX4OU\n" +
"edosu/QRxYziP0MN7spKuLFbCUGhyoiPW0EwVrAsEod+mT2Ep/BjCRW8XJ45pGXMHpLpFt5DDsoE\n" +
"EKKfgETMe7tPQzAh8b0yFYGnFyHp4FrkrbdCmSF903ITVC+3ReFcE5QutEDRfGYKAlK9zAwNy21w\n" +
"i1mkSWQNqo1q53zHSid0rnBCl4ocknurt+KF2UEMX43EcP+j//MBmcgMwkdINzXjBxZQTFx5K25N\n" +
"M/rkFYYYDMP++ZC552LkWi57Tl9Ph+xENGTHIiE7HIbRPf547XgJ3fr70E8gnjLoXzArPLc+hocW\n" +
"h/CAI8+A+UFmjaO4Z3mIGeMwui32S7pjeQC3mT1uGu1EhZo9Clj3pqyyRe7Zq0gtLsf+8hb6g0Ks\n" +
"LWmFCcEwEleoVjOLiKxAWMxqmWHY2wpVd8O+/q40bSUO/ZZ0wIjBrZlVg9VpFTBModlPzoZbdg7y\n" +
"q2uQExYLPwtneCrpIkDTArlX/RGRXYLtqUXQj8qGWWkbzCq7mIU6pYxhXNQsgbEmpwnaCcVQ8wyF\n" +
"SUQyNiQkIbKgCEUpSQjWsUbWD/pIomGPn6+F2Hm6BEQL3rNUcXy6GkycbbH52nZ6ES7zW4KwCAJB\n" +
"zxHC0klkDFFmhQWNH4ESULw/bLuAQMxFqO8MhPjMptmejVAa71BC4u85F94Xf0D4wRUo3GyNirXO\n" +
"0n7MnWeIEpZNpfR9+fMJB/sySmSKJiV7ah2aqRZpWq5mqpW6ucJBAqRz5fr3gDwzPoDhsyEcMPuk\n" +
"nyv8Hw+I9L8zRC1JOMTdMUaku+mNYvT1IAZpgl8nV2PYrxij51KlW8mM7I/A2OFY6ddsY65BGNrq\n" +
"jcGN1zFEnzHkchWvNl3BS3G4lqXTU2aKh4TiETPHY9auj+k5ekz3YcCO5ZXTScmUCzA6TV1xy3g3\n" +
"2tfuRJPuFtRpbUCRkjXiF+ggw3IrGmmGfXLLYJOeB/P0WuixvDGsZlZgZrCQMgozC2EQpZgtSy7R\n" +
"m5Z1wKCgBRoc4VckFEI/vRhr6S0OpGcjNbcKpRztfWwdcWmhBtyX6yDxwGlkVjZiX24JjJLoM2jq\n" +
"bVlGCt9iWtYJI5Z6+vkt0M5qhBbLM93wHBh4hmNPRi5uZGShNKMQaUcvwn2BJkJ/0ETsIgOEzdWh\n" +
"D1mLwDlqOPeDMnYZO2NNcDiWekRhnlckDE5YISRoFUGgkb84TSqtwsShXJE5AukxAuSHasVRqxB/\n" +
"llQsrQKFKfdYwBJqCkuqafCi2fY/vgzp24zQ4kx/p7MLhbOtUTCLJdV8wiHKqLlGqFpmjYYV69A4\n" +
"DkMrg194DCEBxI81GZDbkwB5arQfQ0f9IOvsGa8wxoPpv9H+twZEXMIs7qYhrq6V/pHKw6cY630A\n" +
"Wd0tvNx9Ay8OBuHlqRjulAgM7fKlz/DCM8eLeL3JHU9dLuOp00U8XXcWT23oL0wPo5/+oocZpI/l\n" +
"0x3zfeg1349+ZowHYrnJXnSb7cNti324Qzi6rQ9KGaSL2/VYHUSr/k606O1Es+52NKzZgurVTshZ\n" +
"YoxYPVtUBcYgil7ANaeIAcyyp6BRKpuMK7phVtMN04rb0u1qLDjam7AkMyonICyplvonQiM8E5ti\n" +
"M+FHKNLjUxC08xAuLmHGUF+LqM0HkZlRjOsltbBLLJDOpRixXDOtug0jPr9JMUuz7AaCUQfNpDLo\n" +
"JZZCzz8Bm1IyaNozkJGUjsxTl6WLHT2mK8N3uipNuSY9xyqETFPHVQKz08gJlhf9oMmMtCIwBZr+\n" +
"SVANjoOSdyw2+rjCP1wb4eFq8PMShnwZIkJFBhHnMwQgImuIMkocjZoPX4958POYgdCzSxG+XRPZ\n" +
"tvaoN92BomU2KF5ghfQpGihfZIacHwxQSSjqlOxQTzAaVtizlCIQBKRNiXAspd9YZvdnIZkMyOQS\n" +
"68nafXjt6g5ZS6f046l/CEDEmxTBLgX8uCbm5VfV/nFeHI16J36L8eqtdJe/kTv3MVLfhWEGgSyy\n" +
"CDKvNMguxWP4WBhebHLDbf1d6DY5gAHHc3jK7PBq63W82HwVzzddxZPNV/Bg/Tn0MvgfrjuJ7rWu\n" +
"9BZHCcRxPHQ4hXaT3YRhP1qYGbptDqNWdzPnD+Km6V60W3I5s0ab+HGT3g7cYiZpNdiFDpN9LLVc\n" +
"0c7pep2tqNXejHxVO0TP0UbmnuPIjkvDhYwCeoMS6KWVQ48ju7Ew38wqZuXMKmK0p08QgIgreE2K\n" +
"WAolF8IlNgPBHO199h6F12pThNvsQGJYIiIab8Ke2cWUmcZI+BYCJ8Aw5PMail/ccfma1CpoJZXC\n" +
"MCYTmxOzcC2rGDHMRuUe/gg0sIM3wQicsRrBwmfM0kDAdHW4LdDGfn072O48Bv3wZCgHJ2BlWBpW\n" +
"hWRBNaUcavHlWEZoVQLDsTXwIDz8teHvvQRBgctZZs1HROBcQiJO/M1nebWQWWMxgt1UEHxQFUmb\n" +
"1yLbzAHVWhulsil7tj4yZ+ggZz6XM3OUsaSqX2KBZnqOBkLQzEBvWGaLesJSx16AMaG/JIMIQIRJ\n" +
"F4A8MtyLV9suYaS2dfw8yHgQ/jfazwOICHwSLYyTuOmB8BPiHrAycS9Ycfuc528g638K2c17GK28\n" +
"idGMOgxHFGLUIxWy87EYPRGBd0fD8e5QKN6xfHrnGoixnX54veGSdIeQNu1NuGWwA52Gu9C1djea\n" +
"GbjlxttRZLsHeewL6B8ydTchfc0GpDBDJBKGVL3NKNHfgVJCUMsyqsFkD0E4wpLqEJqN9qBefzsh\n" +
"2MZMsQmtxq6oXOWERj2WWGs2ynudTVy/CXV83iINOxpNCySZb0HaVR8EZBVgc2oBDDn660pHlm4x\n" +
"o9DMC8/B8kpI/MxUulkBTbVZTg225VTBN7sYoREJCMwqw04GqkV6BXQyqyS/YkK4jAvbpRs7ix8R\n" +
"aWRUQjO+AGsiMuCckI0L2QWITkhBkpsHgi024sYcdXjPVIH/TFUEzVoFX8Jxea46jhqug/Xu0zAK\n" +
"S8GayAwoB4gslgHVuCJosDxTT62EMjORUnwhlCJzoBYYia3+rnD3onkPVEWA1wJ6DHFJCL2H+0JE\n" +
"nlZH/C4D5NmtQ7HWelSorkP2PMIw2wDZM/VQwBKqaL6x5DUalhIKajIEk2H4j/RjOEQJ1kY4BCAd\n" +
"NOuTPchDDoJvNp7HaFktKw9xM4fxIPxvtJ8NEJEdpN95CzjoIUbuPcZQYzdGSlowzFFwJKwQMk+a\n" +
"6wtxeHcqGu+OhGPsQBDG9gdi1NUf73b7ASyj3u3wxrtt3hjd6kndwOjGq3jrcFbyEuIHSvdYBrWo\n" +
"Mp1v2IXo69cRdPo8gk6dhee+Q9hrbIGtZzxgf8QDTgcuY8fui9hz4DqOsD/scgSnDLfiBgEIX7Ue\n" +
"JYZ8HpZT9fpbUa2xCeXqDqjRcpH6Km0XVBC2aqpOewMqVjujVHM9clVskbp6HdIOnkdaSh5O5RbB\n" +
"NrscOgxm3SJxA2aO/CU3paNeAhLT8m6YiN9kVzKz5DfCPLUULhmlME8ugWF6zfjFiSynijuhX9AG\n" +
"3fwm+Q+HCI06g9s+IQdHEnMRk5qN2IteCLXdAvdFWvCk4Q6ZtwqBP6jDd4YGLs1RxQFDWzjuPAxz\n" +
"v1goR2ZC1T8WGpHpUIvJw2ruf/WUSqilVEA1uZyAlEE5oQxKsTlYEsZtfcLh6L4LvoHaCPJeBL+z\n" +
"K5Bxygy5m6yRb2SL3GWmqFhkydLJCFnMGLlUCbNHNZfVLbZmYDPYCcEEHP8RKP8RHEISFIRhsn4M\n" +
"iDgXcnfVFjwwJCBOZzCaV4HRIXHPrPEg/G+0nwGQdxgVl5eLmx+03YOskEBw58uC8jDqmYFRcYuY\n" +
"01GQHSMQh0Mwus+fQPhiZKc3s4QnxrbfwLutHoSB2uKOsc3XMbbxGkY2XYSMGeSdC+V8GWPOVzDq\n" +
"dAkyepAXNkfQeuw6MtKSkJ6UgNyMFKQlxyE6IhjHwqKwvuom1uU3YF1Fm3QhoC1Hd9v8NpiEZ0Pv\n" +
"tB9sj9/A7l1ncU3FBglq1ijV3oI2ZqEGZp5SjpAVzCZl9CCZy81RzL5CawszjAtK1e1RoG6HzNX2\n" +
"iDPbiNyASIRUVGJTMr0JR2bttArpxmhCAhJxKYl+OQ12Gf0KyzA9egmDnFqY0p9YVN6BiXSUqx36\n" +
"eS1Yk92EVSmlUAtLh0NyAQ6l5yO9tArJPkEINF0Pn0W68GG2CJypxqyhjqC5mvCYpY6DWhawP3oJ\n" +
"er4xUA9Ohm50JpR846AaXwx1vh/1tEqo8r2pEg4V+hghKYMkFGJldBEWh2VhgVcCFh87h8M7jJG5\n" +
"m/vDegNKtB3pwcyQN9cQecwYaT/oIneugWS8a5ZYo3aRlQSFOG8hehHsk+H4rwDyY00GpINl1sTJ\n" +
"wgf6u/Ha9jhkaUUYpW/9uwIiE3+j4pb5MgyPDGF4jKWSjPPvZBgU/yiFb3Cs7wnrwU6MJlVAFpKL\n" +
"Ue8MjEkn7uIxdjIK7w6Hycul3f6EQWQHapsPxrZ4YXQzwaCXeLfxCt5tIAAuF/8o5wt4t94N75wm\n" +
"yeE8xuzPSXpjewKt+y4gNjoUMZGhHM0TkE5YsjNT4RMahi3VDXCkeXauvQ271vtwansC+5uPsL7l\n" +
"CWw5alszSMUo7rLrDE5+r4xrn9OEztBG+hIrBgYzhuYGFKmvJyQbUKjigPxVFL1I8Tg8ApQsZSsk\n" +
"aVgh7fRFZFdV4HBmLmzTS6FF36CV3wr94lbpsnVxFl36JzFSyUUzL8Rp6XxGWRtW5dZiNYN4Fcsd\n" +
"3ahU7EnKR3RxKQpowMVlK5fnr4IXyye/GasQQJ8ROE0TXtNUcXaFEewPn4ZhQBw0g5KgHZEF9YAM\n" +
"aHFwUkkulaBQY9aQwEiuwEpKid+TUkI5VsQye8RTgdnQuhSBg9svIJplZrm6E1Ln60gGO2+OETKZ\n" +
"KbIWsJyiqpfboGGxDerHA//HAEzoPwJEgNA4WSLjvNefwiEd5aLaqHYJkj8Ccl9vN17ZncQIM9+I\n" +
"uKnd34CQvxqQd7JhlksEg0C8GR7Em+evMfJqEMN9TzHGUmJE3NfVKx0jlxIxfC4aY8wSY0fDIBNl\n" +
"074glkyEgkCMbmeWEKXSJncCQW1klthwHaMuVwEpO1yUS0BAKCDgcBQwnH2vd4Ti3TpO2zG92p7G\n" +
"K8sjuL33IvIykxEeGoCU5Hg5IFlpiImPxJ7EdDg23IMtR2pxTsLp5lM43HyM9W1PsenmEzg33IVD\n" +
"RQvWFtThvOt5JHN0TJ6qiuCZKxEm7lQyXQsFKutQrrUByYuMUaBmh7yVNihfzTKLXqScwBRyWbay\n" +
"JZIWGSJS3Qo1JVW4mlsMi4RM6DJAdbNpsotaoUsQDJlNxH1nxZ099DivI+DJv0kTXg/NhGLoxefD\n" +
"PjULYbkVKM0pRfzmQzg/QxkBM9QQSBPuM0cFYXNowKep4OJcNdjuPgjdlFwo+8RDK4JmOzQVKwmI\n" +
"Bn2NqsgY6ZXQSK9iOSWyhrykUmfWUI8thAq1NCYfGlcTseeEH7LsDtBX2COHBjtrqg6qFlsifbqO\n" +
"lCkqRQm11AZNK+xpvG3+HQATEAgAJgPxH4owTJZ4zglNBkRIACJBQkDESUQBiDDq93UJiO1JyPiZ\n" +
"R168+vsCIhsjHEMv8aCxGQMpBXhLIMaCCiC7nIrRszEsmVguHQiEbI8fMwSzwg4vjG1jVthMCLaI\n" +
"XmQIwrDpGoG4wqxwmQBMFuFwdMOY0Hh2eOdwDqAkMMaBEP07QvHO+hTGrLhzLE/gjdkhtK8/hggf\n" +
"T4QG+0uAJCXGSn18YhTcmEWciuphVdiMDXX9sG1+KP2zSIfWh7Bvvk9QHsGq5BasOHqblbbi6qHr\n" +
"aFRzRstiU5TPN0DaVE3EfLUS8dM0WH+bIG+pBbJZbuQqMcOorZNux1PE8qxMwx75VNJKE8SsskSe\n" +
"eyCCs/KwMSGVBr4UmpROXh20chugW0gwCpoln6GdXQe1xEKWZUXYkJSJK1lFSE/JQeKJi/BQ0oX3\n" +
"DFX4TyUUc7UQOF8T3iynTi83hrPTDpgERkPdMx56QZlQDk+Ug8GySY1gKGdUQSW1AmqUMmGZgEMp\n" +
"rhgro0qxKKYAWiynjh4IQabzadQrb0LxXAsU/LAWpUr8jIuNUbbIAlXMFtIh2mXy8ql5PPjrCYsA\n" +
"ZbImQJkAaLL+ZN0HAPmTkov6E0CUHN8f6h3Q2YVX1iyxgtMw/PzFzwuIeKl34u/dGN6xtBp68wx9\n" +
"9zrR1VSHvpIyPI7JwOuLEXhzPBAjrjTRO5gV6BlGN1zF6PqLcjldYMAzGzD4x6hRJ7H8An3Decjs\n" +
"z2OUGhNaJ0QgxDICICRgeEdB9ARCwDBKjVlTorc4gVEL7hzzY3hjfgh3nI4hNdAfkREhCAnyR1xs\n" +
"JBISopGckgifsFBs4wi7vqoDlpkN0qUhdg394/9V9T5sKfuW+zAqaIEFA9gypw5uJ31Qu3oL2jma\n" +
"3hLBsIT+Y44eUqaoIGWaFvIJSM5SM2YWawmOfCVLFK5gvc4RrlTZFmlKxkhUtUCm62mkJKTjQj7N\n" +
"eHwW9FNKoEVDvjqnEatF+ZVQCmMGrnNyNi4VMFukZCLfMxjBhuvgydIpYPYqBM/RRijlSZ1VNsUW\n" +
"h50w9Q6SPMaasFwsDUuCZnSOBIAqfYZaGsEQ5RQzhjpNuMgWK+lnVCbgCM+Fjk8S9h73R6qLG6o0\n" +
"d6B0oY10NKpogRk/G8FfZoNmFSe5rxCff7mdBEndCmq5rZRJhCGfDMePNQHEZL1fx+eb0I9hmQyV\n" +
"gGQyIDcJSKeyM7rVNqBfZydeWR2DLCAZI8+e/7yAEAvpFo8jQ6/x5tkA7vXcREd7Axpqy1Bdno/q\n" +
"shw0FGShNTYePf6R6L7sj7unvXH/pDceH7iCZ7sv4fnm83iz/iwGHc7grd0pDDEdjjDAh6xOYJD9\n" +
"sM0pyGzky0Tgv7MhDMwM76xPA5wHtxN6J2RxTC7L4xgxP4JhasTiKIbMDuON6QHcs9qHVLdLSIiP\n" +
"QkxEqARHPBUdE47wsAi4BkfBIb8e9iUtsK7ogK24dqr+jxKwrGMZZi0uIUkvg3FGDdzcAlFktAOt\n" +
"quL4uzNuqjqidZklqhhIGVNWIWPOGqQv0EP2MlPkKVvTxFsilyAVrGCwsfTKWGGOuGVGSF23B1le\n" +
"wfDNL8LOdJZQcflYzbJGmz7DNiYLR5kxolLTkeIewHLqALzFYdrphGOmBss8ZowFOji7ygKbHPfA\n" +
"kvt5dWgKNELToMqRUy0yH8oxRVLGWEk4hNdQoc9QJhjSEaqUMqiybFOmSVfidlr+6dh5KhTh9HmV\n" +
"BruRt9AIWfOMkUHDXbzUEqVLLFGz0g61K9dJFwwKCTBqCUUNoRHeo5qepJZwCGMuIPlzmgzLv5M4\n" +
"FzKuybBIwEwGhPoTQMSRLH4X3eobCYj8it7hgCTInj77mTMIjffQ25d4fL8Xfd3tuH2r6T0gdVXF\n" +
"qCjJQUFeGnLTEpCXk0pDLD9yVJCVigIGZlFkOAr9A1BywwsV1z3R4O6D5qve6Lrki96j1zGw7TwG\n" +
"7A/jBc3gkOF+jDLQx8YBeGc1LjE9WVw/Zn4UI9xWACI0aHoIgyYHcd9sL7KPuyGaGSTI3wdRUWES\n" +
"HNFR4QgLD8O5wACsT8yFTVkrM0QTbKt6pGunJq7MtaJsm/oIzh1Yl9+EYVoZzLMqccAzCilWB9Ci\n" +
"zNROQLo4ct1RcUTbcmuUMagyp61C4nQN5Cw3RdYKcXtQc2Qxm4jzJLmEJncFa3iWZfEa5kjZdwoZ\n" +
"2fm4mFuErbFpOEBvFJ6Zh8SrPghz3AK/OZoI+IHGe9ZqGnANePyghRM6Nti06xjW3YhlCZUBlZBk\n" +
"eox0rIzMhkpCEQ03ARBeQ5RVoowah2Ml51cmF7N0K8fiJGaQ0CzYXImH9w5PFBu6ooAwFNBsR01T\n" +
"RxmzQTHLxupxMGrHwahWspXKKwmMpdaoYmapEj1VQzhqFlpKkPw5/UfQvJfIQJM0GZgPArLSUX5V\n" +
"r8Ym9OuKDPJ3AkRGI/5koBu9nS243d6I9pZaNDdUyjNIZREqygpQWUqVFaKiOBflRbnISUtEdnoi\n" +
"EhOiEM/gjAzyQ0JsBCLDA5GbmYrE6HCkxXFkD/BHemCQdOOBxmt+6Nh8FM8MtmHYiKAw4CWZHfkT\n" +
"yUwPv9cwJTKHkADkLR83oLcdBduPIsTrBtyvXkIsS6zwiGDEENSwyDB4+bpjZ3QKrGhWTfObYVMh\n" +
"vxp3AhJbAQh7R1F2Vfdy/S2s5chrnFGCLTeiEbz5DGrUndBBSO5Q3WqO/LI4ui4yRcEsbaRM1UDC\n" +
"TC2kr7RAjqotYbEkLOI2ofQszCyZ9DPxzDRJTruQExaDnKJSxHHQiFnvimBVE/iwlIri472nq+HG\n" +
"D6twWNsaTnvPwNQrBpohqVgVngrloASoROcyM5RCWRyiTZZnCxWCITLFSuo9HNSKJAIUU4KlASk4\n" +
"eTQUqfRrJazfixeaI3mWFgrmGUqlYg2DsGqFrQREDTPFBBji6JXQBByVNOyVAhJKnPP4ewByayWz\n" +
"uRikCMiALj3IzwnI+9vKjw7j9bP7BKMB7U3VaGusRgvVWFeJmspiVJUXSWBIcFDVnK8gLEV5mZIK\n" +
"qeKCbJQV5qKEykxPRn5uJmIJSBwD1p9mOpCjfFx0BDyvX0aayDSnruCB7na81nfFW+ODGGZWGDE5\n" +
"gGHj/dQBZgmRKSgCMcR+iNsIDRpxG2ag+zpbUO68H+EExPPGNcmLRBCQKJZboaE0y4G+OObrg40s\n" +
"nSwr2qGVWgJzcTFh3QAsa9jX3IGNuMCwukfKLnbi9qBFrdBMLIYBR2nL8EycOOGJHB1nNC2xRbuK\n" +
"LW4ri5TPL07JBiXzTJBGEx/7vTIyFhoiXYlwLDdD9kpLJC81lm6mELfICIk0vnFadog2dUaoihGC\n" +
"5q1B4A9rEPLDagTS2+xfaQon12PQ8QqDNqFYRdO9nEZaPbaAHqJYDgVNt4pUSk0GhBJwsF+RKgAp\n" +
"xdLYQthdSUb4Hl8Ua29B5nxjZM1di7TZeihabCZdNFgtsgVLKKmcEoCMS5RSAg6RLWr5eatZFlUu\n" +
"kWeOugVyOISqFnEbLqukqhbKJaYrqJr55tyGmWlctYvl4DRwmx8DM7n8+jEgLeM+pJ3ZYwKQnlXi\n" +
"MO8uvKYHGQ5OxcjTlz8PIOIOhKMjb/Cw9yaaastRU16IqtJ8llT5KGWWqKsuQzmny4rz/kQCGtEL\n" +
"MIrys94vL6WKCnOQT2gKuLwoj+up7PQU+Hq6IywsiLB4Iy4oAGV7T+Ch3m68XbsfI+JSZsIxZLSP\n" +
"EOzDa2aJCb1au0/SS8O9eGHgihdr9+Kh7hZUW+1AsJubBEhoSADCCEY0S61wvkYAX+PS9evYmJAN\n" +
"w4xK2Je3w7yqE+JmbmaVd6QLC60quwlJj3TTBJs6lmCVtwnMHZjlNMMsvQnaNLg20ek08NeRudIB\n" +
"rUtN0caM0bzcEe2L16Fp9loUztZF/Nc08rPWIG3RWiQtkUscDs5kGROzUB+Ry3QQvUgH8bN0EUV/\n" +
"4TtHFXvVTOBw/gpWhsRCLSKXYGRgcUACVsbk0GjTYDPolQmDqjDf4kTfuOSAyKUsHcatwspEbheS\n" +
"g12nI1DoeIEBaYPUOcZIm6mD/IWmKKdHqiLUk4EQHmNC70sqSmSMCvYVInMIEBjkYloc8q1k0NcT\n" +
"AqEG8ZPYBeaoE/PzzNFIlSzg4xYyGy20Q+UCvt5cwjDPFmVLCOOk7PPXANK7egse6u/BG5vj0mFe\n" +
"2QtxHmQ8kP8b7cOA0JpL/6jyzXN0tdagvqqEpVMOSvIzkZ+TLgV/QW6GJJElxLKJ6YnlJYRBACKW\n" +
"CeVmpyGL/iSP68SJu9zMNAmO/Gxun5+NlKQ4eHldRxzLsuQQmmJmgQHtbXiux/pSjBDibCn1nDvj\n" +
"vVhDP1vriqcGe/CERvMx1w/obEaV1U54nzqFGx5X4eV5XQJEgCIACWa55+1xHbt9feGYVw/jrCqs\n" +
"isuBmbjuqaobZuWEhRJ3Hxe9eZn8BJ74/YVVfTf0CqphxLJFLzIX6hHZnM7GiR2XEKmyDkVL9BhM\n" +
"pgwkBgMDpGQ2M8iU1UieuhrpNMGpS4yRPF+fWYKlGLdLnKeP2MUG8FIywm4dR5gdvoRVQZlQp7RD\n" +
"s6EbU0CPIc5jsIwSGSGdfVr5H49O/RlAxAlAlQSCwscePROKSpszSJurg8zZGsxwFqhcymBUtpfK\n" +
"p8lA1BKIyQFaJ86MEwS5z+DIP58ZgBKfrXIO9YM5ytkLFcxniTnfhOWaMfKoHCpbZCpxPdactSgV\n" +
"12axz5tvhBxmz4KFxijjNuK5JiD5rwDSobxeMuh3NbfiIb//l/YnIEspxMibwZ8+g4j/3DosG8KT\n" +
"B/fQXEMjTm9RWpiNUoJROA5AHgNe9Lk045n0HEJiWQ4hEMsENBPbZGckS+Y9i31OFrfh+jyCkZ8j\n" +
"nicDmWL7rHQkx0TB+8Z1xNMrJFy7jhrr3binuRmPNLfgGWF5umYbntCQPdHdIekRpx+yf6CzHQ+p\n" +
"J1x/R2cjCsQRoCsXce3qRfj63JDgCAn2l0Dx8/WEj68HzjHDOCXkwzi7CnZFDTAua8dacdVtyS1O\n" +
"i2uhOqQ7i5iWcFpc+sFsIv5PnnlFl3THRF0GqmVuJVZH5UIvsRAmwcnYcNIbpx32w1/TAXHqjohf\n" +
"YY14QhE+m6Z7hiZ8mSVClU3ho2IK79UO2GfoAocdx7H2aiDUQhOwIjQNK3zjsYZllFJkHs01zXaq\n" +
"HAi11GpmhjLCUiFdP6XMZZMBEaWVKKtE9hDnONYGZOKGqz/KDPcgnyVdPku/grkmhMJSutRcqHa5\n" +
"OKdhJ11dOyEpOMWZcapOiIFbtcgCxUsskaBmh0htBwQSZp81TvDQccJ1fobLRi64wGVuWutwbpUt\n" +
"zlCnNG1xRMsGB7QpHQfsN3TCPtMN2G+xCbutN2KviTOuqNgje6EZs4soveTXb9VJYP4RkIn3JXkQ\n" +
"AtLGUlYYdHEES/iPu1oEhJ/xufNpDOdXY3jwb/P/Cz8IiEw2gp47ragrL0VNhbxEEhlBZA4R+AIG\n" +
"AYKQCP6s9CRJYl6sE9MZqQlSL9aL6Qxx2UdyAtKS4pHGfuIMd052utRnZqRIy2KiwxAS4I0QlkIx\n" +
"F66iaP0B1BtsQedqZ9xe5YRejfW4K0SjfE/Dhb0L7lF31eXr68y2I/rwSXhcu4SLF84iiJ5DZA0B\n" +
"iJAAxNf3BjyuX8F2b1+YsGSxzK2Fca64+pYgCIkLC8V1U/+BxOUhQtI/fylugj4B00kUh2ozsToy\n" +
"GxpxedCNSIXFtRA4nvGC/Y6TcNx8GLZOe2G5YS/sthzCuiOXYO4VDi1CpRnOxzAbqYRnQZ3GWy2p\n" +
"HBpp1VBPJQCEQYJjXJOB+LEkY55ShWWEw9A3A957CYeOK3I5qk/8QKmWJZQ4f1HPUXhCjcvWoXnp\n" +
"H9XEskeocTG3XWxH32GD7BVWuGzmgo3bj8D+8HmYHXKDietZGO86BdPdp2HAz6e/6TAMnfbD0HE/\n" +
"9B33Ya3TAejb7YHRur0wX78fpvausLDfy/2wH9bS/G6YGBEWDQvEs+QsXWSGGmarapZt0jVdNOwt\n" +
"fB/S4V6C0SJu88P3Kxl0ZSfcJiBdzCBdWpvRb8pK4rA7hlo7/2b/Q/3DGWR0BJEB7oiMikBlSa7k\n" +
"NQQgEyXURNCLfgKSiSwilovDvOkp8e/n5cviJUCS42MkJcRHI5lllTjTnZaaKCmDJl6c2PP18YAf\n" +
"fYmvtzt83M7Cf+8BxG9xRarLTuRv2MPyazeK7Xegwm4nyi23ocJqG4rMNiF34wEE7zkAD7cz8HC/\n" +
"LGWPiPBgSSKLCFB8vD3oebzh7nUZp8+chkN6CUyzKmCcUQVzZg+j0ltyEIraYFDYCv2ClvfSExLL\n" +
"xC/5KD3x/y24rV5+I3Sya6HLQFUPzYRqGDNBUBLUw9OhFpJGL5GJFeFphCADK4OSoRqcAtWoLCiH\n" +
"ZtBjEIq4QqwS5y4ymC3Ec4irbClxzdRfCogqTfvKlGpoB6bCc+sNlK/hPqIJLvjBCBX0BPUsoRqU\n" +
"mDkYcE0MtveaBMePIamj5yhjsF4wdIbxnqPQ23MeWvsuQH33eahtPwXVTUeweuMRaDBraqzbBw3r\n" +
"PdCw3I1V1q7QstsHLeu90Ft3EDo2rlhjuQtrzHZCn1prsQtG3M7UhPMGLtitZI5wlpyVzFTiqFgt\n" +
"X1Mc0ZK/N0Kx1IES09bMJvwc9H11qzeheM1mZJvtRuneS7iTVYTXz1/8PDeOGxl9i1M7N8KbXqCy\n" +
"WG7EhZ8QpZMAQgR8SmLM+2whYEhNipWWT/Ri2YQEMKkEQYARK8qn2CjExkRIxlkcZRLTApj4uCip\n" +
"j2MW8eco7ymywPnT8GNQXzp7Al5XzuHiyWNwP3cGnm7n4EcFUtePHIL/+bO4fu4srly9gOvXLkpH\n" +
"yERJNQGHyB4im/j7eUmQePlew43LV7DLJwTm2eWwyqyGYXad9J+OJoOhm9f0R+U3S/8WTEiAolPQ\n" +
"Dp3CNmgX0LhzW/HvwjTSy6HJYNZMLIJyHE12chHUYlku0WSrMUMohWfSgGcy04iTdkXSVbXi+ihx\n" +
"gk9FnP1msEtgCAlI/lJARHkVmc+yKhCFKi4oZ2lUPMeE/oElFYNNuhiQZYq4bLx5mf0fNQkMoZbx\n" +
"ZQKQ2kXWSNZwwrpNe6C19TTWbDsD3V3noUNAtHeegubWY9BhhtTeehxrnA9B2+UwdNePa8MR6HHe\n" +
"wOUIDLjOcP1BGDocgBHBMacsbffBmrKy3Q0Lww3YtMIcQYvNkLPMHMXL+d4XmiKffiePpj91qSVi\n" +
"lC0RpGqJaxo2cNNluWa8Cdstd2DLxqNwPe2Fa/EZaO/ux/Cw+D3ITwzI6OAL7HW2xTVPT5QW5b33\n" +
"HBNZQgS9AGQClOSEaCTFRyEpIQaJIjNMZAUBzrjEutjoCETQKEeJQ6/jI7swztKysGBEhgYhntsk\n" +
"xkQiOjIUEQzwAO8bCPLzxlmC4e9F73DmJDODJ7zoVYQJD2JWuHLZTQp8t/PMHFwmnjOemUiAJ6AT\n" +
"ryPWi4zi7eWOGzeuwtPrOqc9cObieTjlF0OXNb8RfYVBkQh+giGuj6J0GPRrcuX/jlj0E/8/T0g7\n" +
"l2IvrqnSymmAZnYDNHLqoMnp1YRNI7OWqpYDwCAWQS/MthpHew16CvEjJXWuE+vFj5akHy5NAkJM\n" +
"T5YEySStyKjBSmY+AZIyTfnh81Go0N2PkvnWKKbfEOa6kT6ieakdYSAAlChVBAR/VhyxhS8Rd5ws\n" +
"XmwBXwa3zt6T0N1+FlqubtDccw6r9pyFBssrDfonjW3HoL6ZmYQAaBIKLUKh7XwYOgRDl72eE8sv\n" +
"p4PQc2TJRUAM7ffBeN0eQuIKK2YdG5bQtlxuunYTrDXtsFXXEXt11sOVfme3rjO26znDWd8Ztgbr\n" +
"YWHgDCPD9TCg59E13gwdlmy620/D9JgHtngGoqy5HUPSD6Z+YkCGnj+Cq7MNTpw6jbzczPdwTGQG\n" +
"kREEGOIkoBjtxUlAAYg4vzFxcWAqPUcSt0kkPEKidBLZQozqEyO76CV/EOgnwREeHCBJZBkxHxbk\n" +
"jwhmsQQBTHgI4sTZcD5mItjFESrxeJEZxHNHEapEgiigEPMSfHwdsX5i++vMSldp3t09rsCDoIiM\n" +
"s90nGIbpZTDIKIM+A1oCI19AIAdCwCEA+LE0s+vfa3VWHVYRiB9LPZ0gjGty4E8AIYnrJqQ2WfQi\n" +
"wo9MSMz/cV0VVhEwjXRCQsDMvJOR63AVGQtMUKfsiHIaX/Gz1haRFcYzw59T63IHSWJalDWNInuw\n" +
"PEtXc8CGTXth4Hoeq/ZRruegtuu0JNUdJ6C67ShUCYfaxsPQIEirNrB3OYTVnNYSGcWJcjzEMms/\n" +
"dJk1RG9AKIzWucKEwW1OWMwJlDWzgDXhMqc/MXfcCwvbXSy/NsOQQOgZukB77UaWZ9uYgfbBgK+l\n" +
"z9ddIw5uHLsKk7NecLgchFMxqWjqvYfBn8ODvHzUh4Ob7bFn3z5k0TxP+IwJMEQZJbJGTCSDloDE\n" +
"RonfX4RIo74ITjFyy0fvSIIRIRlvEawTZY44FyHKHBHokokO8JHACPb3gf94xggJ8JUAmYBGXFcV\n" +
"ySwjIBBBLwCIIZCTX0+UZ6JsE6/zPlswo4ijWZcvnZd0/twpnGfZdvHSOVy6zGVX3HD63DlYpRVC\n" +
"LTIVusllEhjaUlao/xMYBAST9SEYJoAQ5Y8Exfj8jwGRMgOhnJCqpBq5mCHUJkl9ktSYOZQzK6CU\n" +
"VQMVlnDuB71Rt2QDClnLlxMSYb6Fx2hRcnwPwH8mAYjIHo2LbFE6zwxnVtlBZfsBrN5Jz3HwPL3H\n" +
"GaiytFJhWbVy61Gs3HwIKzcegorLQag6H4AKzbfoVZ0PQo3SoAQs2swQayhdAqPHeQOCtHbLcRhv\n" +
"PwnTnadhsusMzPaeh/nBi9QFWLG3IIzmG4/BeNMxGG09BSNmL+MT7lh7zhP6Fz2he8UXBu4BsPII\n" +
"wna/WLhnFKHj/iMMjcp+ekAGmUGObHeGuYUl4uKikS2OUjGDpCTSJzBriIwhwJjIIBOAxIjrnigR\n" +
"xKLEEaVUNJeHhwVKl58LiTPZQQQikBKAiJODQZSAI4Clk4Aj0NfrPSxiOoSPCeNjhQRcImtMnNMQ\n" +
"/kK85kRmERLbiJOEHu5XpPJL6AKNu4Dj5IkjOHvuJNwunMGFi2clSM64HcXm0GQYiyNQzCTisnNN\n" +
"AQeh+HNwCGkwUCfrvwLIBCSixJJDMS4GvCoNv6SsWqhNkrqAULzOuNTSa6UjVxsuxSDL/DCKxGHb\n" +
"FeNl1ApHyW+IHx61KBGA96CIfrIIxoSWOUg/fBLnP9IWGcNe2xpLXfZj2a6zzBYnsIJeYzkzxrJN\n" +
"h7BswwEsdzmAFdRKQqFMrXDah5WERJnLVDYcghpBWEUTL7Sahl5ryzGsIRSSf2GJtmrPKWlaa+dJ\n" +
"aO09A21CqHPQDQaHL0F3vxv09nAZodQkPKuPXobametQO+sO9fM3oHneC2sueGHtRW843AjBsbh0\n" +
"NPXck+6G89N7kOGXOL1/O0yN1+L0hSvITBVmO04qowQc4iesAoxIBr6QmBeACFCiwlkaBbNk4nIx\n" +
"8k+UTqFBvgx0guFHCPyZOSh/Hw8E0IwH+HpKcAggRC/kR38g5v3EvICIkIjgF5lHSGQHMS8AmYDD\n" +
"/fplXL1yQQJCHOIVunTxHL0JMwanT508inNnT+IsDb+AROi822nOn8K+6+4wjcuHZlSmdI5jNf3E\n" +
"GkoAImD4cbYQ+iAcP9K/W5fBaWYJNT7PeyB+JAHFKvHa4xLz77MLpZxZB92IAnjt9ESRmjPKl4rD\n" +
"o7ZoFrfQWUlAxFl+UWYJOFY4oZngCCiaCII469+yXCxzlCSWNdJ/1EiXlVgijKbZSNcOS6y3Yv66\n" +
"XZizfi/mM+jnr9+HBfa7sMh2JxbTYC+234NFjq5YymVLHXdjyfo9WLphHyE6CKUthwnVEShRol/J\n" +
"0mjl9mNYsfM4lHafgNKek5KUCcpy15NYeuA0lu8/jRX72O8+jqXbDmPxjiNYsJOvu+so5u46jtlc\n" +
"PmP3MczZfQo/7DuHGQepE1eg7hmG9JudePtzADImG0ak33VYGunC1m4dwln/p9N0pybJfYdUThGK\n" +
"iFACQBBEL8AQEvMCBmnEZ4kkJMolkQWCA7wlICbgeC+CMJEthCbmxSUo3vQNolwSpZKQyAoCBOEl\n" +
"RJYQ64TEvCihJsAQIJw5fVzKGhIUZ07g9KljEiznuGwCEJFFLhCQk1euYD3rWJ2EfGglFWMVR2eR\n" +
"QSbDMRmIH2syAEKqk0UYJpdKK9Np1rnsT7LFfyA1ajIg6sxs6jl1UM1h5hBZLrkGB93ikGNyDHUq\n" +
"ztJ9plqVnSQYWpWd0bpyPafXEwCCoLQedQShlpmikga8bJm4ctcKBcutkKdkhVz2ucsskc0+RsUK\n" +
"pw0coa6/DjP17THF1AVT7bZjivU2fGfigimGjpi61gFTTZwwzcIZ0602YKq1C6bZbcJ0hy2YtX4H\n" +
"5jjvxlyXPZi7wRVzNlL0MnM378PcLfsxd+tB/LDtIGZTP3B+FtfN4HbzthyUoFhCLdp8gI/bS+3D\n" +
"VD7uk/W78JHzHvyB2/9u1yF8tucsvjx6BV+e98T3N0LxQ1ACrtU34fmbtz8DIGOj6GqpgoulATTU\n" +
"lLFx6zaEMhOkJ8cilR5EZBIBisgYE6CIXsAxkUGEBBiiVJI8BbOHAMPX6/p7QPzEeQ7PaxQhIAwC\n" +
"CpE9fG5ch7fHNUk3xoEQPkJIZAQBgsgSYl6smwBmInNMZA0BxgQgoheQiGk3UW6JzEFAhBc5c5Yg\n" +
"uZ3FVm9faDODrI4rgGbaeHCOw/GfAfJjScH/XiLYmQHGpZJN38DgV2GgqzLg1f6MBBCTARHQqPO5\n" +
"tJg51COLsIlwxDldQJmaC2oIRw1LrFolOzQwm1Rqb0aZ/lYUGW1DgZUrCmmWc9YfRKYQvUEqfUMi\n" +
"y6T4rYcQz4CMYx/LAA3duh/uW/Zhg8suzDKyxcdrzPGRrjU+NrTHx7rrOG2Lj/Rs8JGBDf6w1g5/\n" +
"MLbHH8wcqPX4A2H5A0H52GYTPrXe/F4frduCj9dtxSf22/Cx/XbO78QfHHbh94678Fv7Hfit0258\n" +
"6rIXy1heaZ+5htWUxtmrUDt/Fepnr2MJS6rPD5/H58cu4utz7vj2sjemugdjtm8U5ocmQik2U/on\n" +
"RkFtXXjxc5xJF7foEddhxQZ4QF9TGWtNjWmMA5CTnYrsbBr2rGQpmyTFRSCRipNukhBEoxyK2Fia\n" +
"6QhxpIqZg9lDMuHiSFWwMOcsmXzd6T88ucyXvbc8A7BcEiAJoES55XlDZIrLhINg0ERfv3pBOjN+\n" +
"lX7hEkf8q1wmSin365ckSMR5D6HLwniLjMDgdztPM04oBCgCjAkPIual6fFMcubMcZxlpjlz8TiO\n" +
"XWOZFZ4EpeAMqKUWM3vU/ykgf0ZSAItDvNlNDOAGlk4MYIIhAlyVXkaVZZp6Ltex15DmRbALCOqp\n" +
"BqhxnVoO14vHc7m4Z68eSygTguaQVo1NLPn2ZJTiaHohTqYW4DJ90gm/ePgfuYGkbWeQtPEA8liv\n" +
"5+85hzzW7sUn3ZHl5oWcK37IpYnN8Q1HTmAUcoJjkB0SI/W5oXHIDU9AblQicqisiHhkhschLSIB\n" +
"8WGROHDuKuZs3oOPNmzHb5y349/st+I3TjvxO2aF3zjvwr8578SvXHbi34Q27sIvNuzBL5kpfsls\n" +
"IPXULzj9L5v34lfbD+GXOw7hF7uO4Jcsj3615wR+vfckfr3vFD6i9/jDUTd8deISVD2CocP3qukT\n" +
"gtU+QVjjFwpt/3Co+IRhvnsQFvtEQikgRroDpAq/J/Gvos3jMrE5JR8XiypQ03sHg0PMIGME5L/J\n" +
"yIc9iLif1egIHvd1w/PCCax3sEd8ahZutrWgvrZSOuxbmJWO1DiRSeRmPSFWHFGKQDLBEUetxNEr\n" +
"cWRJGGhx1Ck0hCad0IQx2whNXPoh4BFQiCwz4T8EID7MLjc8mD0Y+J4CFsIgALlCTQAhAJHD4ibB\n" +
"cZHGW4AhAJHDck7KGKIX2UX0E9lF+BFRcgl4zp46i7MXWG5dcINjYCi0YgqgzaDUzG2hF2GZNUmT\n" +
"R/RVuQ2SVuc2UvUMfkKRSzi4TD2/UZoW50VWiRu+5bQRgmao5VPZLQSOHocmXS+1DCapRXBIKsD2\n" +
"+DycSMjCxeRceKUVICCzCIE5BQjLLUR0XiGisosQl1OChMwSJGcUcZBKR3Z0ErJjWGZFxSEvOgF5\n" +
"sYnIi0tCYUIaihLTUZKUgaLkLBSnZFM5KEmdpLRcFKfnokiI04VpOchLptKy4BEYDjUxYh++gC+P\n" +
"X8WXx67hC9b6Qp8du4TPjl7AZ0fc8Onhc/jk0Fl8cuQkPj16istP49NDZ/AJvcHHh4TO47PDF7n8\n" +
"Ej49dhmfH7+Cr05ew7en3fHdGQ9Md2M2uOKLeTcCoOQdDmW/WKj7xkDLLwbavhHQvBEMLQ9/6Hr4\n" +
"wsTDB/a+gdgRFgV/7p+EwkoU1jWjseMW7j98wEF9ePxSk7GfFhD5DYDHaNbf4mFvJ47s2QkDYxuU\n" +
"llaht+cuujq70NF6E631jaipKENBThbSkjjyxEUTBnESMITZwVcy0ELiiFNkBA18pDjcK85L+Emm\n" +
"WsAhFCjMNpcJSHxpzsWlJv408yLDTJRlAhAByjVmEwGOgEhIQCJ6L5ZqYrlYLwdGXoZNlFqi9BLL\n" +
"RMaZDIuUia5dl/rrXH46MAjaAbFYHpIK7bxmrGFAT0icFJwszcJmrC5owioBQ0ELVAvbCUYbs0cT\n" +
"VoqskN0o3e5HN6MCpmmlcIgvxo6YYpxILIJbUj7cCUJAch5iU8oQl1GC+NwixOaWITG/AqmF5cgo\n" +
"KEc2p3MLqpFXUItcBoRQHpWdV4qCvGIU5xShJKcQpdkFKM3MQ2lGLsqowswcFGRkIz8jC4WcL87M\n" +
"l2+TLbYd354q+ZFKc/mchDE5JQMWLHFm7D+JWWevYNrpy9QFzDx3GT9cuIY51NzJOudB3cA8lkML\n" +
"L3pg0cVrWHThKpZw3aKzF7GEj1Nyu4qV569A5dwlrLp0HZpXPKB79hr0mK2M3K7B9soNbHT3wv6A\n" +
"IJyPikUoQU0tLEZlXQPaacB7e+/jft9DPOp7gP7+bjx42I9HDx/j0dPXeDk0iOHh2xzYcxm7LxjF\n" +
"o3JI/kpQPgiIqOHEva7GxmR4PHAXN9yOYtXKJdAxNOWXmY7uu/3ounMPfX330dfTg96u27jT2YHO\n" +
"W4SmuQm11VUoKSpEVmYaEhNiJXDCQ4Ph7+vNAL3MQHWTjkJJQcmAFVfwThy1EqWWdC0Wzbyf6AUs\n" +
"kl+5Dq9xMOTrPViyiaNa7hIcUknG9UIe7qL0kh/NEhCILHH1yp/6Fak8G89CXje4zOMyPPga3p6e\n" +
"cAmPhUFWCXSzKqFT2ApdcZseSkf8azSCIu5AoksotPMboJ1bizXMHPpZ1TBNLpMywca4dLhGpeFs\n" +
"Ui58WBaFZhQihs8Xl1WMxFz2DOwEBnhSXhlS8suRml9GGCqQVViFzMIKZBKMrMJSSdnFBKSoHMUl\n" +
"lSjmAFVcVo3C8mqUVNSgnPNlJVRRFUqLK6kKVBSWoTy/lCpCBSX6svxilPO5hCqKyt5LzJdyXUle\n" +
"EYqYqQqz85EvxOncrGxEx6fgrKcf9jCQd1J73X2w97oPdlxyx0YGudBWBvrOqzew57oHXKm97jew\n" +
"18MT+29447h3ANyCwuHB7OYXm4QwZrS41Gyk8TWyC0uQV1KO0opaVFc3oLmxBZ03RRzdQU9XF+72\n" +
"dDPGOtHV1Yk7t2/jblc37t25jd67N9F7vxUDfVXov5eC/r5ruD/giqdPrPD6jQntwTbGby+DeIQS\n" +
"meSvI+SDgExu72jYezubceHoTmirL8fiJUvgsHE7AiNiUVFZg45b3aR5AAP3+9BPcPp6+nCX8HTz\n" +
"g3aQ+naRaZrb0NLQiMbaWmacClSXlaGqrBiFudnISElCYmyUdKZcwOFHcHxuXIOX+1VcucBy6NwZ\n" +
"XGTJ5H6N5pze4ZLbCVy5eAbXLp/FDXEk6zqhIRCeHhcIzxUCI456XZMA8mbmEbAIqG54XGcvAGCG\n" +
"Eut8OO19DT70RD7ehI69p1jGxxz19oNBRCKME/JhmFoKq/x6SbbUegKxKacCOxjsR1OKcSYhB+5J\n" +
"2QhiiRKZVcjAL0MSAz6BgZrELJDGgM/giJ/JGvmPIgjjyqJyiquQW8IsUVqDfAZ5IWEQyicYBZzP\n" +
"43NlE6oMZohCLsvLL5GyeQnXlXI7oTLOC0CKCsYzC4O+tKBE6idPlxMMMV0mgOG06CerqKBYUiHB\n" +
"KsjLR252FnIy8ghMhnTPgYJcKo/KFz984/spED+EI1wFLNMKqaJclJRko6wsC5WVuaipyUdtbTHq\n" +
"6krQ0FCGpqYyNDeXoqWlGC1tRWhrLcPNm6W4dasInR156O5KQvedSHR3x+JOVxQVzAH4BuVGcA5w\n" +
"vRPXrcG9XjU8fLAML54uxJsXizA0uBCyMTUO6vYYeVfJMohgvHtDMZP8Fe0vBkT845oR2TAe3OtE\n" +
"gOclOFmb4oeZ07BoyTLoGZni+Gk3JKZloq6pDQP9j/D48XO+8ad4cP8x7vc/QB+huddLaHr6CU4f\n" +
"P2gfP+g9fvAe3O7oliBqa2lGU2M9/U01s08lamuqUFPF0bCMX1pRAUdAfrlM/QXZ4heI2chMSUNa\n" +
"YipSExKRkhAnvxYsQZxJj2OfhGSuS0tJRVJiPP1QBP1QNLONuCQlEGEhIYgKj0BkWAQiQsIRHhyO\n" +
"aJrSqPAohHFZeFQEfINCcTEyHm7xybgcnYqgnDKE5lciprgGiVQqA1qM9OkctbMY2LklNcjhSJ7J\n" +
"ETGntJIBzxKIgZxDGHKLGfhcn19aPUliXq4CqohZQS5OM9AnJLJFSXkNS6xSCRiRRQQg2Sypyrlc\n" +
"QFEuMso4FCKj5HFdblY+95VcuRn0FJksuURW4HQJM8bEOrGsorgcVXzPE6ooq0RleZWkiopyVJZx\n" +
"ujSG8J1EefFBvsY+brOLOsRtTlGnUV1xBjXVp/m9nSAIR9BUd4Dl9160Nu5EW9M29tvZ78DNll24\n" +
"2bqbg+ZO3GzfRCjWo6PdGrdvmeN2pwFu31ZFV/ccdPfMpGbjfu9yDPQuwv17c/CgbyY1Gw/75uBR\n" +
"/yxWNjPw7NEcvHy8EK+fKmPwlT4Ghzcyg1yG7F23/MbpUhb5qQGhHxkaofkRv09/9QwNVaU4dWAn\n" +
"THVXYdmSRVi4eBFWrdbCeufNOMf6MjIqkSmzCffvP8WjB4/xYOAhHt5/JAdmYFwEaYDw9LNEE7p3\n" +
"9x7ryx7cucO0ynKtu7uLO6yTI0oHbt1s4iiTjJtt7rjV7Ib2llNobT2IluajaG44icb6YwTrEOpq\n" +
"9qOq2hVVVQdQXXWMgPELqz6PKn550hdZxi+x0g21VW4c1S6husaDX2ggl0WitjIB1ZVp/JKL+HjC\n" +
"WVGHkqo6VFQ1oqymGaU1Taioa0F5XaukitpWVNawF/O1XF7bzOkGLi9HeXUhH5eNyupYKp3THKmr\n" +
"alFa+UeVsKyYkCgxylgulbJsEhJACDCEpHWVdVImEdMVVfUoE8AQhpqaRn7O+veA5NI/VHKdgKSY\n" +
"8wIEEfwiW4jpagZ6Ec3+RCap4WuJZfX8nA3V9X9UXSP3Z8P7vrYqnq+lierSb1BV8gX7z/nYz7iP\n" +
"vuR++1pSfdU3rA6+R2Pdt2iq/wYtjd+hrfl7gvAdvzehb/k9fsfvcwq6Omahp2MOem/Pwt3uGbhH\n" +
"GAZ651DzMXB3HjWX03PRz2X3785hVbKQg+xySoUllR49iCMeDuzE04fn8OyxD54/icCL5wl4+7qQ\n" +
"HqSNHuSpBIawCeI/H4ub5f417S8vsYQfGR2V7tAuoycZoTd58qAPpTlJOLZ7EzRVl0mgLFq8FEuX\n" +
"K2G15hqYmFhi797DiE9IQVt7Bx49foynT59JekhT9fgx4XlEaB48oO4TovusKfu4E+hr7t1jydaH\n" +
"e/13cbe3A73dPoTHnsuMuV4fA/d0uI067t1TYU2qztpUhTt7KWvXBei+NQ9dN+dTC6nFuH1zKTra\n" +
"FuNW6yK0Ny/kKLaAo9k8NNcvIFiL0FCzDHVVSoRCieWABmEyREW5FcornBmMuwjZEcJ+igFC2GpP\n" +
"sz9JMI4y8I+iQqhmL4HYgbLqDextuc6IMOgzqNegrEqFUkNV/WbUNqSjuqEZNfX0ZwStrmGS6pu5\n" +
"rAk1tY3vVT2umtomqRdQiMyRz+AuJ0zZHPlLxGgvlhOGIpZgIoOIkktMi5KrgDCUcb6spEICporb\n" +
"lgh/wvnSolIGfh3qqusY0M2s/1vRwgqgtZnlcMtNDj7tkhob2pjZ/dDcosL9p8X9t4Tifmyex23n\n" +
"UvOY/ecRhPlob59DIz2X5dI8Zoa5LL3n0ZPyO7i1nN+DEvsV6O5Yia5ODWYKbZZKa3CnW4dVhTEh\n" +
"Med3acPemXIlHCeYPS4zBvypCPQPpHDAzeUgW8YBtp4Vyk08edyLZ08e4vmzV3j5aghvB2UQ//pA\n" +
"xOro6JD8QJPIIozZv6b9FwChpHQlDp+NMJPIOD8q3Uju/r0e6ee4p4/sg+4qZaxYvADTvv+OsCzC\n" +
"gkVLoKq2Cjq6enBwcsSxYycRFhqFKo6g/X0P+MFe4vGj5wTkCT8woSE4Dx88ZHYRwDzgKNFPxRJG\n" +
"Gzx9vJyjhTIes+Z8+mA5ly3D4/tLKXn/6P58bvsDHt6bzVJwFh7c5ahE9VN9vRylhHqm88uYKqmn\n" +
"axqN3zSOZlP4xX2Pjo6vmPK/5hf+DQOCo2Hjt2ion4aG2tksGeagXqh+LmvoOWho/AFNzXMYNPNY\n" +
"T89kPT0DLa3T0dw6BU2t36KZI2aLUMt3aG3i87CvbXbh+ipmPpaT7be4/R/VzIBsEsHIAG1goIp+\n" +
"ssSyeoIkwBEgiXnRC9XXNaOOEIleqKGewAmomHXqmGEauKyGWUbMNzW0SssauY3IDnU19ZIxbuN7\n" +
"aG/r5AjfJflJ0Yv5ttYOBv4tDjD1uNWWRhUz+Atw81YulUFlcz6PMFAdBejicqFu4SNu57FUKmAl\n" +
"UEYIKqkqqho9d2pwp6cWd3rr0XO3AXfvNaL3XjPu9d3kgEcj3t+BuwNtuNvXyeU90kGg/j4RE08J\n" +
"hpCIEQ6uD59JsfP06Su8ePEGL16+xZvBEQyPjKLlZjf6aO6Hh4bl/+vypwfkHf0O4Rhj2hp6g7Hh\n" +
"txh8/ZJv7DlTG9+kONR2txMdNGE+NM9nj+zHzOkCkvmYN28O5s6Zg4ULFmDBgkWcns/pxVBV0cCO\n" +
"7a4ID4uRvlgByVMBzONnHCkeMVuIw3lNLM228PmX4snD2eyXUHPx7MFcPL0/F0+EHswjWHMJzhzq\n" +
"B8IyHY8GplFTCRl1X2gad+p0qZemRT8wnTt9Gss8ufrufkeAviU839IjfSOp5/aXzEqfMxNNYWYi\n" +
"UB1U5zT0dE7ltFzdXV9QX1HfMAC+p7lkCdH9HW53CX2GO+3Tuf0MlhYaDDwvdNy+iVssHW/e6voT\n" +
"td+8TXAEPJ1obev4dxIgTZ4XUIleBHFrizzAxbQI7hZmASnIOd8m1kmB3sEyR3i9W1J/i1ld6Gbb\n" +
"Lfa3pe0nAOnsuCNJTN++xTL3Vj86u7jsNkvfWwMEoJ/qo3rQ3dnFgaZbUifL4s5uPqbnDkvlO8z8\n" +
"d7g/e6l7lLgj5wAHq/vUgHQwp/+eEKf7Blg98PvuJwh9rCruUewfivn7TzDAwXKgf4BwcODkvNzf\n" +
"sqfXfcyYef7yDV6/GUJjcxvcb/hgnZ0DqooL5P99gBKx+9e0vxwQ/o29Y9YYHsToy2d48eg+R/F7\n" +
"eNDTiR76g5aGGumIVFFeFnLTY5EQ7oerZ47D2docKspLsHjBPMybPQdffP4Jpk3/FrNmT8XcuTMw\n" +
"Y8Z0TJ06RdLiJcthYGiK/QePIZCmOa0gmyVJMHe0GXeaBnfSMu4kDe6YFXjQv5w7k8ZtYB6DfS5H\n" +
"E0LyYIGkp49+IESzCc5sgsOeJk7o6WNCJSTN/0DN5AgkwJnK5/ieQM3iF0KI+qawhPueX9wUQvO9\n" +
"pHu9X/FL/Zpf6jdcLvTteE+QeqZKunt3CstBZiZmpzvdUyRYblN3ur5ET/dH6Lo9nWBY4lZXKcG5\n" +
"y2ATASfU814dnXc4Enf/O3CExPKOzm5p+iYlthWaCOSbDHIhEeSil4JbBDr7DhH44xIwdPJ5O2+J\n" +
"/ja34TSfewKKiefrut1L6O8yyPs4fZ8jfi/6bhOMni4OAneoHg4Evfws3K77Lufv8bP3cV/04y5B\n" +
"6KV6RN/LecJwlzAI3SMM98SpgXGJ6YH+Z/x+mR3oWQcY/P1C959JkntWAcQLfk8vJDAENKK/S29b\n" +
"UVOH024XoblGDwsXLcPyJYtxYJM9B8oeaWAXHuQdK56/pv3FgEw06QWpkcFXePOcJdEAR4fb7WgV\n" +
"xjQ/E1lJ0YgJ9obnpdPMIq44sGMDtjhawExvNZQXzsU3X36Bzz/7DHN+mIk5s6Zj5swZmDVrJqZM\n" +
"+R7Tp0/FrJnTJU2d8h2+//5bzJs/C0uWzoGeyUxs2bsAO47MxP4zs3HDcz68vBfCz38pgkMWIyZ2\n" +
"GRKTlyI9czny8pehqHQhDe0i+oFlLEeUWfKoo+2WFmtfNX7ZrIW76E+6FxO+FfxSl/MLXIQBlmgD\n" +
"92kQ79MY3p8lZReRgR4KEZz7fVMlDdybSlCmUN9xmhmH0PQRqL5703Dv7jT0cl1P73fsvyUY7EU5\n" +
"1y3KuRkMcmaRjhjqllRmtYrD363cf+ybW0QpJfxII/1QraRyGujycqqiWjqcXsllNSyLaqm6Oppo\n" +
"epomPkZSU4vkGUQ2aWum2IvnbWu7yezCrMHM9D4rdHLkZ/DfJgRdIsA5/UdxGZff6RYB38++T+ql\n" +
"wGew3xNBPqG7A1zGgGcvSuZ7dxn09x5IGmBZNHBP6IGUFe5xfR8zgtAAg36AJVN/H7NDv9AjOQji\n" +
"IM5EhmBFIYHAXkB2i1DXNTShsKwcEbHx2LXvCJRVtTF95hwOuDOwbOlCrF2jgSunDjKzN9Oo/3VZ\n" +
"Y3L7L5VYQhPToq4TvTBB4u4nw29ectTmiMHyobWuHMXZKUiKCoLvdTecPrQL29dbw9JQE6uUFmLB\n" +
"rCmYPuUbyadMJRhT2H/26Sf49puvMXPGNMIzixB9it/+9nf49ttv8T23mTp1GgH6Ht98+ym+/Ooz\n" +
"fPb55/j8C8L25Zf44ouv8dWX37yHbObMHzBn3nQsXjYFKurfw8B4BizXzYDTllnYsHMmtu6dhW37\n" +
"p2P3oWk4cGwWDpyYhkse8+ETsgAh8QsQkzkXaYWLkVu8HMUlS2hm59PgzkNp8RxUVMxm3U8v0kAv\n" +
"Qh/S0MR5epIaepPqunkEci5KyqYjv2ga0jK+RnzSt4iNn0HfNRt+vvMItjmuuJ/G+csXcO78eZw6\n" +
"dYq+7Jikw0ePwHXfXuzcxf21fTs2btwIBwcHODo60r85Yb2LMzZw2dat27Bjxw7s2eOK/fsP4ODB\n" +
"gzh8+DCOHDkq/frzzHk3nL9wCafPnsPp0yx3z57FhQsXce2aOzw8vODl5Qt//yAEBgQjNDQcUVEx\n" +
"SExMRnpaJvLzaepLKwhnjQRfGwHrEFmGme4OS6Xe3nsSGH3MAkKiDJaLENx/KI3q0ggvASCOUsol\n" +
"LRMBL7zmo2d4RE34CNGL57h7t096jY7bXahpbER+SSkS09IRFhGDax7e2HfgKMytbLB8xQp+11Pw\n" +
"7dffYvbMaVBRWgIrI11cPXcClSV5ePnsMYaHxX+Y+utODk5u/2VAJl5UXOkr3XmRvajzxpjCBCyj\n" +
"7KX/NCUbwlt6lCcPmZpvtqGurAiZiVEI8bmOi6cOYddGB9iZ6kln5uey5Pr2s9/jy88+xtdffSHp\n" +
"1//2S/zil/+Cjz/+CB999Am+/PJrlmZT8DlLtF//+rfUL/Cb3/ySEP0SH3/0G3xCTf3+K8yYJjLP\n" +
"F/j004/xzTdfYNoMQkhNmUbQpn6D7wjjp3yOb7nt91O+5mt9hY//8DGmTf0es+ZOwbzFU7F05XSo\n" +
"as2AluFM6BrNxBr9KdA3/h5rLabBxGoqbNbPhP2mGXDcMgPrt8+A/eYfYOMyC1ZOs2BhPwPGllOg\n" +
"ZzwN6lrfYdkKlo5Lp2DOnG8wfdqXfI/f4Btmxs/4Hr788nN8/fWX+Iqf9/PPP8VHn/wBv//od/jd\n" +
"H36L31O//e2v8bvf/QafffYJPvviU3z86UfSe//yq885KHyGL778THoO8Vk/+vj30nafi4GD+uij\n" +
"j8anP+VAIgYV8ZgvqC+l4BIDjuhnzZqNRYsWQVlZGatXa2Lt2rWwtLSEvb0DAd0kgbp7927s27dP\n" +
"AvDkyVM4d84NbixpLl68LIEndP26OOHqjQD/EAQFikuJohEbm8Ssno7UtCwkpaRLRzPDGezBIREI\n" +
"CgpDYGAwfHz8JXDPnDlDyDlAuO7la+6Anb09DPheBAw/zJzF73U61FRU8f1337Esn4bli+bDSHsV\n" +
"9m5xRlSQj3TXz1fPH8n/25lMxOTP8Jv0ye3HgMgvBJNfqyVqPPl1W/L10r97lmAZlU9LEvPMNDT4\n" +
"gvB+1rE3m+ukm1xnJMchMtgP1y+fw/GDrtixyQnrLI1hoKWCVSsWY8lclmM0/DOnfInvv/oIH//m\n" +
"FwTkn/DLX/w/+Jd//v/hX/7l/8Y//8v/i08+JWTMLiLA/tc//X/47e9+LUExjZlnxoyp1BTC8w2h\n" +
"+z2mc3rWrGnMPJ/zeX4hgTLhhz755BP84Q+/xzfffY3vpnxLqL7FJwT46+++kJ7vey77lsvE8m/5\n" +
"fB998hG3JXzMYF9+/RV+9/vfMdB/z/fBYGeA/57973//G77uH/A1of362y/xu98SagLx3fdfMyt+\n" +
"yef4HX75b/+MX/zqn/Cvv/wn/Mu//i/80z//vxwk/pkB/gk+5eDxq3/7V27zr3zPn+KbrwkXP6vQ\n" +
"5wTnd7/9FbMu38c3X/J5P8K//isfRzi++fYrfPvdV9Lr/o77Q7yHb/lev/7mK4JFyAjMt99+J+nr\n" +
"r/lZCNbHH3/C5yCwH38srf+K+0bA9TU/m9C06SKbT2cvxGnutzlz5mDuvLlYsmQpli5bjuVKSlBW\n" +
"UYa6hhpWrdaABnullUpYsmwJFi5eSK+wAPMXzsOcubMxg6W2GLi++ZaDB6uIr7/+mu/9M3z33Xj1\n" +
"wM80exoHHa1VWMzt9VYrY7O9JdyO7kViiB9qCnNws6EKD+52Y/D1K/m/Bhz7GS53/6mbBJSUgUYw\n" +
"NPgWr188k45pPxDnPro70NnehPamOo4OlagpL0JJfhbyMlOQlhCNKAIVLK7Fun4ZnlfO4Yrbcbid\n" +
"2I8DO+l5nKywztwAlmu1YMyaVHPlUqxY8AMWzPgeM6d/je9Zpk357jN8/zVH4E8YyL/6F3zyu3/D\n" +
"F5/+AZ99/Fv8GwP0337xT9L8V18wq335CX7xz/+flKW+F1mJmWc6NeW7L/HpR7/luv8HH/HxU77l\n" +
"CM3H/JpB/btf/yt+S/2eWe7j3/9aWi+e+zs+fioD9jsG+DQCtpi+TE1FCbqa6vRp2lhnZoRN9tbY\n" +
"vcmF/m0b9u/YiiN7duHY3j04ts9V6g9RB1x3Y/+endi3ewd2b9uCzc5O2OTkiI3r7WFnbQ5D/TUw\n" +
"MtCRpLtmFTRUV2D+nFn0fdNYlkyVPsPXzCxCUwjL99QXzETic/7qX/+Jn+FfOf2/8AVhE+9VfM5P\n" +
"/vAb/OZX/4wvPiPohFTsi6nTWNqyAvj8q0/w0ae/w/ecnzbze3z+NTM9P/u/sRL41a9+IddvfoVf\n" +
"/vqX+JIB/zUHho8I9q8J7efMbuK5vvvmc3xL4L/6/GPp9b4k8GKfLpo9Tao0rE30+d3asmTfAz/3\n" +
"C0iNCUVZYTZjpAYDvZ2SJx4desvBWfyvTGaQv/YKxUnt7w6IuMGXyDaSl2HGefnqNYaGxWE5eRaS\n" +
"ZyLOy4YxxvQ5ytpyZGgQbwdf4w0l+levn/Nxz/Di5XM8ffYET58+xmOC9vDRAO4/vIf++z2429dN\n" +
"49yBvp7bhO8WujtaCWCjlMVaGqpRL850l+SjqCALuZnJ0g/C4mLCEBUm/zlx6PgvIX29xA+7rsLf\n" +
"+zoC/W7Ifwnp64Egf0+EcTQTPxgTv5HJyUqR7iFWXpyHOj53U10lWvhFtvP1uvjad+/wvfTeoZHt\n" +
"pYHtxYOBu3jw6B7fcx8ePe7Dk6cP8fzlU7x4IfQEz58/4ed7KvUvnz9jFpbrxVMue/KEA8tTPH38\n" +
"GE8ePaDu47HQfXEO6Z404IjX6L/bg/5e+cV+PV0duNnahPrqCr5HcSf+bORmpSElMVa6e4z4Bai4\n" +
"Ds79ykVcuXAGJ48ewOH9u7FrGwegDY5wdrCBi6MN1ttYwMnSFFaGurAglJYGurAxMuC8HtYZ68HR\n" +
"TB/rLQzhYmUERwsDOFsbY4uDJT2pDXa62GHf1vU4vncrzpw4jKsXz3K/uks/vBOXDeVyMBQ3Qe++\n" +
"1UoAuqQDQk8f9vEzP+Rg+gSDb17w+3+FkZFBxgjjQ5yCeCeqFiFWNP/4gIhrvOSSjQnj/46QEAqR\n" +
"WSRNlHDyDyxfLn/cOz5oTMZlQtL6P9UoJRPZSQDG5xX/p52T44+XP7/wTRJ8lCgTxcEGcbNu8XiR\n" +
"pofFpTX0V2KZfPmfbiNJrOPjxYlTcXWB9Hzi9blcmmcvPoN0FQKfU2w38Xj5gQ759XTyAUG+D+Tv\n" +
"V/g8uWT8rGK/CEnrxPbcf0LiGjnxn78kSc8rD4uJ55V6bsiHSs8/sR/Ea0vPJ94Xa3YxOMnfo7yX\n" +
"9g8lBrCJ9yv/POL9i88oHi//3OLKCrHNKJdNlNbScm4/JvafeE4uH5G2l7+GeK/y/5svn+erSc8p\n" +
"7U/p+cVrypdJn136HsR7Gf++xyUdwuVnEpL+Fz+fSXo2sR/Eh/9vtr8rIIqmaP+7NwUgiqZoH2gK\n" +
"QBRN0T7QFIAomqJ9oCkAUTRF+0BTAKJoivaBpgBE0RTtA00BiKIp2geaAhBFU7QPNAUgiqZoH2gK\n" +
"QBRN0T7QFIAomqJ9oCkAUTRF+0BTAKJoivaBpgBE0RTtA00BiKIp2geaAhBFU7QPNAUgiqZoH2gK\n" +
"QBRN0T7QFIAomqJ9oCkAUTRF+0BTAKJoivaBpgBE0RTtA00BiKIp2geaAhBFU7QPNAUgiqZoH2gK\n" +
"QBRN0T7QFIAomqJ9oCkAUTRF+0BTAKJoivaBpgBE0RTtzzbg/w8Jqj+dzI+GCgAAAABJRU5ErkJg\n" +
"gg==";
return;
}
}
| |
/*
* Copyright 2001-2014 Stephen Colebourne
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.joda.beans.gen;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.xml.bind.annotation.XmlElement;
import org.joda.beans.Bean;
import org.joda.beans.BeanBuilder;
import org.joda.beans.BeanDefinition;
import org.joda.beans.JodaBeanUtils;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.direct.DirectBeanBuilder;
import org.joda.beans.impl.direct.DirectMetaBean;
import org.joda.beans.impl.direct.DirectMetaProperty;
import org.joda.beans.impl.direct.DirectMetaPropertyMap;
import org.joda.beans.impl.flexi.FlexiBean;
/**
* Mock person JavaBean, used for testing.
*
* @author Stephen Colebourne
*/
@BeanDefinition
public final class SimplePerson implements Cloneable, Bean {
/** The forename. */
@PropertyDefinition
private String forename;
/** The surname. */
@PropertyDefinition
private String surname;
/** The number of cars. */
@PropertyDefinition
private transient int numberOfCars;
@PropertyDefinition
private final List<Address> addressList = new ArrayList<Address>();
@PropertyDefinition
private final Map<String, Address> otherAddressMap = new HashMap<String, Address>();
@PropertyDefinition
private final List<List<Address>> addressesList = new ArrayList<List<Address>>();
@PropertyDefinition
private Address mainAddress;
@Deprecated
@PropertyDefinition
private final FlexiBean propDefAnnotationSecondDeprecated = new FlexiBean();
@XmlElement(name = "additionalAttributes")
@PropertyDefinition(get = "manual")
private Map<String, String> propDefAnnotationSecondManual;
    /**
     * Gets the propDefAnnotationSecondManual.
     * <p>
     * Hand-written getter: the property is declared with
     * {@code @PropertyDefinition(get = "manual")}, so the code generator
     * expects this accessor to be maintained outside the autogenerated section.
     * @return the value of the property, may be null
     */
    public Map<String, String> getPropDefAnnotationSecondManual() {
        return this.propDefAnnotationSecondManual;
    }
//------------------------- AUTOGENERATED START -------------------------
///CLOVER:OFF
/**
* The meta-bean for {@code SimplePerson}.
* @return the meta-bean, not null
*/
public static SimplePerson.Meta meta() {
return SimplePerson.Meta.INSTANCE;
}
    // Register the meta-bean at class-load time so lookups via
    // JodaBeanUtils can locate it by bean class.
    static {
        JodaBeanUtils.registerMetaBean(SimplePerson.Meta.INSTANCE);
    }
@Override
public SimplePerson.Meta metaBean() {
return SimplePerson.Meta.INSTANCE;
}
@Override
public <R> Property<R> property(String propertyName) {
return metaBean().<R>metaProperty(propertyName).createProperty(this);
}
@Override
public Set<String> propertyNames() {
return metaBean().metaPropertyMap().keySet();
}
//-----------------------------------------------------------------------
/**
* Gets the forename.
* @return the value of the property
*/
public String getForename() {
return forename;
}
/**
* Sets the forename.
* @param forename the new value of the property
*/
public void setForename(String forename) {
this.forename = forename;
}
/**
* Gets the the {@code forename} property.
* @return the property, not null
*/
public Property<String> forename() {
return metaBean().forename().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the surname.
* @return the value of the property
*/
public String getSurname() {
return surname;
}
/**
* Sets the surname.
* @param surname the new value of the property
*/
public void setSurname(String surname) {
this.surname = surname;
}
/**
* Gets the the {@code surname} property.
* @return the property, not null
*/
public Property<String> surname() {
return metaBean().surname().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the number of cars.
* @return the value of the property
*/
public int getNumberOfCars() {
return numberOfCars;
}
/**
* Sets the number of cars.
* @param numberOfCars the new value of the property
*/
public void setNumberOfCars(int numberOfCars) {
this.numberOfCars = numberOfCars;
}
/**
* Gets the the {@code numberOfCars} property.
* @return the property, not null
*/
public Property<Integer> numberOfCars() {
return metaBean().numberOfCars().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the addressList.
* @return the value of the property, not null
*/
public List<Address> getAddressList() {
return addressList;
}
/**
* Sets the addressList.
* @param addressList the new value of the property, not null
*/
public void setAddressList(List<Address> addressList) {
JodaBeanUtils.notNull(addressList, "addressList");
this.addressList.clear();
this.addressList.addAll(addressList);
}
/**
* Gets the the {@code addressList} property.
* @return the property, not null
*/
public Property<List<Address>> addressList() {
return metaBean().addressList().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the otherAddressMap.
* @return the value of the property, not null
*/
public Map<String, Address> getOtherAddressMap() {
return otherAddressMap;
}
/**
* Sets the otherAddressMap.
* @param otherAddressMap the new value of the property, not null
*/
public void setOtherAddressMap(Map<String, Address> otherAddressMap) {
JodaBeanUtils.notNull(otherAddressMap, "otherAddressMap");
this.otherAddressMap.clear();
this.otherAddressMap.putAll(otherAddressMap);
}
/**
* Gets the the {@code otherAddressMap} property.
* @return the property, not null
*/
public Property<Map<String, Address>> otherAddressMap() {
return metaBean().otherAddressMap().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the addressesList.
* @return the value of the property, not null
*/
public List<List<Address>> getAddressesList() {
return addressesList;
}
/**
* Sets the addressesList.
* @param addressesList the new value of the property, not null
*/
public void setAddressesList(List<List<Address>> addressesList) {
JodaBeanUtils.notNull(addressesList, "addressesList");
this.addressesList.clear();
this.addressesList.addAll(addressesList);
}
/**
* Gets the the {@code addressesList} property.
* @return the property, not null
*/
public Property<List<List<Address>>> addressesList() {
return metaBean().addressesList().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the mainAddress.
* @return the value of the property
*/
public Address getMainAddress() {
return mainAddress;
}
/**
* Sets the mainAddress.
* @param mainAddress the new value of the property
*/
public void setMainAddress(Address mainAddress) {
this.mainAddress = mainAddress;
}
/**
* Gets the the {@code mainAddress} property.
* @return the property, not null
*/
public Property<Address> mainAddress() {
return metaBean().mainAddress().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the propDefAnnotationSecondDeprecated.
* @return the value of the property, not null
*/
@Deprecated
public FlexiBean getPropDefAnnotationSecondDeprecated() {
return propDefAnnotationSecondDeprecated;
}
/**
* Sets the propDefAnnotationSecondDeprecated.
* @param propDefAnnotationSecondDeprecated the new value of the property, not null
*/
@Deprecated
public void setPropDefAnnotationSecondDeprecated(FlexiBean propDefAnnotationSecondDeprecated) {
JodaBeanUtils.notNull(propDefAnnotationSecondDeprecated, "propDefAnnotationSecondDeprecated");
this.propDefAnnotationSecondDeprecated.clear();
this.propDefAnnotationSecondDeprecated.putAll(propDefAnnotationSecondDeprecated);
}
/**
* Gets the the {@code propDefAnnotationSecondDeprecated} property.
* @return the property, not null
*/
@Deprecated
public Property<FlexiBean> propDefAnnotationSecondDeprecated() {
return metaBean().propDefAnnotationSecondDeprecated().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Sets the propDefAnnotationSecondManual.
* @param propDefAnnotationSecondManual the new value of the property
*/
public void setPropDefAnnotationSecondManual(Map<String, String> propDefAnnotationSecondManual) {
this.propDefAnnotationSecondManual = propDefAnnotationSecondManual;
}
/**
* Gets the the {@code propDefAnnotationSecondManual} property.
* @return the property, not null
*/
public Property<Map<String, String>> propDefAnnotationSecondManual() {
return metaBean().propDefAnnotationSecondManual().createProperty(this);
}
//-----------------------------------------------------------------------
@Override
public SimplePerson clone() {
return JodaBeanUtils.cloneAlways(this);
}
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (obj != null && obj.getClass() == this.getClass()) {
SimplePerson other = (SimplePerson) obj;
return JodaBeanUtils.equal(getForename(), other.getForename()) &&
JodaBeanUtils.equal(getSurname(), other.getSurname()) &&
(getNumberOfCars() == other.getNumberOfCars()) &&
JodaBeanUtils.equal(getAddressList(), other.getAddressList()) &&
JodaBeanUtils.equal(getOtherAddressMap(), other.getOtherAddressMap()) &&
JodaBeanUtils.equal(getAddressesList(), other.getAddressesList()) &&
JodaBeanUtils.equal(getMainAddress(), other.getMainAddress()) &&
JodaBeanUtils.equal(getPropDefAnnotationSecondDeprecated(), other.getPropDefAnnotationSecondDeprecated()) &&
JodaBeanUtils.equal(getPropDefAnnotationSecondManual(), other.getPropDefAnnotationSecondManual());
}
return false;
}
@Override
public int hashCode() {
int hash = getClass().hashCode();
hash = hash * 31 + JodaBeanUtils.hashCode(getForename());
hash = hash * 31 + JodaBeanUtils.hashCode(getSurname());
hash = hash * 31 + JodaBeanUtils.hashCode(getNumberOfCars());
hash = hash * 31 + JodaBeanUtils.hashCode(getAddressList());
hash = hash * 31 + JodaBeanUtils.hashCode(getOtherAddressMap());
hash = hash * 31 + JodaBeanUtils.hashCode(getAddressesList());
hash = hash * 31 + JodaBeanUtils.hashCode(getMainAddress());
hash = hash * 31 + JodaBeanUtils.hashCode(getPropDefAnnotationSecondDeprecated());
hash = hash * 31 + JodaBeanUtils.hashCode(getPropDefAnnotationSecondManual());
return hash;
}
@Override
public String toString() {
StringBuilder buf = new StringBuilder(320);
buf.append("SimplePerson{");
buf.append("forename").append('=').append(getForename()).append(',').append(' ');
buf.append("surname").append('=').append(getSurname()).append(',').append(' ');
buf.append("numberOfCars").append('=').append(getNumberOfCars()).append(',').append(' ');
buf.append("addressList").append('=').append(getAddressList()).append(',').append(' ');
buf.append("otherAddressMap").append('=').append(getOtherAddressMap()).append(',').append(' ');
buf.append("addressesList").append('=').append(getAddressesList()).append(',').append(' ');
buf.append("mainAddress").append('=').append(getMainAddress()).append(',').append(' ');
buf.append("propDefAnnotationSecondDeprecated").append('=').append(getPropDefAnnotationSecondDeprecated()).append(',').append(' ');
buf.append("propDefAnnotationSecondManual").append('=').append(JodaBeanUtils.toString(getPropDefAnnotationSecondManual()));
buf.append('}');
return buf.toString();
}
//-----------------------------------------------------------------------
/**
* The meta-bean for {@code SimplePerson}.
*/
public static final class Meta extends DirectMetaBean {
        /**
         * The singleton instance of the meta-bean.
         */
        static final Meta INSTANCE = new Meta();
        /**
         * The meta-property for the {@code forename} property.
         */
        private final MetaProperty<String> forename = DirectMetaProperty.ofReadWrite(
                this, "forename", SimplePerson.class, String.class);
        /**
         * The meta-property for the {@code surname} property.
         */
        private final MetaProperty<String> surname = DirectMetaProperty.ofReadWrite(
                this, "surname", SimplePerson.class, String.class);
        /**
         * The meta-property for the {@code numberOfCars} property.
         */
        private final MetaProperty<Integer> numberOfCars = DirectMetaProperty.ofReadWrite(
                this, "numberOfCars", SimplePerson.class, Integer.TYPE);
        /**
         * The meta-property for the {@code addressList} property.
         */
        // The raw (Class) cast below erases the generic parameter: only the
        // reified List.class is available at runtime, hence the suppression.
        @SuppressWarnings({"unchecked", "rawtypes" })
        private final MetaProperty<List<Address>> addressList = DirectMetaProperty.ofReadWrite(
                this, "addressList", SimplePerson.class, (Class) List.class);
        /**
         * The meta-property for the {@code otherAddressMap} property.
         */
        // Same erasure cast as above, for the raw Map.class.
        @SuppressWarnings({"unchecked", "rawtypes" })
        private final MetaProperty<Map<String, Address>> otherAddressMap = DirectMetaProperty.ofReadWrite(
                this, "otherAddressMap", SimplePerson.class, (Class) Map.class);
        /**
         * The meta-property for the {@code addressesList} property.
         */
        @SuppressWarnings({"unchecked", "rawtypes" })
        private final MetaProperty<List<List<Address>>> addressesList = DirectMetaProperty.ofReadWrite(
                this, "addressesList", SimplePerson.class, (Class) List.class);
        /**
         * The meta-property for the {@code mainAddress} property.
         */
        private final MetaProperty<Address> mainAddress = DirectMetaProperty.ofReadWrite(
                this, "mainAddress", SimplePerson.class, Address.class);
        /**
         * The meta-property for the {@code propDefAnnotationSecondDeprecated} property.
         */
        private final MetaProperty<FlexiBean> propDefAnnotationSecondDeprecated = DirectMetaProperty.ofReadWrite(
                this, "propDefAnnotationSecondDeprecated", SimplePerson.class, FlexiBean.class);
        /**
         * The meta-property for the {@code propDefAnnotationSecondManual} property.
         */
        @SuppressWarnings({"unchecked", "rawtypes" })
        private final MetaProperty<Map<String, String>> propDefAnnotationSecondManual = DirectMetaProperty.ofReadWrite(
                this, "propDefAnnotationSecondManual", SimplePerson.class, (Class) Map.class);
        /**
         * The meta-properties, keyed by property name.
         */
        // NOTE(review): the '$' suffix presumably avoids clashing with the
        // generated per-property accessor names — generator convention.
        private final Map<String, MetaProperty<?>> metaPropertyMap$ = new DirectMetaPropertyMap(
                this, null,
                "forename",
                "surname",
                "numberOfCars",
                "addressList",
                "otherAddressMap",
                "addressesList",
                "mainAddress",
                "propDefAnnotationSecondDeprecated",
                "propDefAnnotationSecondManual");
        /**
         * Restricted constructor; obtain the meta-bean via {@link #INSTANCE}.
         */
        private Meta() {
        }
        @Override
        protected MetaProperty<?> metaPropertyGet(String propertyName) {
            // Dispatch on the precomputed String.hashCode() of each property
            // name (the trailing comment on each case records the name the
            // constant encodes). Unknown names fall through to the superclass.
            switch (propertyName.hashCode()) {
                case 467061063:  // forename
                    return forename;
                case -1852993317:  // surname
                    return surname;
                case 926656063:  // numberOfCars
                    return numberOfCars;
                case -1377524046:  // addressList
                    return addressList;
                case 1368089592:  // otherAddressMap
                    return otherAddressMap;
                case -226885792:  // addressesList
                    return addressesList;
                case -2032731141:  // mainAddress
                    return mainAddress;
                case 1897330136:  // propDefAnnotationSecondDeprecated
                    return propDefAnnotationSecondDeprecated;
                case 1276990059:  // propDefAnnotationSecondManual
                    return propDefAnnotationSecondManual;
            }
            return super.metaPropertyGet(propertyName);
        }
@Override
public BeanBuilder<? extends SimplePerson> builder() {
return new DirectBeanBuilder<SimplePerson>(new SimplePerson());
}
        @Override
        public Class<? extends SimplePerson> beanType() {
            // The concrete bean class this meta-bean describes.
            return SimplePerson.class;
        }
        @Override
        public Map<String, MetaProperty<?>> metaPropertyMap() {
            // Exposes the eagerly built name-to-meta-property map.
            return metaPropertyMap$;
        }
//-----------------------------------------------------------------------
/**
* The meta-property for the {@code forename} property.
* (Joda-Beans generated accessor; regenerate rather than hand-editing.)
* @return the meta-property, not null
*/
public MetaProperty<String> forename() {
return forename;
}
/**
* The meta-property for the {@code surname} property.
* (Joda-Beans generated accessor; regenerate rather than hand-editing.)
* @return the meta-property, not null
*/
public MetaProperty<String> surname() {
return surname;
}
/**
* The meta-property for the {@code numberOfCars} property.
* (Joda-Beans generated accessor; regenerate rather than hand-editing.)
* @return the meta-property, not null
*/
public MetaProperty<Integer> numberOfCars() {
return numberOfCars;
}
/**
* The meta-property for the {@code addressList} property.
* (Joda-Beans generated accessor; regenerate rather than hand-editing.)
* @return the meta-property, not null
*/
public MetaProperty<List<Address>> addressList() {
return addressList;
}
/**
* The meta-property for the {@code otherAddressMap} property.
* (Joda-Beans generated accessor; regenerate rather than hand-editing.)
* @return the meta-property, not null
*/
public MetaProperty<Map<String, Address>> otherAddressMap() {
return otherAddressMap;
}
/**
* The meta-property for the {@code addressesList} property.
* (Joda-Beans generated accessor; regenerate rather than hand-editing.)
* @return the meta-property, not null
*/
public MetaProperty<List<List<Address>>> addressesList() {
return addressesList;
}
/**
* The meta-property for the {@code mainAddress} property.
* (Joda-Beans generated accessor; regenerate rather than hand-editing.)
* @return the meta-property, not null
*/
public MetaProperty<Address> mainAddress() {
return mainAddress;
}
/**
* The meta-property for the {@code propDefAnnotationSecondDeprecated} property.
* (Joda-Beans generated accessor; deprecated because the underlying
* property is deprecated. Regenerate rather than hand-editing.)
* @return the meta-property, not null
*/
@Deprecated
public MetaProperty<FlexiBean> propDefAnnotationSecondDeprecated() {
return propDefAnnotationSecondDeprecated;
}
/**
* The meta-property for the {@code propDefAnnotationSecondManual} property.
* (Joda-Beans generated accessor; regenerate rather than hand-editing.)
* @return the meta-property, not null
*/
public MetaProperty<Map<String, String>> propDefAnnotationSecondManual() {
return propDefAnnotationSecondManual;
}
//-----------------------------------------------------------------------
@Override
protected Object propertyGet(Bean bean, String propertyName, boolean quiet) {
// Generated dispatch on the precomputed String.hashCode() of the
// property name (see trailing comments); delegates to the bean's
// getter. Unknown names fall through to the superclass.
switch (propertyName.hashCode()) {
case 467061063: // forename
return ((SimplePerson) bean).getForename();
case -1852993317: // surname
return ((SimplePerson) bean).getSurname();
case 926656063: // numberOfCars
return ((SimplePerson) bean).getNumberOfCars();
case -1377524046: // addressList
return ((SimplePerson) bean).getAddressList();
case 1368089592: // otherAddressMap
return ((SimplePerson) bean).getOtherAddressMap();
case -226885792: // addressesList
return ((SimplePerson) bean).getAddressesList();
case -2032731141: // mainAddress
return ((SimplePerson) bean).getMainAddress();
case 1897330136: // propDefAnnotationSecondDeprecated
return ((SimplePerson) bean).getPropDefAnnotationSecondDeprecated();
case 1276990059: // propDefAnnotationSecondManual
return ((SimplePerson) bean).getPropDefAnnotationSecondManual();
}
return super.propertyGet(bean, propertyName, quiet);
}
@SuppressWarnings("unchecked")
@Override
protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) {
// Generated dispatch mirroring propertyGet. The unchecked casts are
// safe only if the caller supplies a value of the declared property
// type; a wrong type surfaces as a ClassCastException here.
switch (propertyName.hashCode()) {
case 467061063: // forename
((SimplePerson) bean).setForename((String) newValue);
return;
case -1852993317: // surname
((SimplePerson) bean).setSurname((String) newValue);
return;
case 926656063: // numberOfCars
((SimplePerson) bean).setNumberOfCars((Integer) newValue);
return;
case -1377524046: // addressList
((SimplePerson) bean).setAddressList((List<Address>) newValue);
return;
case 1368089592: // otherAddressMap
((SimplePerson) bean).setOtherAddressMap((Map<String, Address>) newValue);
return;
case -226885792: // addressesList
((SimplePerson) bean).setAddressesList((List<List<Address>>) newValue);
return;
case -2032731141: // mainAddress
((SimplePerson) bean).setMainAddress((Address) newValue);
return;
case 1897330136: // propDefAnnotationSecondDeprecated
((SimplePerson) bean).setPropDefAnnotationSecondDeprecated((FlexiBean) newValue);
return;
case 1276990059: // propDefAnnotationSecondManual
((SimplePerson) bean).setPropDefAnnotationSecondManual((Map<String, String>) newValue);
return;
}
super.propertySet(bean, propertyName, newValue, quiet);
}
@Override
protected void validate(Bean bean) {
// Generated validation: enforces the not-null constraint on the
// properties declared with validate="notNull" in the source bean.
JodaBeanUtils.notNull(((SimplePerson) bean).addressList, "addressList");
JodaBeanUtils.notNull(((SimplePerson) bean).otherAddressMap, "otherAddressMap");
JodaBeanUtils.notNull(((SimplePerson) bean).addressesList, "addressesList");
JodaBeanUtils.notNull(((SimplePerson) bean).propDefAnnotationSecondDeprecated, "propDefAnnotationSecondDeprecated");
}
}
///CLOVER:ON
//-------------------------- AUTOGENERATED END --------------------------
}
| |
/**
* Copyright (c) 2007, Gaudenz Alder
*/
package com.mxgraph.swing.view;
import java.awt.AlphaComposite;
import java.awt.Composite;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Point;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import com.mxgraph.canvas.mxGraphics2DCanvas;
import com.mxgraph.model.mxGeometry;
import com.mxgraph.model.mxIGraphModel;
import com.mxgraph.swing.mxGraphComponent;
import com.mxgraph.util.mxPoint;
import com.mxgraph.util.mxRectangle;
import com.mxgraph.util.mxUtils;
import com.mxgraph.view.mxCellState;
import com.mxgraph.view.mxGraph;
/**
 * Implements a live preview for cells being moved: records per-state move
 * deltas, translates and revalidates the affected cell states, and paints
 * the (optionally cloned) states with a configurable opacity.
 */
public class mxCellStatePreview
{
/**
 * Maps each registered cell state to its accumulated (dx, dy) move delta.
 * Insertion order is preserved so states are processed in the order added.
 */
protected Map<mxCellState, mxPoint> deltas = new LinkedHashMap<mxCellState, mxPoint>();
/**
 * Number of states registered via moveState (i.e. the size of deltas).
 */
protected int count = 0;
/**
 * Component whose graph, view and canvas are used for the preview.
 */
protected mxGraphComponent graphComponent;
/**
 * Specifies if cell states should be cloned or changed in-place.
 */
protected boolean cloned;
/**
 * Opacity used when painting preview states; 1 means fully opaque.
 */
protected float opacity = 1;
/**
 * Snapshot of the cell states to be drawn in paint; only populated by
 * show() when cloned is true, otherwise null.
 */
protected List<mxCellState> cellStates;
/**
 * Constructs a new state preview. The paint handler to invoke the paint
 * method must be installed elsewhere.
 */
public mxCellStatePreview(mxGraphComponent graphComponent, boolean cloned)
{
this.graphComponent = graphComponent;
this.cloned = cloned;
}
/**
 * Returns whether cell states are cloned rather than changed in-place.
 */
public boolean isCloned()
{
return cloned;
}
/**
 * Sets whether cell states are cloned rather than changed in-place.
 */
public void setCloned(boolean value)
{
cloned = value;
}
/**
 * Returns true if no states have been registered via moveState.
 */
public boolean isEmpty()
{
return count == 0;
}
/**
 * Returns the number of states registered via moveState.
 */
public int getCount()
{
return count;
}
/**
 * Returns the map of registered states to their move deltas.
 */
public Map<mxCellState, mxPoint> getDeltas()
{
return deltas;
}
/**
 * Sets the opacity used when painting the preview.
 */
public void setOpacity(float value)
{
opacity = value;
}
/**
 * Returns the opacity used when painting the preview.
 */
public float getOpacity()
{
return opacity;
}
/**
 * Registers a (dx, dy) move for the given state, accumulating onto any
 * existing delta and including the cell's connected edges.
 */
public mxPoint moveState(mxCellState state, double dx, double dy)
{
return moveState(state, dx, dy, true, true);
}
/**
 * Registers a move delta for the given state.
 *
 * @param state the cell state to move
 * @param dx horizontal delta
 * @param dy vertical delta
 * @param add if true, accumulate onto an existing delta; otherwise replace it
 * @param includeEdges if true, also register connected edges (zero delta)
 * @return the delta point stored for the state (shared, mutable)
 */
public mxPoint moveState(mxCellState state, double dx, double dy,
boolean add, boolean includeEdges)
{
mxPoint delta = deltas.get(state);
if (delta == null)
{
delta = new mxPoint(dx, dy);
deltas.put(state, delta);
count++;
}
else
{
if (add)
{
delta.setX(delta.getX() + dx);
delta.setY(delta.getY() + dy);
}
else
{
delta.setX(dx);
delta.setY(dy);
}
}
if (includeEdges)
{
addEdges(state);
}
return delta;
}
/**
 * Returns a dirty rectangle to be repainted in mxGraphControl.
 * Applies all recorded deltas to the view's states (translating first,
 * then revalidating), and - when cloned is true - snapshots the moved
 * states for later painting before restoring the originals.
 */
public mxRectangle show()
{
mxGraph graph = graphComponent.getGraph();
mxIGraphModel model = graph.getModel();
// Stores a copy of the cell states
List<mxCellState> previousStates = null;
if (isCloned())
{
previousStates = new LinkedList<mxCellState>();
Iterator<mxCellState> it = deltas.keySet().iterator();
while (it.hasNext())
{
mxCellState state = it.next();
previousStates.addAll(snapshot(state));
}
}
// Translates the states in step
Iterator<mxCellState> it = deltas.keySet().iterator();
while (it.hasNext())
{
mxCellState state = it.next();
mxPoint delta = deltas.get(state);
mxCellState parentState = graph.getView().getState(
model.getParent(state.getCell()));
translateState(parentState, state, delta.getX(), delta.getY());
}
// Revalidates the states in step
mxRectangle dirty = null;
it = deltas.keySet().iterator();
while (it.hasNext())
{
mxCellState state = it.next();
mxPoint delta = deltas.get(state);
mxCellState parentState = graph.getView().getState(
model.getParent(state.getCell()));
mxRectangle tmp = revalidateState(parentState, state, delta.getX(),
delta.getY());
if (dirty != null)
{
dirty.add(tmp);
}
else
{
dirty = tmp;
}
}
// Takes a snapshot of the states for later drawing. If the states
// are not cloned then this does nothing and just expects a repaint
// of the dirty rectangle.
if (previousStates != null)
{
cellStates = new LinkedList<mxCellState>();
it = deltas.keySet().iterator();
while (it.hasNext())
{
mxCellState state = it.next();
cellStates.addAll(snapshot(state));
}
// Restores the previous states
restore(previousStates);
}
if (dirty != null)
{
// Pad the repaint region slightly to cover strokes/antialiasing.
dirty.grow(2);
}
return dirty;
}
/**
 * Restores the view's states from the given snapshot (as produced by
 * snapshot), copying fields back into the live states.
 */
public void restore(List<mxCellState> snapshot)
{
mxGraph graph = graphComponent.getGraph();
Iterator<mxCellState> it = snapshot.iterator();
while (it.hasNext())
{
mxCellState state = it.next();
mxCellState orig = graph.getView().getState(state.getCell());
if (orig != null && orig != state)
{
restoreState(orig, state);
}
}
}
/**
 * Copies the geometry-related fields (bounds, points, origin, offsets,
 * segment data) from {@code from} into {@code state}.
 */
public void restoreState(mxCellState state, mxCellState from)
{
state.setLabelBounds(from.getLabelBounds());
state.setAbsolutePoints(from.getAbsolutePoints());
state.setOrigin(from.getOrigin());
state.setAbsoluteOffset(from.getAbsoluteOffset());
state.setBoundingBox(from.getBoundingBox());
state.setTerminalDistance(from.getTerminalDistance());
state.setSegments(from.getSegments());
state.setLength(from.getLength());
state.setX(from.getX());
state.setY(from.getY());
state.setWidth(from.getWidth());
state.setHeight(from.getHeight());
}
/**
 * Returns clones of the given state and of the states of all of its
 * descendants in the model. Returns an empty list for a null state.
 */
public List<mxCellState> snapshot(mxCellState state)
{
List<mxCellState> result = new LinkedList<mxCellState>();
if (state != null)
{
result.add((mxCellState) state.clone());
mxGraph graph = graphComponent.getGraph();
mxIGraphModel model = graph.getModel();
Object cell = state.getCell();
int childCount = model.getChildCount(cell);
for (int i = 0; i < childCount; i++)
{
result.addAll(snapshot(graph.getView().getState(
model.getChildAt(cell, i))));
}
}
return result;
}
/**
 * Recursively shifts the given state (and its descendants' states) by
 * (dx, dy). Vertex states are refreshed via updateCellState first.
 */
protected void translateState(mxCellState parentState, mxCellState state,
double dx, double dy)
{
if (state != null)
{
mxGraph graph = graphComponent.getGraph();
mxIGraphModel model = graph.getModel();
Object cell = state.getCell();
if (model.isVertex(cell))
{
state.getView().updateCellState(state);
mxGeometry geo = graph.getCellGeometry(cell);
// Moves selection cells and non-relative vertices in
// the first phase so that edge terminal points will
// be updated in the second phase
if ((dx != 0 || dy != 0) && geo != null
&& (!geo.isRelative() || deltas.get(state) != null))
{
state.setX(state.getX() + dx);
state.setY(state.getY() + dy);
}
}
int childCount = model.getChildCount(cell);
for (int i = 0; i < childCount; i++)
{
translateState(state,
graph.getView().getState(model.getChildAt(cell, i)),
dx, dy);
}
}
}
/**
 * Revalidates the given state after translation: refreshes edge
 * geometry, moves relative selected vertices, and accumulates the
 * bounding box of everything that needs repainting.
 *
 * @return the dirty rectangle, or null if no state produced one
 */
protected mxRectangle revalidateState(mxCellState parentState,
mxCellState state, double dx, double dy)
{
mxRectangle dirty = null;
if (state != null)
{
mxGraph graph = graphComponent.getGraph();
mxIGraphModel model = graph.getModel();
Object cell = state.getCell();
// Updates the edge terminal points and restores the
// (relative) positions of any (relative) children
if (model.isEdge(cell))
{
state.getView().updateCellState(state);
}
dirty = state.getView().getBoundingBox(state, false);
// Moves selection vertices which are relative
mxGeometry geo = graph.getCellGeometry(cell);
if ((dx != 0 || dy != 0)
&& geo != null
&& geo.isRelative()
&& model.isVertex(cell)
&& (parentState == null
|| model.isVertex(parentState.getCell()) || deltas
.get(state) != null))
{
state.setX(state.getX() + dx);
state.setY(state.getY() + dy);
// TODO: Check this change
// NOTE(review): dirty may be null here if getBoundingBox
// returned null for this state - confirm before relying on it.
dirty.setX(dirty.getX() + dx);
dirty.setY(dirty.getY() + dy);
graph.getView().updateLabelBounds(state);
}
int childCount = model.getChildCount(cell);
for (int i = 0; i < childCount; i++)
{
mxRectangle tmp = revalidateState(state, graph.getView()
.getState(model.getChildAt(cell, i)), dx, dy);
// NOTE(review): tmp may be null for children without a state;
// verify that mxRectangle.add tolerates null arguments.
if (dirty != null)
{
dirty.add(tmp);
}
else
{
dirty = tmp;
}
}
}
return dirty;
}
/**
 * Registers zero-delta moves for all edges connected to the given
 * state's cell so their terminal points are revalidated by show().
 */
public void addEdges(mxCellState state)
{
mxGraph graph = graphComponent.getGraph();
mxIGraphModel model = graph.getModel();
Object cell = state.getCell();
int edgeCount = model.getEdgeCount(cell);
for (int i = 0; i < edgeCount; i++)
{
mxCellState state2 = graph.getView().getState(
model.getEdgeAt(cell, i));
if (state2 != null)
{
moveState(state2, 0, 0);
}
}
}
/**
 * Paints the snapshot taken in show() onto the given graphics, using
 * the graph component's canvas at the current view scale with the
 * translation reset. The canvas state is restored afterwards.
 */
public void paint(Graphics g)
{
if (cellStates != null && cellStates.size() > 0)
{
mxGraphics2DCanvas canvas = graphComponent.getCanvas();
// Sets antialiasing
if (graphComponent.isAntiAlias())
{
mxUtils.setAntiAlias((Graphics2D) g, true, true);
}
Graphics2D previousGraphics = canvas.getGraphics();
Point previousTranslate = canvas.getTranslate();
double previousScale = canvas.getScale();
try
{
canvas.setScale(graphComponent.getGraph().getView().getScale());
canvas.setTranslate(0, 0);
canvas.setGraphics((Graphics2D) g);
paintPreview(canvas);
}
finally
{
canvas.setScale(previousScale);
canvas.setTranslate(previousTranslate.x, previousTranslate.y);
canvas.setGraphics(previousGraphics);
}
}
}
/**
 * Returns the opacity used when painting the given cell's preview
 * state; subclasses may override for per-cell opacity.
 */
protected float getOpacityForCell(Object cell)
{
return opacity;
}
/**
 * Draws the preview using the graphics canvas.
 */
protected void paintPreview(mxGraphics2DCanvas canvas)
{
Composite previousComposite = canvas.getGraphics().getComposite();
// Paints the preview states
Iterator<mxCellState> it = cellStates.iterator();
while (it.hasNext())
{
mxCellState state = it.next();
canvas.getGraphics().setComposite(
AlphaComposite.getInstance(AlphaComposite.SRC_OVER,
getOpacityForCell(state.getCell())));
paintPreviewState(canvas, state);
}
canvas.getGraphics().setComposite(previousComposite);
}
/**
 * Draws the preview using the graphics canvas. The label is suppressed
 * for the cell currently being edited in-place.
 */
protected void paintPreviewState(mxGraphics2DCanvas canvas,
mxCellState state)
{
graphComponent.getGraph().drawState(
canvas,
state,
state.getCell() != graphComponent.getCellEditor()
.getEditingCell());
}
}
| |
/* ********************************************************************
Licensed to Jasig under one or more contributor license
agreements. See the NOTICE file distributed with this work
for additional information regarding copyright ownership.
Jasig licenses this file to you under the Apache License,
Version 2.0 (the "License"); you may not use this file
except in compliance with the License. You may obtain a
copy of the License at:
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.bedework.util.misc;
import org.bedework.util.logging.BwLogger;
import org.bedework.util.logging.Logged;
import javax.naming.NameNotFoundException;
/** Something to help the handling and graceful shutdown of processes.
 *
 * <p>Subclasses implement {@link #runProcess()} to do the actual work.
 * {@link #run()} supplies startup/termination logging, error throttling
 * and clean shutdown driven by the {@link #running} flag.</p>
 *
 * @author douglm
 *
 */
public abstract class AbstractProcessorThread extends Thread
        implements Logged {
  /** Status: processing completed. */
  public static final String statusDone = "Done";

  /** Status: processing failed. */
  public static final String statusFailed = "Failed";

  /** Status: the processor is active. */
  public static final String statusRunning = "Running";

  /** Status: the processor is not active. */
  public static final String statusStopped = "Stopped";

  /** cannot stop the thread */
  public static final String statusUnstoppable = "Unstoppable";

  /** Current status - by convention one of the constants above. */
  private String status = statusStopped;

  /** Main-loop flag: cleared (e.g. by stopProcess) to request shutdown. */
  protected boolean running;

  /** True once a full stack trace has been logged (toggle is currently
   * disabled in run() so every error gets a trace). */
  private boolean showedTrace;

  /**
   * @param name for the thread
   */
  public AbstractProcessorThread(final String name) {
    super(name);
  }

  /** Called to initialise at start of run. We've already output the
   * start of the startup message. This can add more info.
   */
  public abstract void runInit();

  /** called at end - allows output of termination messages
   * @param msg an info message
   */
  public abstract void end(String msg);

  /** Do whatever we're supposed to be doing.
   * @throws Throwable on fatal error
   */
  public abstract void runProcess() throws Throwable;

  /** Close the processor.
   */
  public abstract void close();

  /** Override to handle certain exception types.
   *
   * @param val the exception
   * @return false if we did nothing
   */
  public boolean handleException(final Throwable val) {
    if (Util.causeIs(val, NameNotFoundException.class)) {
      // jmx shutting down?
      error("Looks like JMX shut down.");
      error(val);
      running = false;
      return true;
    }

    return false;
  }

  /**
   * @return the current status string
   */
  public String getStatus() {
    return status;
  }

  /**
   * @param val the new status string
   */
  public void setStatus(final String val) {
    status = val;
  }

  /** Set the running flag
   *
   * @param val the flag
   */
  public void setRunning(final boolean val) {
    running = val;
  }

  /**
   * @return the running flag
   */
  public boolean getRunning() {
    return running;
  }

  /** Check for processor started.
   *
   * <p>If the thread is alive but not flagged as running we interrupt it
   * and wait briefly for it to die.</p>
   *
   * @param processor to check - may be null
   * @return status string.
   */
  public static String checkStarted(final AbstractProcessorThread processor) {
    if (processor == null) {
      return statusStopped;
    }

    if (!processor.isAlive()) {
      return statusStopped;
    }

    if (processor.running) {
      return statusRunning;
    }

    /* Alive but not running: try to kill it and wait a little. */
    processor.interrupt();
    try {
      processor.join(5000);
    } catch (final Throwable ignored) {}

    if (!processor.isAlive()) {
      return statusStopped;
    }

    return statusUnstoppable;
  }

  @Override
  public void run() {
    info("************************************************************");
    info(" * Starting " + getName());
    runInit();
    info("************************************************************");

    long lastErrorTime = 0;
    final long errorResetTime = 1000 * 60 * 5; // 5 minutes since last error
    int errorCt = 0;
    final int maxErrorCt = 5;

    while (running) {
      try {
        runProcess();
      } catch (final InterruptedException ie) {
        // Restore the interrupt status for anything further up, then quit.
        Thread.currentThread().interrupt();
        running = false;
        break;
      } catch (final Throwable t) {
        if (!handleException(t)) {
          // Forget old errors after a quiet period.
          if (System.currentTimeMillis() - lastErrorTime > errorResetTime) {
            errorCt = 0;
          }

          if (errorCt > maxErrorCt) {
            error("Too many errors: stopping");
            running = false;
            break;
          }

          lastErrorTime = System.currentTimeMillis();
          errorCt++;

          if (!showedTrace) {
            error(t);
            // showedTrace = true;
          } else {
            error(t.getMessage());
          }
        }
      } finally {
        close();
      }
    }

    /* FIX: this termination banner was previously inside the while loop,
       so it was logged after every runProcess() call and skipped entirely
       when the loop exited via break. It now runs exactly once, after the
       loop terminates. */
    info("************************************************************");
    info(" * " + getName() + " terminated");
    info("************************************************************");
  }

  /** Shut down a running process.
   *
   * @param proc the thread process - may be null
   * @return false for exception or timeout
   */
  public static boolean stopProcess(final AbstractProcessorThread proc) {
    if (proc == null) {
      return true;
    }

    proc.info("************************************************************");
    proc.info(" * Stopping " + proc.getName());
    proc.info("************************************************************");

    proc.setRunning(false);
    proc.interrupt();

    boolean ok = true;

    try {
      // Allow up to 20 seconds for the thread to die.
      proc.join(20 * 1000);
    } catch (final InterruptedException ignored) {
    } catch (final Throwable t) {
      proc.error("Error waiting for processor termination");
      proc.error(t);
      ok = false;
    }

    proc.info("************************************************************");
    proc.info(" * " + proc.getName() + " terminated");
    proc.info("************************************************************");

    return ok;
  }

  /* ====================================================================
   *                   Logged methods
   * ==================================================================== */

  private final BwLogger logger = new BwLogger();

  @Override
  public BwLogger getLogger() {
    if ((logger.getLoggedClass() == null) && (logger.getLoggedName() == null)) {
      logger.setLoggedClass(getClass());
    }

    return logger;
  }
}
| |
/*
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.schemaorg.core;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Multimap;
import com.google.schemaorg.SchemaOrgType;
import com.google.schemaorg.SchemaOrgTypeImpl;
import com.google.schemaorg.ValueType;
import com.google.schemaorg.core.datatype.Date;
import com.google.schemaorg.core.datatype.Text;
import com.google.schemaorg.core.datatype.URL;
import com.google.schemaorg.goog.GoogConstants;
import com.google.schemaorg.goog.PopularityScoreSpecification;
/** Implementation of {@link FoodEstablishment}. */
public class FoodEstablishmentImpl extends LocalBusinessImpl implements FoodEstablishment {
private static final ImmutableSet<String> PROPERTY_SET = initializePropertySet();
/*
 * Builds the immutable set of schema.org property names accepted by
 * FoodEstablishment (inherited Place/Organization/LocalBusiness
 * properties plus FoodEstablishment-specific ones such as menu and
 * servesCuisine, and the Google extension properties at the end).
 * Autogenerated from the schema.org vocabulary; regenerate rather
 * than hand-editing.
 */
private static ImmutableSet<String> initializePropertySet() {
ImmutableSet.Builder<String> builder = ImmutableSet.builder();
builder.add(CoreConstants.PROPERTY_ACCEPTS_RESERVATIONS);
builder.add(CoreConstants.PROPERTY_ADDITIONAL_PROPERTY);
builder.add(CoreConstants.PROPERTY_ADDITIONAL_TYPE);
builder.add(CoreConstants.PROPERTY_ADDRESS);
builder.add(CoreConstants.PROPERTY_AGGREGATE_RATING);
builder.add(CoreConstants.PROPERTY_ALTERNATE_NAME);
builder.add(CoreConstants.PROPERTY_ALUMNI);
builder.add(CoreConstants.PROPERTY_AREA_SERVED);
builder.add(CoreConstants.PROPERTY_AWARD);
builder.add(CoreConstants.PROPERTY_AWARDS);
builder.add(CoreConstants.PROPERTY_BRANCH_CODE);
builder.add(CoreConstants.PROPERTY_BRANCH_OF);
builder.add(CoreConstants.PROPERTY_BRAND);
builder.add(CoreConstants.PROPERTY_CONTACT_POINT);
builder.add(CoreConstants.PROPERTY_CONTACT_POINTS);
builder.add(CoreConstants.PROPERTY_CONTAINED_IN);
builder.add(CoreConstants.PROPERTY_CONTAINED_IN_PLACE);
builder.add(CoreConstants.PROPERTY_CONTAINS_PLACE);
builder.add(CoreConstants.PROPERTY_CURRENCIES_ACCEPTED);
builder.add(CoreConstants.PROPERTY_DEPARTMENT);
builder.add(CoreConstants.PROPERTY_DESCRIPTION);
builder.add(CoreConstants.PROPERTY_DISSOLUTION_DATE);
builder.add(CoreConstants.PROPERTY_DUNS);
builder.add(CoreConstants.PROPERTY_EMAIL);
builder.add(CoreConstants.PROPERTY_EMPLOYEE);
builder.add(CoreConstants.PROPERTY_EMPLOYEES);
builder.add(CoreConstants.PROPERTY_EVENT);
builder.add(CoreConstants.PROPERTY_EVENTS);
builder.add(CoreConstants.PROPERTY_FAX_NUMBER);
builder.add(CoreConstants.PROPERTY_FOUNDER);
builder.add(CoreConstants.PROPERTY_FOUNDERS);
builder.add(CoreConstants.PROPERTY_FOUNDING_DATE);
builder.add(CoreConstants.PROPERTY_FOUNDING_LOCATION);
builder.add(CoreConstants.PROPERTY_GEO);
builder.add(CoreConstants.PROPERTY_GLOBAL_LOCATION_NUMBER);
builder.add(CoreConstants.PROPERTY_HAS_MAP);
builder.add(CoreConstants.PROPERTY_HAS_OFFER_CATALOG);
builder.add(CoreConstants.PROPERTY_HAS_POS);
builder.add(CoreConstants.PROPERTY_IMAGE);
builder.add(CoreConstants.PROPERTY_ISIC_V4);
builder.add(CoreConstants.PROPERTY_LEGAL_NAME);
builder.add(CoreConstants.PROPERTY_LOCATION);
builder.add(CoreConstants.PROPERTY_LOGO);
builder.add(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE);
builder.add(CoreConstants.PROPERTY_MAKES_OFFER);
builder.add(CoreConstants.PROPERTY_MAP);
builder.add(CoreConstants.PROPERTY_MAPS);
builder.add(CoreConstants.PROPERTY_MEMBER);
builder.add(CoreConstants.PROPERTY_MEMBER_OF);
builder.add(CoreConstants.PROPERTY_MEMBERS);
builder.add(CoreConstants.PROPERTY_MENU);
builder.add(CoreConstants.PROPERTY_NAICS);
builder.add(CoreConstants.PROPERTY_NAME);
builder.add(CoreConstants.PROPERTY_NUMBER_OF_EMPLOYEES);
builder.add(CoreConstants.PROPERTY_OPENING_HOURS);
builder.add(CoreConstants.PROPERTY_OPENING_HOURS_SPECIFICATION);
builder.add(CoreConstants.PROPERTY_OWNS);
builder.add(CoreConstants.PROPERTY_PARENT_ORGANIZATION);
builder.add(CoreConstants.PROPERTY_PAYMENT_ACCEPTED);
builder.add(CoreConstants.PROPERTY_PHOTO);
builder.add(CoreConstants.PROPERTY_PHOTOS);
builder.add(CoreConstants.PROPERTY_POTENTIAL_ACTION);
builder.add(CoreConstants.PROPERTY_PRICE_RANGE);
builder.add(CoreConstants.PROPERTY_REVIEW);
builder.add(CoreConstants.PROPERTY_REVIEWS);
builder.add(CoreConstants.PROPERTY_SAME_AS);
builder.add(CoreConstants.PROPERTY_SEEKS);
builder.add(CoreConstants.PROPERTY_SERVES_CUISINE);
builder.add(CoreConstants.PROPERTY_SERVICE_AREA);
builder.add(CoreConstants.PROPERTY_SUB_ORGANIZATION);
builder.add(CoreConstants.PROPERTY_TAX_ID);
builder.add(CoreConstants.PROPERTY_TELEPHONE);
builder.add(CoreConstants.PROPERTY_URL);
builder.add(CoreConstants.PROPERTY_VAT_ID);
builder.add(GoogConstants.PROPERTY_DETAILED_DESCRIPTION);
builder.add(GoogConstants.PROPERTY_POPULARITY_SCORE);
return builder.build();
}
static final class BuilderImpl extends SchemaOrgTypeImpl.BuilderImpl<FoodEstablishment.Builder>
implements FoodEstablishment.Builder {
@Override
public FoodEstablishment.Builder addAcceptsReservations(Boolean value) {
return addProperty(CoreConstants.PROPERTY_ACCEPTS_RESERVATIONS, value);
}
@Override
public FoodEstablishment.Builder addAcceptsReservations(Text value) {
return addProperty(CoreConstants.PROPERTY_ACCEPTS_RESERVATIONS, value);
}
@Override
public FoodEstablishment.Builder addAcceptsReservations(URL value) {
return addProperty(CoreConstants.PROPERTY_ACCEPTS_RESERVATIONS, value);
}
@Override
public FoodEstablishment.Builder addAcceptsReservations(String value) {
return addProperty(CoreConstants.PROPERTY_ACCEPTS_RESERVATIONS, Text.of(value));
}
@Override
public FoodEstablishment.Builder addAdditionalProperty(PropertyValue value) {
return addProperty(CoreConstants.PROPERTY_ADDITIONAL_PROPERTY, value);
}
@Override
public FoodEstablishment.Builder addAdditionalProperty(PropertyValue.Builder value) {
return addProperty(CoreConstants.PROPERTY_ADDITIONAL_PROPERTY, value.build());
}
@Override
public FoodEstablishment.Builder addAdditionalProperty(String value) {
return addProperty(CoreConstants.PROPERTY_ADDITIONAL_PROPERTY, Text.of(value));
}
@Override
public FoodEstablishment.Builder addAdditionalType(URL value) {
return addProperty(CoreConstants.PROPERTY_ADDITIONAL_TYPE, value);
}
@Override
public FoodEstablishment.Builder addAdditionalType(String value) {
return addProperty(CoreConstants.PROPERTY_ADDITIONAL_TYPE, Text.of(value));
}
@Override
public FoodEstablishment.Builder addAddress(PostalAddress value) {
return addProperty(CoreConstants.PROPERTY_ADDRESS, value);
}
@Override
public FoodEstablishment.Builder addAddress(PostalAddress.Builder value) {
return addProperty(CoreConstants.PROPERTY_ADDRESS, value.build());
}
@Override
public FoodEstablishment.Builder addAddress(Text value) {
return addProperty(CoreConstants.PROPERTY_ADDRESS, value);
}
@Override
public FoodEstablishment.Builder addAddress(String value) {
return addProperty(CoreConstants.PROPERTY_ADDRESS, Text.of(value));
}
@Override
public FoodEstablishment.Builder addAggregateRating(AggregateRating value) {
return addProperty(CoreConstants.PROPERTY_AGGREGATE_RATING, value);
}
@Override
public FoodEstablishment.Builder addAggregateRating(AggregateRating.Builder value) {
return addProperty(CoreConstants.PROPERTY_AGGREGATE_RATING, value.build());
}
@Override
public FoodEstablishment.Builder addAggregateRating(String value) {
return addProperty(CoreConstants.PROPERTY_AGGREGATE_RATING, Text.of(value));
}
@Override
public FoodEstablishment.Builder addAlternateName(Text value) {
return addProperty(CoreConstants.PROPERTY_ALTERNATE_NAME, value);
}
@Override
public FoodEstablishment.Builder addAlternateName(String value) {
return addProperty(CoreConstants.PROPERTY_ALTERNATE_NAME, Text.of(value));
}
@Override
public FoodEstablishment.Builder addAlumni(Person value) {
return addProperty(CoreConstants.PROPERTY_ALUMNI, value);
}
@Override
public FoodEstablishment.Builder addAlumni(Person.Builder value) {
return addProperty(CoreConstants.PROPERTY_ALUMNI, value.build());
}
@Override
public FoodEstablishment.Builder addAlumni(String value) {
return addProperty(CoreConstants.PROPERTY_ALUMNI, Text.of(value));
}
@Override
public FoodEstablishment.Builder addAreaServed(AdministrativeArea value) {
return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value);
}
@Override
public FoodEstablishment.Builder addAreaServed(AdministrativeArea.Builder value) {
return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value.build());
}
@Override
public FoodEstablishment.Builder addAreaServed(GeoShape value) {
return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value);
}
@Override
public FoodEstablishment.Builder addAreaServed(GeoShape.Builder value) {
return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value.build());
}
@Override
public FoodEstablishment.Builder addAreaServed(Place value) {
return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value);
}
@Override
public FoodEstablishment.Builder addAreaServed(Place.Builder value) {
return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value.build());
}
@Override
public FoodEstablishment.Builder addAreaServed(Text value) {
return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value);
}
@Override
public FoodEstablishment.Builder addAreaServed(String value) {
return addProperty(CoreConstants.PROPERTY_AREA_SERVED, Text.of(value));
}
@Override
public FoodEstablishment.Builder addAward(Text value) {
return addProperty(CoreConstants.PROPERTY_AWARD, value);
}
@Override
public FoodEstablishment.Builder addAward(String value) {
return addProperty(CoreConstants.PROPERTY_AWARD, Text.of(value));
}
@Override
public FoodEstablishment.Builder addAwards(Text value) {
return addProperty(CoreConstants.PROPERTY_AWARDS, value);
}
@Override
public FoodEstablishment.Builder addAwards(String value) {
return addProperty(CoreConstants.PROPERTY_AWARDS, Text.of(value));
}
@Override
public FoodEstablishment.Builder addBranchCode(Text value) {
return addProperty(CoreConstants.PROPERTY_BRANCH_CODE, value);
}
@Override
public FoodEstablishment.Builder addBranchCode(String value) {
return addProperty(CoreConstants.PROPERTY_BRANCH_CODE, Text.of(value));
}
@Override
public FoodEstablishment.Builder addBranchOf(Organization value) {
return addProperty(CoreConstants.PROPERTY_BRANCH_OF, value);
}
@Override
public FoodEstablishment.Builder addBranchOf(Organization.Builder value) {
return addProperty(CoreConstants.PROPERTY_BRANCH_OF, value.build());
}
@Override
public FoodEstablishment.Builder addBranchOf(String value) {
return addProperty(CoreConstants.PROPERTY_BRANCH_OF, Text.of(value));
}
@Override
public FoodEstablishment.Builder addBrand(Brand value) {
return addProperty(CoreConstants.PROPERTY_BRAND, value);
}
@Override
public FoodEstablishment.Builder addBrand(Brand.Builder value) {
return addProperty(CoreConstants.PROPERTY_BRAND, value.build());
}
@Override
public FoodEstablishment.Builder addBrand(Organization value) {
return addProperty(CoreConstants.PROPERTY_BRAND, value);
}
@Override
public FoodEstablishment.Builder addBrand(Organization.Builder value) {
return addProperty(CoreConstants.PROPERTY_BRAND, value.build());
}
@Override
public FoodEstablishment.Builder addBrand(String value) {
return addProperty(CoreConstants.PROPERTY_BRAND, Text.of(value));
}
// contactPoint / contactPoints adders (the plural form is a legacy schema.org alias).
@Override
public FoodEstablishment.Builder addContactPoint(ContactPoint value) {
return addProperty(CoreConstants.PROPERTY_CONTACT_POINT, value);
}
@Override
public FoodEstablishment.Builder addContactPoint(ContactPoint.Builder value) {
return addProperty(CoreConstants.PROPERTY_CONTACT_POINT, value.build());
}
@Override
public FoodEstablishment.Builder addContactPoint(String value) {
return addProperty(CoreConstants.PROPERTY_CONTACT_POINT, Text.of(value));
}
@Override
public FoodEstablishment.Builder addContactPoints(ContactPoint value) {
return addProperty(CoreConstants.PROPERTY_CONTACT_POINTS, value);
}
@Override
public FoodEstablishment.Builder addContactPoints(ContactPoint.Builder value) {
return addProperty(CoreConstants.PROPERTY_CONTACT_POINTS, value.build());
}
@Override
public FoodEstablishment.Builder addContactPoints(String value) {
return addProperty(CoreConstants.PROPERTY_CONTACT_POINTS, Text.of(value));
}
// containedIn / containedInPlace / containsPlace adders (place-containment relations).
@Override
public FoodEstablishment.Builder addContainedIn(Place value) {
return addProperty(CoreConstants.PROPERTY_CONTAINED_IN, value);
}
@Override
public FoodEstablishment.Builder addContainedIn(Place.Builder value) {
return addProperty(CoreConstants.PROPERTY_CONTAINED_IN, value.build());
}
@Override
public FoodEstablishment.Builder addContainedIn(String value) {
return addProperty(CoreConstants.PROPERTY_CONTAINED_IN, Text.of(value));
}
@Override
public FoodEstablishment.Builder addContainedInPlace(Place value) {
return addProperty(CoreConstants.PROPERTY_CONTAINED_IN_PLACE, value);
}
@Override
public FoodEstablishment.Builder addContainedInPlace(Place.Builder value) {
return addProperty(CoreConstants.PROPERTY_CONTAINED_IN_PLACE, value.build());
}
@Override
public FoodEstablishment.Builder addContainedInPlace(String value) {
return addProperty(CoreConstants.PROPERTY_CONTAINED_IN_PLACE, Text.of(value));
}
@Override
public FoodEstablishment.Builder addContainsPlace(Place value) {
return addProperty(CoreConstants.PROPERTY_CONTAINS_PLACE, value);
}
@Override
public FoodEstablishment.Builder addContainsPlace(Place.Builder value) {
return addProperty(CoreConstants.PROPERTY_CONTAINS_PLACE, value.build());
}
@Override
public FoodEstablishment.Builder addContainsPlace(String value) {
return addProperty(CoreConstants.PROPERTY_CONTAINS_PLACE, Text.of(value));
}
// currenciesAccepted / department adders.
@Override
public FoodEstablishment.Builder addCurrenciesAccepted(Text value) {
return addProperty(CoreConstants.PROPERTY_CURRENCIES_ACCEPTED, value);
}
@Override
public FoodEstablishment.Builder addCurrenciesAccepted(String value) {
return addProperty(CoreConstants.PROPERTY_CURRENCIES_ACCEPTED, Text.of(value));
}
@Override
public FoodEstablishment.Builder addDepartment(Organization value) {
return addProperty(CoreConstants.PROPERTY_DEPARTMENT, value);
}
@Override
public FoodEstablishment.Builder addDepartment(Organization.Builder value) {
return addProperty(CoreConstants.PROPERTY_DEPARTMENT, value.build());
}
@Override
public FoodEstablishment.Builder addDepartment(String value) {
return addProperty(CoreConstants.PROPERTY_DEPARTMENT, Text.of(value));
}
// description / dissolutionDate / duns / email adders. Note the generated String
// overloads always wrap as Text, even for date-valued properties.
@Override
public FoodEstablishment.Builder addDescription(Text value) {
return addProperty(CoreConstants.PROPERTY_DESCRIPTION, value);
}
@Override
public FoodEstablishment.Builder addDescription(String value) {
return addProperty(CoreConstants.PROPERTY_DESCRIPTION, Text.of(value));
}
@Override
public FoodEstablishment.Builder addDissolutionDate(Date value) {
return addProperty(CoreConstants.PROPERTY_DISSOLUTION_DATE, value);
}
@Override
public FoodEstablishment.Builder addDissolutionDate(String value) {
return addProperty(CoreConstants.PROPERTY_DISSOLUTION_DATE, Text.of(value));
}
@Override
public FoodEstablishment.Builder addDuns(Text value) {
return addProperty(CoreConstants.PROPERTY_DUNS, value);
}
@Override
public FoodEstablishment.Builder addDuns(String value) {
return addProperty(CoreConstants.PROPERTY_DUNS, Text.of(value));
}
@Override
public FoodEstablishment.Builder addEmail(Text value) {
return addProperty(CoreConstants.PROPERTY_EMAIL, value);
}
@Override
public FoodEstablishment.Builder addEmail(String value) {
return addProperty(CoreConstants.PROPERTY_EMAIL, Text.of(value));
}
// employee(s) / event(s) adders (plural forms are legacy schema.org aliases).
@Override
public FoodEstablishment.Builder addEmployee(Person value) {
return addProperty(CoreConstants.PROPERTY_EMPLOYEE, value);
}
@Override
public FoodEstablishment.Builder addEmployee(Person.Builder value) {
return addProperty(CoreConstants.PROPERTY_EMPLOYEE, value.build());
}
@Override
public FoodEstablishment.Builder addEmployee(String value) {
return addProperty(CoreConstants.PROPERTY_EMPLOYEE, Text.of(value));
}
@Override
public FoodEstablishment.Builder addEmployees(Person value) {
return addProperty(CoreConstants.PROPERTY_EMPLOYEES, value);
}
@Override
public FoodEstablishment.Builder addEmployees(Person.Builder value) {
return addProperty(CoreConstants.PROPERTY_EMPLOYEES, value.build());
}
@Override
public FoodEstablishment.Builder addEmployees(String value) {
return addProperty(CoreConstants.PROPERTY_EMPLOYEES, Text.of(value));
}
@Override
public FoodEstablishment.Builder addEvent(Event value) {
return addProperty(CoreConstants.PROPERTY_EVENT, value);
}
@Override
public FoodEstablishment.Builder addEvent(Event.Builder value) {
return addProperty(CoreConstants.PROPERTY_EVENT, value.build());
}
@Override
public FoodEstablishment.Builder addEvent(String value) {
return addProperty(CoreConstants.PROPERTY_EVENT, Text.of(value));
}
@Override
public FoodEstablishment.Builder addEvents(Event value) {
return addProperty(CoreConstants.PROPERTY_EVENTS, value);
}
@Override
public FoodEstablishment.Builder addEvents(Event.Builder value) {
return addProperty(CoreConstants.PROPERTY_EVENTS, value.build());
}
@Override
public FoodEstablishment.Builder addEvents(String value) {
return addProperty(CoreConstants.PROPERTY_EVENTS, Text.of(value));
}
// faxNumber / founder(s) / foundingDate / foundingLocation adders.
@Override
public FoodEstablishment.Builder addFaxNumber(Text value) {
return addProperty(CoreConstants.PROPERTY_FAX_NUMBER, value);
}
@Override
public FoodEstablishment.Builder addFaxNumber(String value) {
return addProperty(CoreConstants.PROPERTY_FAX_NUMBER, Text.of(value));
}
@Override
public FoodEstablishment.Builder addFounder(Person value) {
return addProperty(CoreConstants.PROPERTY_FOUNDER, value);
}
@Override
public FoodEstablishment.Builder addFounder(Person.Builder value) {
return addProperty(CoreConstants.PROPERTY_FOUNDER, value.build());
}
@Override
public FoodEstablishment.Builder addFounder(String value) {
return addProperty(CoreConstants.PROPERTY_FOUNDER, Text.of(value));
}
@Override
public FoodEstablishment.Builder addFounders(Person value) {
return addProperty(CoreConstants.PROPERTY_FOUNDERS, value);
}
@Override
public FoodEstablishment.Builder addFounders(Person.Builder value) {
return addProperty(CoreConstants.PROPERTY_FOUNDERS, value.build());
}
@Override
public FoodEstablishment.Builder addFounders(String value) {
return addProperty(CoreConstants.PROPERTY_FOUNDERS, Text.of(value));
}
@Override
public FoodEstablishment.Builder addFoundingDate(Date value) {
return addProperty(CoreConstants.PROPERTY_FOUNDING_DATE, value);
}
@Override
public FoodEstablishment.Builder addFoundingDate(String value) {
return addProperty(CoreConstants.PROPERTY_FOUNDING_DATE, Text.of(value));
}
@Override
public FoodEstablishment.Builder addFoundingLocation(Place value) {
return addProperty(CoreConstants.PROPERTY_FOUNDING_LOCATION, value);
}
@Override
public FoodEstablishment.Builder addFoundingLocation(Place.Builder value) {
return addProperty(CoreConstants.PROPERTY_FOUNDING_LOCATION, value.build());
}
@Override
public FoodEstablishment.Builder addFoundingLocation(String value) {
return addProperty(CoreConstants.PROPERTY_FOUNDING_LOCATION, Text.of(value));
}
// geo / globalLocationNumber / hasMap adders. geo accepts either GeoCoordinates or
// GeoShape; hasMap accepts either a Map entity or a plain URL.
@Override
public FoodEstablishment.Builder addGeo(GeoCoordinates value) {
return addProperty(CoreConstants.PROPERTY_GEO, value);
}
@Override
public FoodEstablishment.Builder addGeo(GeoCoordinates.Builder value) {
return addProperty(CoreConstants.PROPERTY_GEO, value.build());
}
@Override
public FoodEstablishment.Builder addGeo(GeoShape value) {
return addProperty(CoreConstants.PROPERTY_GEO, value);
}
@Override
public FoodEstablishment.Builder addGeo(GeoShape.Builder value) {
return addProperty(CoreConstants.PROPERTY_GEO, value.build());
}
@Override
public FoodEstablishment.Builder addGeo(String value) {
return addProperty(CoreConstants.PROPERTY_GEO, Text.of(value));
}
@Override
public FoodEstablishment.Builder addGlobalLocationNumber(Text value) {
return addProperty(CoreConstants.PROPERTY_GLOBAL_LOCATION_NUMBER, value);
}
@Override
public FoodEstablishment.Builder addGlobalLocationNumber(String value) {
return addProperty(CoreConstants.PROPERTY_GLOBAL_LOCATION_NUMBER, Text.of(value));
}
@Override
public FoodEstablishment.Builder addHasMap(Map value) {
return addProperty(CoreConstants.PROPERTY_HAS_MAP, value);
}
@Override
public FoodEstablishment.Builder addHasMap(Map.Builder value) {
return addProperty(CoreConstants.PROPERTY_HAS_MAP, value.build());
}
@Override
public FoodEstablishment.Builder addHasMap(URL value) {
return addProperty(CoreConstants.PROPERTY_HAS_MAP, value);
}
@Override
public FoodEstablishment.Builder addHasMap(String value) {
return addProperty(CoreConstants.PROPERTY_HAS_MAP, Text.of(value));
}
// hasOfferCatalog / hasPOS / image / isicV4 adders.
@Override
public FoodEstablishment.Builder addHasOfferCatalog(OfferCatalog value) {
return addProperty(CoreConstants.PROPERTY_HAS_OFFER_CATALOG, value);
}
@Override
public FoodEstablishment.Builder addHasOfferCatalog(OfferCatalog.Builder value) {
return addProperty(CoreConstants.PROPERTY_HAS_OFFER_CATALOG, value.build());
}
@Override
public FoodEstablishment.Builder addHasOfferCatalog(String value) {
return addProperty(CoreConstants.PROPERTY_HAS_OFFER_CATALOG, Text.of(value));
}
@Override
public FoodEstablishment.Builder addHasPOS(Place value) {
return addProperty(CoreConstants.PROPERTY_HAS_POS, value);
}
@Override
public FoodEstablishment.Builder addHasPOS(Place.Builder value) {
return addProperty(CoreConstants.PROPERTY_HAS_POS, value.build());
}
@Override
public FoodEstablishment.Builder addHasPOS(String value) {
return addProperty(CoreConstants.PROPERTY_HAS_POS, Text.of(value));
}
@Override
public FoodEstablishment.Builder addImage(ImageObject value) {
return addProperty(CoreConstants.PROPERTY_IMAGE, value);
}
@Override
public FoodEstablishment.Builder addImage(ImageObject.Builder value) {
return addProperty(CoreConstants.PROPERTY_IMAGE, value.build());
}
@Override
public FoodEstablishment.Builder addImage(URL value) {
return addProperty(CoreConstants.PROPERTY_IMAGE, value);
}
@Override
public FoodEstablishment.Builder addImage(String value) {
return addProperty(CoreConstants.PROPERTY_IMAGE, Text.of(value));
}
@Override
public FoodEstablishment.Builder addIsicV4(Text value) {
return addProperty(CoreConstants.PROPERTY_ISIC_V4, value);
}
@Override
public FoodEstablishment.Builder addIsicV4(String value) {
return addProperty(CoreConstants.PROPERTY_ISIC_V4, Text.of(value));
}
// legalName / location / logo adders. location accepts Place, PostalAddress, or Text.
@Override
public FoodEstablishment.Builder addLegalName(Text value) {
return addProperty(CoreConstants.PROPERTY_LEGAL_NAME, value);
}
@Override
public FoodEstablishment.Builder addLegalName(String value) {
return addProperty(CoreConstants.PROPERTY_LEGAL_NAME, Text.of(value));
}
@Override
public FoodEstablishment.Builder addLocation(Place value) {
return addProperty(CoreConstants.PROPERTY_LOCATION, value);
}
@Override
public FoodEstablishment.Builder addLocation(Place.Builder value) {
return addProperty(CoreConstants.PROPERTY_LOCATION, value.build());
}
@Override
public FoodEstablishment.Builder addLocation(PostalAddress value) {
return addProperty(CoreConstants.PROPERTY_LOCATION, value);
}
@Override
public FoodEstablishment.Builder addLocation(PostalAddress.Builder value) {
return addProperty(CoreConstants.PROPERTY_LOCATION, value.build());
}
@Override
public FoodEstablishment.Builder addLocation(Text value) {
return addProperty(CoreConstants.PROPERTY_LOCATION, value);
}
@Override
public FoodEstablishment.Builder addLocation(String value) {
return addProperty(CoreConstants.PROPERTY_LOCATION, Text.of(value));
}
@Override
public FoodEstablishment.Builder addLogo(ImageObject value) {
return addProperty(CoreConstants.PROPERTY_LOGO, value);
}
@Override
public FoodEstablishment.Builder addLogo(ImageObject.Builder value) {
return addProperty(CoreConstants.PROPERTY_LOGO, value.build());
}
@Override
public FoodEstablishment.Builder addLogo(URL value) {
return addProperty(CoreConstants.PROPERTY_LOGO, value);
}
@Override
public FoodEstablishment.Builder addLogo(String value) {
return addProperty(CoreConstants.PROPERTY_LOGO, Text.of(value));
}
// mainEntityOfPage / makesOffer / map / maps adders.
@Override
public FoodEstablishment.Builder addMainEntityOfPage(CreativeWork value) {
return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, value);
}
@Override
public FoodEstablishment.Builder addMainEntityOfPage(CreativeWork.Builder value) {
return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, value.build());
}
@Override
public FoodEstablishment.Builder addMainEntityOfPage(URL value) {
return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, value);
}
@Override
public FoodEstablishment.Builder addMainEntityOfPage(String value) {
return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, Text.of(value));
}
@Override
public FoodEstablishment.Builder addMakesOffer(Offer value) {
return addProperty(CoreConstants.PROPERTY_MAKES_OFFER, value);
}
@Override
public FoodEstablishment.Builder addMakesOffer(Offer.Builder value) {
return addProperty(CoreConstants.PROPERTY_MAKES_OFFER, value.build());
}
@Override
public FoodEstablishment.Builder addMakesOffer(String value) {
return addProperty(CoreConstants.PROPERTY_MAKES_OFFER, Text.of(value));
}
@Override
public FoodEstablishment.Builder addMap(URL value) {
return addProperty(CoreConstants.PROPERTY_MAP, value);
}
@Override
public FoodEstablishment.Builder addMap(String value) {
return addProperty(CoreConstants.PROPERTY_MAP, Text.of(value));
}
@Override
public FoodEstablishment.Builder addMaps(URL value) {
return addProperty(CoreConstants.PROPERTY_MAPS, value);
}
@Override
public FoodEstablishment.Builder addMaps(String value) {
return addProperty(CoreConstants.PROPERTY_MAPS, Text.of(value));
}
// member / memberOf / members adders. member(s) accept Organization or Person;
// memberOf accepts Organization or ProgramMembership.
@Override
public FoodEstablishment.Builder addMember(Organization value) {
return addProperty(CoreConstants.PROPERTY_MEMBER, value);
}
@Override
public FoodEstablishment.Builder addMember(Organization.Builder value) {
return addProperty(CoreConstants.PROPERTY_MEMBER, value.build());
}
@Override
public FoodEstablishment.Builder addMember(Person value) {
return addProperty(CoreConstants.PROPERTY_MEMBER, value);
}
@Override
public FoodEstablishment.Builder addMember(Person.Builder value) {
return addProperty(CoreConstants.PROPERTY_MEMBER, value.build());
}
@Override
public FoodEstablishment.Builder addMember(String value) {
return addProperty(CoreConstants.PROPERTY_MEMBER, Text.of(value));
}
@Override
public FoodEstablishment.Builder addMemberOf(Organization value) {
return addProperty(CoreConstants.PROPERTY_MEMBER_OF, value);
}
@Override
public FoodEstablishment.Builder addMemberOf(Organization.Builder value) {
return addProperty(CoreConstants.PROPERTY_MEMBER_OF, value.build());
}
@Override
public FoodEstablishment.Builder addMemberOf(ProgramMembership value) {
return addProperty(CoreConstants.PROPERTY_MEMBER_OF, value);
}
@Override
public FoodEstablishment.Builder addMemberOf(ProgramMembership.Builder value) {
return addProperty(CoreConstants.PROPERTY_MEMBER_OF, value.build());
}
@Override
public FoodEstablishment.Builder addMemberOf(String value) {
return addProperty(CoreConstants.PROPERTY_MEMBER_OF, Text.of(value));
}
@Override
public FoodEstablishment.Builder addMembers(Organization value) {
return addProperty(CoreConstants.PROPERTY_MEMBERS, value);
}
@Override
public FoodEstablishment.Builder addMembers(Organization.Builder value) {
return addProperty(CoreConstants.PROPERTY_MEMBERS, value.build());
}
@Override
public FoodEstablishment.Builder addMembers(Person value) {
return addProperty(CoreConstants.PROPERTY_MEMBERS, value);
}
@Override
public FoodEstablishment.Builder addMembers(Person.Builder value) {
return addProperty(CoreConstants.PROPERTY_MEMBERS, value.build());
}
@Override
public FoodEstablishment.Builder addMembers(String value) {
return addProperty(CoreConstants.PROPERTY_MEMBERS, Text.of(value));
}
// menu / naics / name / numberOfEmployees / openingHours adders.
@Override
public FoodEstablishment.Builder addMenu(Text value) {
return addProperty(CoreConstants.PROPERTY_MENU, value);
}
@Override
public FoodEstablishment.Builder addMenu(URL value) {
return addProperty(CoreConstants.PROPERTY_MENU, value);
}
@Override
public FoodEstablishment.Builder addMenu(String value) {
return addProperty(CoreConstants.PROPERTY_MENU, Text.of(value));
}
@Override
public FoodEstablishment.Builder addNaics(Text value) {
return addProperty(CoreConstants.PROPERTY_NAICS, value);
}
@Override
public FoodEstablishment.Builder addNaics(String value) {
return addProperty(CoreConstants.PROPERTY_NAICS, Text.of(value));
}
@Override
public FoodEstablishment.Builder addName(Text value) {
return addProperty(CoreConstants.PROPERTY_NAME, value);
}
@Override
public FoodEstablishment.Builder addName(String value) {
return addProperty(CoreConstants.PROPERTY_NAME, Text.of(value));
}
@Override
public FoodEstablishment.Builder addNumberOfEmployees(QuantitativeValue value) {
return addProperty(CoreConstants.PROPERTY_NUMBER_OF_EMPLOYEES, value);
}
@Override
public FoodEstablishment.Builder addNumberOfEmployees(QuantitativeValue.Builder value) {
return addProperty(CoreConstants.PROPERTY_NUMBER_OF_EMPLOYEES, value.build());
}
@Override
public FoodEstablishment.Builder addNumberOfEmployees(String value) {
return addProperty(CoreConstants.PROPERTY_NUMBER_OF_EMPLOYEES, Text.of(value));
}
@Override
public FoodEstablishment.Builder addOpeningHours(Text value) {
return addProperty(CoreConstants.PROPERTY_OPENING_HOURS, value);
}
@Override
public FoodEstablishment.Builder addOpeningHours(String value) {
return addProperty(CoreConstants.PROPERTY_OPENING_HOURS, Text.of(value));
}
// openingHoursSpecification / owns / parentOrganization adders.
@Override
public FoodEstablishment.Builder addOpeningHoursSpecification(OpeningHoursSpecification value) {
return addProperty(CoreConstants.PROPERTY_OPENING_HOURS_SPECIFICATION, value);
}
@Override
public FoodEstablishment.Builder addOpeningHoursSpecification(
OpeningHoursSpecification.Builder value) {
return addProperty(CoreConstants.PROPERTY_OPENING_HOURS_SPECIFICATION, value.build());
}
@Override
public FoodEstablishment.Builder addOpeningHoursSpecification(String value) {
return addProperty(CoreConstants.PROPERTY_OPENING_HOURS_SPECIFICATION, Text.of(value));
}
@Override
public FoodEstablishment.Builder addOwns(OwnershipInfo value) {
return addProperty(CoreConstants.PROPERTY_OWNS, value);
}
@Override
public FoodEstablishment.Builder addOwns(OwnershipInfo.Builder value) {
return addProperty(CoreConstants.PROPERTY_OWNS, value.build());
}
@Override
public FoodEstablishment.Builder addOwns(Product value) {
return addProperty(CoreConstants.PROPERTY_OWNS, value);
}
@Override
public FoodEstablishment.Builder addOwns(Product.Builder value) {
return addProperty(CoreConstants.PROPERTY_OWNS, value.build());
}
@Override
public FoodEstablishment.Builder addOwns(String value) {
return addProperty(CoreConstants.PROPERTY_OWNS, Text.of(value));
}
@Override
public FoodEstablishment.Builder addParentOrganization(Organization value) {
return addProperty(CoreConstants.PROPERTY_PARENT_ORGANIZATION, value);
}
@Override
public FoodEstablishment.Builder addParentOrganization(Organization.Builder value) {
return addProperty(CoreConstants.PROPERTY_PARENT_ORGANIZATION, value.build());
}
@Override
public FoodEstablishment.Builder addParentOrganization(String value) {
return addProperty(CoreConstants.PROPERTY_PARENT_ORGANIZATION, Text.of(value));
}
// paymentAccepted / photo(s) / potentialAction adders. photo(s) accept ImageObject
// or Photograph.
@Override
public FoodEstablishment.Builder addPaymentAccepted(Text value) {
return addProperty(CoreConstants.PROPERTY_PAYMENT_ACCEPTED, value);
}
@Override
public FoodEstablishment.Builder addPaymentAccepted(String value) {
return addProperty(CoreConstants.PROPERTY_PAYMENT_ACCEPTED, Text.of(value));
}
@Override
public FoodEstablishment.Builder addPhoto(ImageObject value) {
return addProperty(CoreConstants.PROPERTY_PHOTO, value);
}
@Override
public FoodEstablishment.Builder addPhoto(ImageObject.Builder value) {
return addProperty(CoreConstants.PROPERTY_PHOTO, value.build());
}
@Override
public FoodEstablishment.Builder addPhoto(Photograph value) {
return addProperty(CoreConstants.PROPERTY_PHOTO, value);
}
@Override
public FoodEstablishment.Builder addPhoto(Photograph.Builder value) {
return addProperty(CoreConstants.PROPERTY_PHOTO, value.build());
}
@Override
public FoodEstablishment.Builder addPhoto(String value) {
return addProperty(CoreConstants.PROPERTY_PHOTO, Text.of(value));
}
@Override
public FoodEstablishment.Builder addPhotos(ImageObject value) {
return addProperty(CoreConstants.PROPERTY_PHOTOS, value);
}
@Override
public FoodEstablishment.Builder addPhotos(ImageObject.Builder value) {
return addProperty(CoreConstants.PROPERTY_PHOTOS, value.build());
}
@Override
public FoodEstablishment.Builder addPhotos(Photograph value) {
return addProperty(CoreConstants.PROPERTY_PHOTOS, value);
}
@Override
public FoodEstablishment.Builder addPhotos(Photograph.Builder value) {
return addProperty(CoreConstants.PROPERTY_PHOTOS, value.build());
}
@Override
public FoodEstablishment.Builder addPhotos(String value) {
return addProperty(CoreConstants.PROPERTY_PHOTOS, Text.of(value));
}
@Override
public FoodEstablishment.Builder addPotentialAction(Action value) {
return addProperty(CoreConstants.PROPERTY_POTENTIAL_ACTION, value);
}
@Override
public FoodEstablishment.Builder addPotentialAction(Action.Builder value) {
return addProperty(CoreConstants.PROPERTY_POTENTIAL_ACTION, value.build());
}
@Override
public FoodEstablishment.Builder addPotentialAction(String value) {
return addProperty(CoreConstants.PROPERTY_POTENTIAL_ACTION, Text.of(value));
}
// priceRange / review(s) / sameAs / seeks adders.
@Override
public FoodEstablishment.Builder addPriceRange(Text value) {
return addProperty(CoreConstants.PROPERTY_PRICE_RANGE, value);
}
@Override
public FoodEstablishment.Builder addPriceRange(String value) {
return addProperty(CoreConstants.PROPERTY_PRICE_RANGE, Text.of(value));
}
@Override
public FoodEstablishment.Builder addReview(Review value) {
return addProperty(CoreConstants.PROPERTY_REVIEW, value);
}
@Override
public FoodEstablishment.Builder addReview(Review.Builder value) {
return addProperty(CoreConstants.PROPERTY_REVIEW, value.build());
}
@Override
public FoodEstablishment.Builder addReview(String value) {
return addProperty(CoreConstants.PROPERTY_REVIEW, Text.of(value));
}
@Override
public FoodEstablishment.Builder addReviews(Review value) {
return addProperty(CoreConstants.PROPERTY_REVIEWS, value);
}
@Override
public FoodEstablishment.Builder addReviews(Review.Builder value) {
return addProperty(CoreConstants.PROPERTY_REVIEWS, value.build());
}
@Override
public FoodEstablishment.Builder addReviews(String value) {
return addProperty(CoreConstants.PROPERTY_REVIEWS, Text.of(value));
}
@Override
public FoodEstablishment.Builder addSameAs(URL value) {
return addProperty(CoreConstants.PROPERTY_SAME_AS, value);
}
@Override
public FoodEstablishment.Builder addSameAs(String value) {
return addProperty(CoreConstants.PROPERTY_SAME_AS, Text.of(value));
}
@Override
public FoodEstablishment.Builder addSeeks(Demand value) {
return addProperty(CoreConstants.PROPERTY_SEEKS, value);
}
@Override
public FoodEstablishment.Builder addSeeks(Demand.Builder value) {
return addProperty(CoreConstants.PROPERTY_SEEKS, value.build());
}
@Override
public FoodEstablishment.Builder addSeeks(String value) {
return addProperty(CoreConstants.PROPERTY_SEEKS, Text.of(value));
}
// servesCuisine / serviceArea / subOrganization adders. serviceArea accepts
// AdministrativeArea, GeoShape, or Place.
@Override
public FoodEstablishment.Builder addServesCuisine(Text value) {
return addProperty(CoreConstants.PROPERTY_SERVES_CUISINE, value);
}
@Override
public FoodEstablishment.Builder addServesCuisine(String value) {
return addProperty(CoreConstants.PROPERTY_SERVES_CUISINE, Text.of(value));
}
@Override
public FoodEstablishment.Builder addServiceArea(AdministrativeArea value) {
return addProperty(CoreConstants.PROPERTY_SERVICE_AREA, value);
}
@Override
public FoodEstablishment.Builder addServiceArea(AdministrativeArea.Builder value) {
return addProperty(CoreConstants.PROPERTY_SERVICE_AREA, value.build());
}
@Override
public FoodEstablishment.Builder addServiceArea(GeoShape value) {
return addProperty(CoreConstants.PROPERTY_SERVICE_AREA, value);
}
@Override
public FoodEstablishment.Builder addServiceArea(GeoShape.Builder value) {
return addProperty(CoreConstants.PROPERTY_SERVICE_AREA, value.build());
}
@Override
public FoodEstablishment.Builder addServiceArea(Place value) {
return addProperty(CoreConstants.PROPERTY_SERVICE_AREA, value);
}
@Override
public FoodEstablishment.Builder addServiceArea(Place.Builder value) {
return addProperty(CoreConstants.PROPERTY_SERVICE_AREA, value.build());
}
@Override
public FoodEstablishment.Builder addServiceArea(String value) {
return addProperty(CoreConstants.PROPERTY_SERVICE_AREA, Text.of(value));
}
@Override
public FoodEstablishment.Builder addSubOrganization(Organization value) {
return addProperty(CoreConstants.PROPERTY_SUB_ORGANIZATION, value);
}
@Override
public FoodEstablishment.Builder addSubOrganization(Organization.Builder value) {
return addProperty(CoreConstants.PROPERTY_SUB_ORGANIZATION, value.build());
}
@Override
public FoodEstablishment.Builder addSubOrganization(String value) {
return addProperty(CoreConstants.PROPERTY_SUB_ORGANIZATION, Text.of(value));
}
// taxID / telephone / url / vatID adders, then detailedDescription — the first of the
// Google-extension properties (GoogConstants namespace rather than CoreConstants).
@Override
public FoodEstablishment.Builder addTaxID(Text value) {
return addProperty(CoreConstants.PROPERTY_TAX_ID, value);
}
@Override
public FoodEstablishment.Builder addTaxID(String value) {
return addProperty(CoreConstants.PROPERTY_TAX_ID, Text.of(value));
}
@Override
public FoodEstablishment.Builder addTelephone(Text value) {
return addProperty(CoreConstants.PROPERTY_TELEPHONE, value);
}
@Override
public FoodEstablishment.Builder addTelephone(String value) {
return addProperty(CoreConstants.PROPERTY_TELEPHONE, Text.of(value));
}
@Override
public FoodEstablishment.Builder addUrl(URL value) {
return addProperty(CoreConstants.PROPERTY_URL, value);
}
@Override
public FoodEstablishment.Builder addUrl(String value) {
return addProperty(CoreConstants.PROPERTY_URL, Text.of(value));
}
@Override
public FoodEstablishment.Builder addVatID(Text value) {
return addProperty(CoreConstants.PROPERTY_VAT_ID, value);
}
@Override
public FoodEstablishment.Builder addVatID(String value) {
return addProperty(CoreConstants.PROPERTY_VAT_ID, Text.of(value));
}
@Override
public FoodEstablishment.Builder addDetailedDescription(Article value) {
return addProperty(GoogConstants.PROPERTY_DETAILED_DESCRIPTION, value);
}
@Override
public FoodEstablishment.Builder addDetailedDescription(Article.Builder value) {
return addProperty(GoogConstants.PROPERTY_DETAILED_DESCRIPTION, value.build());
}
@Override
public FoodEstablishment.Builder addDetailedDescription(String value) {
return addProperty(GoogConstants.PROPERTY_DETAILED_DESCRIPTION, Text.of(value));
}
// popularityScore adders (Google-extension property).
@Override
public FoodEstablishment.Builder addPopularityScore(PopularityScoreSpecification value) {
return addProperty(GoogConstants.PROPERTY_POPULARITY_SCORE, value);
}
@Override
public FoodEstablishment.Builder addPopularityScore(
PopularityScoreSpecification.Builder value) {
return addProperty(GoogConstants.PROPERTY_POPULARITY_SCORE, value.build());
}
@Override
public FoodEstablishment.Builder addPopularityScore(String value) {
return addProperty(GoogConstants.PROPERTY_POPULARITY_SCORE, Text.of(value));
}
// Materializes the accumulated properties and reverse references into an immutable impl.
@Override
public FoodEstablishment build() {
return new FoodEstablishmentImpl(properties, reverseMap);
}
}
// Constructor: stores the property multimap and reverse-reference multimap in the base class.
public FoodEstablishmentImpl(
Multimap<String, ValueType> properties, Multimap<String, Thing> reverseMap) {
super(properties, reverseMap);
}
// Returns the fully-qualified schema.org type name for this entity.
@Override
public String getFullTypeName() {
return CoreConstants.TYPE_FOOD_ESTABLISHMENT;
}
/**
 * Reports whether this type declares {@code property}, which may be given bare or
 * qualified with either the core or the Google-extension namespace.
 */
@Override
public boolean includesProperty(String property) {
String[] candidates = {
CoreConstants.NAMESPACE + property,
GoogConstants.NAMESPACE + property,
property
};
for (String candidate : candidates) {
if (PROPERTY_SET.contains(candidate)) {
return true;
}
}
return false;
}
// Typed accessors: each returns every value stored under the corresponding property
// (possibly empty) as an immutable list.
@Override
public ImmutableList<SchemaOrgType> getAcceptsReservationsList() {
return getProperty(CoreConstants.PROPERTY_ACCEPTS_RESERVATIONS);
}
@Override
public ImmutableList<SchemaOrgType> getMenuList() {
return getProperty(CoreConstants.PROPERTY_MENU);
}
@Override
public ImmutableList<SchemaOrgType> getServesCuisineList() {
return getProperty(CoreConstants.PROPERTY_SERVES_CUISINE);
}
}
| |
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.updateSettings.impl;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.plugins.IdeaPluginDescriptor;
import com.intellij.ide.plugins.PluginManagerConfigurable;
import com.intellij.ide.plugins.PluginManagerMain;
import com.intellij.ide.plugins.PluginNode;
import com.intellij.ide.plugins.newui.*;
import com.intellij.notification.NotificationType;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ApplicationNamesInfo;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.extensions.PluginDescriptor;
import com.intellij.openapi.options.Configurable;
import com.intellij.openapi.progress.PerformInBackgroundOption;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.Task;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.Divider;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.util.text.StringUtilRt;
import com.intellij.openapi.wm.impl.welcomeScreen.WelcomeFrame;
import com.intellij.ui.OnePixelSplitter;
import com.intellij.ui.components.ActionLink;
import com.intellij.ui.components.labels.LinkListener;
import com.intellij.ui.components.panels.OpaquePanel;
import com.intellij.ui.components.panels.Wrapper;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.JBDimension;
import com.intellij.util.ui.JBUI;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Locale;
import java.util.stream.Collectors;
/**
* @author Alexander Lobas
*/
final class PluginUpdateDialog extends DialogWrapper {
// Downloaders for the updates offered by this dialog; supplied by the caller.
private final @NotNull Collection<PluginDownloader> myDownloaders;
// True when the dialog accompanies a platform (IDE) update rather than standalone plugin updates.
private final boolean myPlatformUpdate;
// Model backing the plugin list and the details page.
private final MyPluginModel myPluginModel;
// Left-hand list of updatable plugins.
private final PluginsGroupComponent myPluginsPanel;
// Single unnamed group holding all offered updates.
private final PluginsGroup myGroup = new PluginsGroup("");
// Right-hand details pane for the current selection.
private final PluginDetailsPageComponent myDetailsPage;
// Shows the aggregate download size of the selected updates.
private final JLabel myTotalLabel = new JLabel();
// "Ignore these updates" link shown in the button row.
private final ActionLink myIgnoreAction;
// Optional callback invoked when the update flow finishes.
private @Nullable Runnable myFinishCallback;
/**
 * Creates the dialog for standalone plugin updates (not part of a platform update).
 *
 * @param customRepositoryPlugins plugins from custom repositories, or null to use the
 *                                model's default custom-repo lookup
 */
PluginUpdateDialog(@Nullable Project project,
@NotNull Collection<PluginDownloader> downloaders,
@Nullable Collection<PluginNode> customRepositoryPlugins) {
this(project, downloaders, customRepositoryPlugins, false);
setTitle(IdeBundle.message("dialog.title.plugin.updates"));
}
/**
 * Creates the dialog for plugin updates that accompany a platform (IDE) update;
 * the title includes the full product name.
 */
PluginUpdateDialog(@Nullable Project project, @NotNull Collection<PluginDownloader> updatedPlugins) {
this(project, updatedPlugins, null, true);
setTitle(IdeBundle.message("updates.dialog.title", ApplicationNamesInfo.getInstance().getFullProductName()));
}
/**
 * Shared constructor: wires the plugin-list panel, the details page, the ignore link,
 * and the plugin model for the given set of updates.
 *
 * @param platformUpdate true when this dialog is part of a platform update flow
 */
private PluginUpdateDialog(@Nullable Project project,
Collection<PluginDownloader> downloaders,
@Nullable Collection<PluginNode> customRepositoryPlugins,
boolean platformUpdate) {
super(project, true);
myDownloaders = downloaders;
myPlatformUpdate = platformUpdate;
// "Ignore these updates": close the dialog and remember every listed plugin as ignored.
myIgnoreAction = new ActionLink(IdeBundle.message("updates.ignore.updates.button", downloaders.size()), e -> {
close(CANCEL_EXIT_CODE);
UpdateChecker.ignorePlugins(ContainerUtil.map(myGroup.ui.plugins, ListPluginComponent::getPluginDescriptor));
});
myPluginModel = new MyPluginModel(project) {
@Override
public void runRestartButton(@NotNull Component component) {
// The restart button inside the details page acts as this dialog's OK action.
doOKAction();
}
@Override
protected @NotNull Collection<PluginNode> getCustomRepoPlugins() {
// Prefer the caller-supplied custom-repository plugins when available.
return customRepositoryPlugins != null ? customRepositoryPlugins : super.getCustomRepoPlugins();
}
};
myPluginModel.setTopController(Configurable.TopComponentController.EMPTY);
// No-op service: this dialog manages update completion itself.
myPluginModel.setPluginUpdatesService(new PluginUpdatesService() {
@Override
public void finishUpdate() { }
});
//noinspection unchecked
myDetailsPage = new PluginDetailsPageComponent(myPluginModel, LinkListener.NULL, true);
myDetailsPage.setOnlyUpdateMode();
MultiSelectionEventHandler eventHandler = new MultiSelectionEventHandler();
myPluginsPanel = new PluginsGroupComponent(eventHandler) {
@Override
protected @NotNull ListPluginComponent createListComponent(@NotNull IdeaPluginDescriptor descriptor) {
@SuppressWarnings("unchecked") ListPluginComponent component = new ListPluginComponent(myPluginModel, descriptor, LinkListener.NULL, true);
component.setOnlyUpdateMode();
// Recompute the total size and OK-button state whenever a checkbox is toggled.
component.getChooseUpdateButton().addActionListener(e -> updateButtons());
return component;
}
};
PluginManagerConfigurable.registerCopyProvider(myPluginsPanel);
// Keep the details page in sync with the list selection.
myPluginsPanel.setSelectionListener(__ -> myDetailsPage.showPlugins(myPluginsPanel.getSelection()));
for (PluginDownloader plugin : downloaders) {
myGroup.descriptors.add(plugin.getDescriptor());
}
myGroup.sortByName();
myPluginsPanel.addGroup(myGroup);
setOKButtonText(IdeBundle.message("plugins.configurable.update.button"));
updateButtons();
init();
JRootPane rootPane = getPeer().getRootPane();
if (rootPane != null) {
// DPI-aware default dialog size.
rootPane.setPreferredSize(new JBDimension(800, 600));
}
}
/**
 * Recomputes the footer label and the OK button state from the per-plugin
 * "choose update" checkboxes: the label shows the summed download size of the selected
 * updates (hidden when the sum is zero), and OK is enabled only while at least one
 * update is selected.
 */
private void updateButtons() {
  int selectedCount = 0;
  long totalSize = 0;
  for (ListPluginComponent component : myGroup.ui.plugins) {
    if (!component.getChooseUpdateButton().isSelected()) {
      continue;
    }
    selectedCount++;
    IdeaPluginDescriptor descriptor = component.getPluginDescriptor();
    // Only repository nodes carry a download size; locally known descriptors do not.
    if (descriptor instanceof PluginNode) {
      totalSize += ((PluginNode)descriptor).getIntegerSize();
    }
  }
  String formattedSize = StringUtilRt.formatFileSize(totalSize).toUpperCase(Locale.ENGLISH);
  myTotalLabel.setText(IdeBundle.message("plugin.update.dialog.total.label", formattedSize));
  myTotalLabel.setVisible(totalSize > 0);
  getOKAction().setEnabled(selectedCount > 0);
}
/**
 * Registers a callback invoked when the background update task finishes
 * (see {@code runUpdateAll}'s {@code onFinished}).
 */
public void setFinishCallback(@NotNull Runnable finishCallback) {
  myFinishCallback = finishCallback;
}
/**
 * Closes the dialog and, unless this is a platform update (where the caller drives the
 * downloads), starts downloading/installing every update whose checkbox is selected.
 */
@Override
protected void doOKAction() {
  super.doOKAction();
  if (myPlatformUpdate) return;
  // BUG FIX: the previous code paired each downloader with myGroup.ui.plugins.get(index)
  // by position, but the group is re-sorted by name after the rows are created, so the
  // positional pairing could attribute a row's checkbox to the WRONG downloader.
  // Match each downloader to its row through the shared descriptor instance instead.
  List<PluginDownloader> toDownloads = new ArrayList<>();
  for (PluginDownloader downloader : myDownloaders) {
    for (ListPluginComponent component : myGroup.ui.plugins) {
      if (component.getPluginDescriptor() == downloader.getDescriptor()) {
        if (component.getChooseUpdateButton().isSelected()) {
          toDownloads.add(downloader);
        }
        break;
      }
    }
  }
  runUpdateAll(toDownloads, getContentPanel(), myFinishCallback);
}
/**
 * Downloads the given plugin updates in a background task and installs them. The
 * install step runs on the EDT; afterwards the user is either notified that the plugins
 * were updated in place, or prompted to restart the IDE.
 *
 * @param toDownload     updates to download and install
 * @param ownerComponent used to derive the modality state for the install step and as
 *                       the parent for restart-free install attempts; may be null
 * @param finishCallback invoked when the background task finishes, regardless of the
 *                       outcome; may be null
 */
public static void runUpdateAll(@NotNull Collection<PluginDownloader> toDownload,
                                @Nullable JComponent ownerComponent,
                                @Nullable Runnable finishCallback) {
  String message = IdeBundle.message("updates.notification.title", ApplicationNamesInfo.getInstance().getFullProductName());
  new Task.Backgroundable(null, message, true, PerformInBackgroundOption.DEAF) {
    @Override
    public void run(@NotNull ProgressIndicator indicator) {
      List<PluginDownloader> downloaders = downloadPluginUpdates(toDownload, indicator);
      if (downloaders.isEmpty()) {
        return;
      }
      ApplicationManager.getApplication().invokeLater(() -> {
        PluginUpdateResult result = UpdateInstaller.installDownloadedPluginUpdates(downloaders, dl -> !dl.tryInstallWithoutRestart(ownerComponent));
        if (result.getPluginsInstalled().isEmpty()) {
          return;
        }
        if (!result.getRestartRequired()) {
          // Renamed from "message": that name shadowed the task-title local above.
          String notificationText;
          if (result.getPluginsInstalled().size() == 1) {
            IdeaPluginDescriptor plugin = result.getPluginsInstalled().get(0);
            notificationText = IdeBundle.message("notification.content.updated.plugin.to.version", plugin.getName(), plugin.getVersion());
          }
          else {
            String names = result.getPluginsInstalled().stream().map(PluginDescriptor::getName).collect(Collectors.joining(", "));
            notificationText = IdeBundle.message("notification.content.updated.plugins", names);
          }
          UpdateChecker.getNotificationGroupForUpdateResults()
            .createNotification(notificationText, NotificationType.INFORMATION)
            .setDisplayId("plugins.updated.without.restart")
            .notify(myProject);
        }
        else if (WelcomeFrame.getInstance() == null) {
          PluginManagerMain.notifyPluginsUpdated(null);
        }
        else {
          PluginManagerConfigurable.shutdownOrRestartApp();
        }
      }, ownerComponent != null ? ModalityState.stateForComponent(ownerComponent) : ModalityState.defaultModalityState());
    }

    @Override
    public void onFinished() {
      if (finishCallback != null) {
        finishCallback.run();
      }
    }
  }.queue();
}
/**
 * Downloads the given updates while collecting per-plugin error messages. Any collected
 * errors are shown to the user in a single error dialog after the downloads finish,
 * whether or not the download call itself succeeded.
 *
 * @param toDownload updates to download
 * @param indicator  progress indicator of the surrounding background task
 * @return the downloaders whose downloads succeeded (possibly empty)
 */
private static List<PluginDownloader> downloadPluginUpdates(Collection<PluginDownloader> toDownload, ProgressIndicator indicator) {
  List<String> errors = new ArrayList<>();
  try {
    // Route error messages from every downloader into the shared list.
    for (PluginDownloader downloader : toDownload) {
      downloader.setErrorsCollector(errors);
    }
    return UpdateInstaller.downloadPluginUpdates(toDownload, indicator);
  }
  finally {
    // Always detach the collector so downloaders do not retain it past this call.
    for (PluginDownloader downloader : toDownload) {
      downloader.setErrorsCollector(null);
    }
    if (!errors.isEmpty()) {
      String text = StringUtil.join(errors, "\n\n");
      String title = IdeBundle.message("title.plugin.installation");
      ApplicationManager.getApplication().invokeLater(() -> Messages.showErrorDialog(text, title), ModalityState.any());
    }
  }
}
/**
 * Closes the dialog without starting any downloads; {@code CANCEL_EXIT_CODE} lets
 * callers distinguish cancellation from a confirmed update.
 */
@Override
public void doCancelAction() {
  close(CANCEL_EXIT_CODE);
}
/**
 * Places the "ignore these updates" link in the dialog's south (button) row, with a
 * small left inset separating it from the dialog edge.
 */
@Override
protected JPanel createSouthAdditionalPanel() {
  Wrapper linkWrapper = new Wrapper(myIgnoreAction);
  linkWrapper.setBorder(JBUI.Borders.emptyLeft(10));
  return linkWrapper;
}
/** Help topic shown when the user presses the dialog's Help button. */
@Override
protected String getHelpId() {
  return "plugin.update.dialog";
}
/** Compact style: no gap between the content area and the button row. */
@Override
protected @NotNull DialogStyle getStyle() {
  return DialogStyle.COMPACT;
}
/** Key under which the dialog's size/position are persisted between sessions. */
@Override
protected String getDimensionServiceKey() {
  return "#com.intellij.openapi.updateSettings.impl.PluginUpdateInfoDialog";
}
/**
 * Builds the dialog's main area: a one-pixel splitter with the plugin list plus a footer
 * (total download size) on the left, and the details page on the right.
 */
@Override
protected JComponent createCenterPanel() {
  OnePixelSplitter splitter = new OnePixelSplitter(false, 0.45f) {
    @Override
    protected Divider createDivider() {
      Divider divider = super.createDivider();
      divider.setBackground(PluginManagerConfigurable.SEARCH_FIELD_BORDER_COLOR);
      return divider;
    }
  };

  // Detach the group's own panel from wherever addGroup() placed it and collapse its
  // preferred size — presumably to suppress the group header; this dialog renders its
  // own footer instead. NOTE(review): confirm against PluginsGroupComponent.
  myGroup.ui.panel.getParent().remove(myGroup.ui.panel);
  myGroup.ui.panel.setPreferredSize(new Dimension());

  JPanel leftPanel = new JPanel(new BorderLayout());
  leftPanel.add(PluginManagerConfigurable.createScrollPane(myPluginsPanel, true));

  // Footer strip under the list showing the total download size.
  OpaquePanel titlePanel = new OpaquePanel(new BorderLayout(), PluginManagerConfigurable.MAIN_BG_COLOR);
  titlePanel.setBorder(JBUI.Borders.empty(13, 12));
  leftPanel.add(titlePanel, BorderLayout.SOUTH);

  myTotalLabel.setForeground(PluginsGroupComponent.SECTION_HEADER_FOREGROUND);
  titlePanel.add(myTotalLabel);

  splitter.setFirstComponent(leftPanel);
  splitter.setSecondComponent(myDetailsPage);
  return splitter;
}
}
| |
/*
* Copyright (c) 2014 Vishal Disawar
*
* Dual licensed under Apache2.0 and MIT Open Source License (included below):
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.coursera.cardswap;
import android.app.Activity;
import android.content.Intent;
import android.content.SharedPreferences;
import android.nfc.NdefMessage;
import android.nfc.NdefRecord;
import android.nfc.NfcAdapter;
import android.nfc.NfcEvent;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.preference.PreferenceManager;
import android.view.Display;
import android.view.Menu;
import android.view.MenuItem;
import android.view.Surface;
import android.view.View;
import android.view.WindowManager;
import android.widget.TextView;
import android.widget.Toast;
import java.nio.charset.Charset;
/**
* Main activity of this app. This is where the main business card is shown and where the NFC
* sends the information. More info on NFC can be found at
* <a href="http://developer.android.com/guide/topics/connectivity/nfc/nfc.html">NFC Basics</a>
*
* @author Vishal Disawar
*/
public class CardSwap extends Activity
        implements NfcAdapter.CreateNdefMessageCallback, NfcAdapter.OnNdefPushCompleteCallback {

    // Default NFC adapter of this device; stays null when NFC hardware is unavailable.
    private NfcAdapter nfcAdapter;
    // Message "what" code posted by onNdefPushComplete when a beam succeeds.
    private static final int MESSAGE_SENT = 1;
    // Request code for the CardEdit sub-activity started from the options menu.
    private static final int CARD_MADE = 1;

    // Card fields captured from the TextViews in onResume; these are the payloads
    // beamed over NFC in createNdefMessage.
    private String personName;
    private String business;
    private String address;
    private String cityStateZip;
    private String number;
    private String email;
    private String website;

    /**
     * A handler runs in a background thread, and communicates with other
     * handlers and threads that send a message to this activity. In this case
     * this handler will receive a message from onNdefPushComplete.
     * Volatile because it is written on the UI thread (onStart/onStop) and read
     * from the NFC Binder thread (onNdefPushComplete).
     */
    private volatile Handler handler;

    /**
     * Sets up the activity layout and, on first creation only, attaches the
     * CardFragment that renders the business card.
     */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_card_swap);
        if (savedInstanceState == null) {
            // the fragment manager has methods to add and remove fragments
            // in this case we are adding a new Card Fragment our empty FrameLayout
            // in the activity_card_swap.xml file
            getFragmentManager().beginTransaction()
                    .add(R.id.card_fragment_container, new CardFragment())
                    .commit(); // all fragment transactions are completed with a call to commit()
        }
    }

    /**
     * Refreshes the card contents each time the activity comes to the foreground.
     * If the user has not edited the card yet, shows/hides the sample instructions
     * depending on screen rotation; once edited, registers the NFC push callbacks
     * and snapshots the card's text fields for beaming.
     */
    @Override
    protected void onResume() {
        super.onResume();
        //get a reference to all the TextViews in fragment_card.xml
        final TextView personNameTView = (TextView) findViewById(R.id.person_name);
        final TextView businessNameTView = (TextView) findViewById(R.id.business_name);
        final TextView addressTView = (TextView) findViewById(R.id.person_address);
        final TextView cityStateZipTView = (TextView) findViewById(R.id.person_citystate);
        final TextView numberTView = (TextView) findViewById(R.id.person_number);
        final TextView emailTView = (TextView) findViewById(R.id.person_email);
        final TextView websiteTView = (TextView) findViewById(R.id.person_website);
        // CardEdit records this flag once the user saves their own card data.
        final SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(this);
        final boolean edited = sp.getBoolean(CardEdit.CARD_EDITED_KEY, false);
        if (!edited) {
            // get the display of the window to get system orientation changes
            Display display = ((WindowManager) getSystemService(WINDOW_SERVICE))
                    .getDefaultDisplay();
            int rotation = display.getRotation(); // get the rotation of the
            // screen
            // we want to hide the instructions for when the user goes in
            // landscape mode since we
            // want the card to take up most of the screen
            // reference to TextViews in activity_card_swap.xml
            TextView sampleTitle = (TextView) findViewById(R.id.sample_title);
            TextView instructions = (TextView) findViewById(R.id.instructions);
            if (rotation == Surface.ROTATION_90
                    || rotation == Surface.ROTATION_270) {
                sampleTitle.setVisibility(View.GONE);
                instructions.setVisibility(View.GONE);
            } else {
                sampleTitle.setVisibility(View.VISIBLE);
                instructions.setVisibility(View.VISIBLE);
            }
        }
        // only ready the activity for NFC if the user put their own info for their card
        if (edited) {
            nfcAdapter = NfcAdapter.getDefaultAdapter(this); //get the adapter for NFC for this phone
            if (nfcAdapter == null)
                Toast.makeText(this, "NFC is unavailable", Toast.LENGTH_LONG).show();
            else {
                //create the callback if NFC is present
                nfcAdapter.setNdefPushMessageCallback(this, this);
                //register callback to listen for data sent success
                nfcAdapter.setOnNdefPushCompleteCallback(this, this);
            }
            // Get the text from the card for each info to send over
            personName = personNameTView.getText().toString();
            business = businessNameTView.getText().toString();
            address = addressTView.getText().toString();
            cityStateZip = cityStateZipTView.getText().toString();
            number = numberTView.getText().toString();
            email = emailTView.getText().toString();
            website = websiteTView.getText().toString();
        }
    }

    @Override
    protected void onPause() {
        super.onPause();
        // NOTE(review): disableForegroundDispatch is called, but no matching
        // enableForegroundDispatch appears anywhere in this activity — confirm this is
        // intentional; disabling dispatch that was never enabled may misbehave.
        if (nfcAdapter != null)
            nfcAdapter.disableForegroundDispatch(this); //disable NFC when activity is not in view
    }

    /*
     * Use this method to create and prepare the data and encapsulate in an NdefMessage.
     * The NdefMessage sends the data through NFC beam. This method gets called when the phones
     * touch together.
     */
    @Override
    public NdefMessage createNdefMessage(NfcEvent nfcEvent) {
        String mimeType = "application/com.coursera.cardswap";
        byte[] mimeBytes = mimeType.getBytes(Charset.forName("US-ASCII"));
        // NOTE(review): the payload calls below use String.getBytes() with the platform
        // default charset; the receiving side must decode with the same charset —
        // consider an explicit UTF-8 charset on both ends.
        //encapsulate the data in an array of NdefRecords
        NdefRecord[] ndefRecord = new NdefRecord[]{
                // each NdefRecord has to have a tnf, type, id, and payload.
                // the tnf specifies what the data will be which we specify as a MIME.
                // the type is what the intent filter (in the manifest) for an activity will look for.
                // We give a default byte for the id. And the payload is the data in bytes.
                new NdefRecord(NdefRecord.TNF_MIME_MEDIA, mimeBytes, new byte[0], personName.getBytes()),
                new NdefRecord(NdefRecord.TNF_MIME_MEDIA, mimeBytes, new byte[0], business.getBytes()),
                new NdefRecord(NdefRecord.TNF_MIME_MEDIA, mimeBytes, new byte[0], address.getBytes()),
                new NdefRecord(NdefRecord.TNF_MIME_MEDIA, mimeBytes, new byte[0], cityStateZip.getBytes()),
                new NdefRecord(NdefRecord.TNF_MIME_MEDIA, mimeBytes, new byte[0], number.getBytes()),
                new NdefRecord(NdefRecord.TNF_MIME_MEDIA, mimeBytes, new byte[0], email.getBytes()),
                new NdefRecord(NdefRecord.TNF_MIME_MEDIA, mimeBytes, new byte[0], website.getBytes()),
                // this creates the application record for this app, which takes the user to this app
                // in the google play store, if when the NFC happens, the receiving phone doesn't have
                // this app installed
                NdefRecord.createApplicationRecord("com.coursera.cardswap")
        };
        //An NdefMessage is what gets sent in the NFC and we pass the array of our NdefRecords
        //to encapsulate it in the NdefMessage
        return new NdefMessage(ndefRecord);
    }

    /**
     * Creates the handler that turns the MESSAGE_SENT message (posted from the NFC
     * Binder thread) into a user-visible toast on the UI thread.
     */
    @Override
    protected void onStart() {
        super.onStart();
        // You have to be careful with memory leaks with inner classes and
        // handlers
        // In this app we remove the handler and messages in onStop
        // To ensure the handler cannot outlive the Activity object
        handler = new Handler() {
            @Override
            public void handleMessage(Message msg) {
                // if the message 'what' matches the constant MESSAGE_SENT, it
                // means the callback
                // to onNdefPushComplete happened which means the NFC was a
                // success
                if (msg.what == MESSAGE_SENT) {
                    Toast.makeText(getApplicationContext(), "Contact Sent!",
                            Toast.LENGTH_LONG).show();
                }
            }
        };
    }

    /**
     * Drops the handler and any pending MESSAGE_SENT messages so the handler (and the
     * activity it implicitly references) cannot leak once the activity is stopped.
     */
    @Override
    protected void onStop() {
        super.onStop();
        Handler oldHandler = handler;
        handler = null;
        if (oldHandler != null) {
            oldHandler.removeMessages(MESSAGE_SENT);
        }
    }

    /**
     * This method handles the event for when the NFC beam gets completed
     *
     * @param nfcEvent the NfcEvent
     */
    @Override
    public void onNdefPushComplete(NfcEvent nfcEvent) {
        //A handler is needed to send a message to this activity
        //because NFC happens in a Binder thread.
        //The target is created by the OS, so we just have to send it.
        //We give obtainMessage the parameter MESSAGE_SENT so we can check when we handle
        //the message that message was sent from this method
        // Read the volatile field once: onStop may null it concurrently.
        Handler h = handler;
        if (h != null)
            h.obtainMessage(MESSAGE_SENT).sendToTarget();
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.card_swap, menu);
        return true;
    }

    /**
     * Launches the card editor when its menu item is chosen; the editor reports back
     * through onActivityResult with request code CARD_MADE.
     */
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();
        if (id == R.id.action_card_edit) {
            // start activity for result starts another activity expecting to get something back
            // things that are sent back could be data or just indication of success
            startActivityForResult(new Intent(this, CardEdit.class), CARD_MADE);
        }
        return id == R.id.action_settings || super.onOptionsItemSelected(item);
    }

    //this method executes when the intent that started the sub-activity finished
    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (requestCode == CARD_MADE) {
            if (resultCode == RESULT_OK)
                // if we get passed RESULT_OK it is an indication of success, so tell the user via
                // a Toast. A toast is text that appears on the screen for a short amount of time
                Toast.makeText(this, "Your changes were successfully saved", Toast.LENGTH_SHORT)
                        .show();
        }
    }
}
| |
/*
* Copyright (C) 2012 Joakim Persson, Daniel Augurell, Adrian Bjugard, Andreas Rolen
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.chalmers.dat255.group09.Alarmed.database;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import android.util.Log;
import edu.chalmers.dat255.group09.Alarmed.model.Alarm;
/**
* A class to help the creation and accessing of alarms in a database. Gives the
* ability to create and delete alarms, and fetching one or all alarms.
*
* @author Daniel Augurell
*
*/
public class DatabaseHandler implements AlarmHandler {

    private final Context aCtx;
    private DatabaseHelper aDbHelper;
    private SQLiteDatabase aDb;

    /**
     * Database SQL statements.
     */
    private static final String DB_NAME = "data";
    private static final String DB_TABLE = "alarms";
    public static final String KEY_TIME = "time";
    public static final String KEY_DAYSOFWEEK = "daysofweek";
    public static final String KEY_ROWID = "_id";
    public static final String KEY_ENABLED = "enabled";
    public static final String KEY_MODULE = "module";
    public static final String KEY_VOLUME = "volume";
    public static final String KEY_VIBRATION = "vibration";
    public static final String KEY_ALARMTONE = "alarmtone";
    public static final String[] KEYS = {KEY_ROWID, KEY_TIME, KEY_DAYSOFWEEK,
            KEY_ENABLED, KEY_MODULE, KEY_VOLUME, KEY_VIBRATION, KEY_ALARMTONE };
    private static final String DB_CREATE = "CREATE TABLE " + DB_TABLE + " ("
            + KEY_ROWID + " INTEGER PRIMARY KEY , " + KEY_TIME + " DATETIME, "
            + KEY_DAYSOFWEEK + " INTEGER," + KEY_ENABLED + " BOOLEAN,"
            + KEY_MODULE + " STRING," + KEY_VOLUME + " INTEGER,"
            + KEY_VIBRATION + " BOOLEAN," + KEY_ALARMTONE + " STRING);";
    private static final int DB_VERSION = 9;

    /**
     *
     * A help class to open or create the database.
     *
     * @author Daniel Augurell
     *
     */
    private static class DatabaseHelper extends SQLiteOpenHelper {

        /**
         * Constructor to the DatabaseHelper.
         *
         * @param context
         *            The android context
         */
        DatabaseHelper(Context context) {
            super(context, DB_NAME, null, DB_VERSION);
        }

        @Override
        public void onCreate(SQLiteDatabase db) {
            db.execSQL(DB_CREATE);
        }

        @Override
        public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
            // Destructive upgrade: existing alarms are discarded on schema change.
            db.execSQL("DROP TABLE IF EXISTS " + DB_TABLE);
            onCreate(db);
            Log.w("DATABASE", "The database is changed from version "
                    + oldVersion + " to version " + newVersion);
        }

        @Override
        public void onDowngrade(SQLiteDatabase db, int oldVersion,
                int newVersion) {
            onUpgrade(db, oldVersion, newVersion);
        }
    }

    /**
     * Constructor for the DatabaseHandler.
     *
     * @param ctx
     *            The android context
     */
    public DatabaseHandler(Context ctx) {
        aCtx = ctx;
    }

    @Override
    public AlarmHandler openCon() {
        // Reopening is allowed: close any previous connection first.
        if (aDbHelper != null) {
            closeCon();
        }
        aDbHelper = new DatabaseHelper(aCtx);
        aDb = aDbHelper.getWritableDatabase();
        return this;
    }

    @Override
    public void closeCon() {
        // BUG FIX: guard against closeCon() being called before openCon(),
        // which previously threw a NullPointerException.
        if (aDbHelper != null) {
            aDbHelper.close();
        }
    }

    @Override
    public long addAlarm(Alarm alarm) {
        // KEY_ROWID is set to null so SQLite assigns the next primary key itself.
        ContentValues alarmTime = new ContentValues();
        alarmTime.putNull(KEY_ROWID);
        alarmTime.put(KEY_DAYSOFWEEK, alarm.getDaysOfWeek());
        alarmTime.put(KEY_ENABLED, alarm.isEnabled());
        // The alarm's time is persisted via its string form, parsed back as "HH:MM"
        // in getAlarmFromCursor.
        alarmTime.put(KEY_TIME, alarm.toString());
        alarmTime.put(KEY_MODULE, alarm.getModule());
        alarmTime.put(KEY_VOLUME, alarm.getVolume());
        alarmTime.put(KEY_VIBRATION, alarm.isVibrationEnabled());
        alarmTime.put(KEY_ALARMTONE, alarm.getToneUri());
        return aDb.insert(DB_TABLE, null, alarmTime);
    }

    @Override
    public boolean deleteAlarm(int alarmID) {
        return aDb.delete(DB_TABLE, KEY_ROWID + "=" + alarmID, null) > 0;
    }

    @Override
    public Alarm fetchFirstEnabledAlarm() {
        // Sort order is defined by Alarm's Comparable implementation.
        List<Alarm> list = fetchAllAlarms();
        Collections.sort(list);
        for (Alarm alarm : list) {
            if (alarm.isEnabled()) {
                return alarm;
            }
        }
        return null;
    }

    /**
     * Fetches all alarms as Cursor to the database.
     *
     * @return Cursor with all the alarmdata; the caller is responsible for closing it
     */
    private Cursor getAlarms() {
        return aDb.query(true, DB_TABLE, KEYS, null, null, null, null, null,
                null);
    }

    @Override
    public Alarm fetchAlarm(int alarmID) {
        Cursor cursor = aDb.query(true, DB_TABLE, KEYS, KEY_ROWID + "="
                + alarmID, null, null, null, null, null);
        // BUG FIX: the cursor was previously leaked; close it in all paths.
        try {
            if (cursor.moveToFirst()) {
                return getAlarmFromCursor(cursor);
            }
            return null;
        } finally {
            cursor.close();
        }
    }

    @Override
    public int getNumberOfAlarms() {
        // BUG FIX: the cursor was previously leaked; close it after counting.
        Cursor cursor = getAlarms();
        try {
            return cursor.getCount();
        } finally {
            cursor.close();
        }
    }

    @Override
    public List<Alarm> fetchAllAlarms() {
        Cursor cursor = getAlarms();
        ArrayList<Alarm> list = new ArrayList<Alarm>();
        // BUG FIX: the cursor was previously leaked; close it once iteration is done.
        if (cursor != null) {
            try {
                if (cursor.moveToFirst()) {
                    do {
                        list.add(getAlarmFromCursor(cursor));
                    } while (cursor.moveToNext());
                }
            } finally {
                cursor.close();
            }
        }
        return list;
    }

    /**
     * Gets the alarm from the data at the cursors current position.
     *
     * @param cursor
     *            The cursor set at the specified position.
     * @return An Alarm with the specified data.
     */
    private Alarm getAlarmFromCursor(Cursor cursor) {
        // KEY_TIME was stored as Alarm.toString(); split into hour/minute on ':'.
        String[] time = cursor.getString(cursor.getColumnIndex(KEY_TIME))
                .split(":");
        Alarm a = new Alarm(Integer.parseInt(time[0]),
                Integer.parseInt(time[1]), cursor.getInt(cursor
                        .getColumnIndex(KEY_ROWID)), cursor.getString(cursor
                        .getColumnIndex(KEY_MODULE)), cursor.getInt(cursor
                        .getColumnIndex(KEY_VOLUME)));
        // SQLite stores booleans as integers; treat any positive value as true.
        a.setEnabled(cursor.getInt(cursor.getColumnIndex(KEY_ENABLED)) > 0);
        a.setDaysOfWeek(cursor.getInt(cursor.getColumnIndex(KEY_DAYSOFWEEK)));
        a.setVibrationEnabled(cursor.getInt(cursor
                .getColumnIndex(KEY_VIBRATION)) > 0);
        a.setToneUri(cursor.getString(cursor.getColumnIndex(KEY_ALARMTONE)));
        return a;
    }

    @Override
    public boolean isEnabled(int id) {
        Cursor cursor = aDb.query(true, DB_TABLE, KEYS, KEY_ROWID + "=" + id,
                null, null, null, null, null);
        // BUG FIX: a fresh cursor is positioned BEFORE the first row, so reading
        // without moveToFirst() threw CursorIndexOutOfBoundsException on every call.
        // Also close the cursor, which was previously leaked. An unknown id now
        // reports false instead of crashing.
        try {
            return cursor.moveToFirst()
                    && cursor.getInt(cursor.getColumnIndex(KEY_ENABLED)) > 0;
        } finally {
            cursor.close();
        }
    }

    @Override
    public boolean setAlarmEnabled(int id, boolean enable) {
        ContentValues values = new ContentValues();
        values.put(KEY_ENABLED, enable);
        return aDb.update(DB_TABLE, values, KEY_ROWID + "=" + id, null) > 0;
    }
}
| |
/*
* Copyright (C) 2005-2016 Christoph Rupp (chris@crupp.de).
* All Rights Reserved.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* See the file COPYING for License information.
*/
import de.crupp.upscaledb.*;
import junit.framework.TestCase;
public class CursorTest extends TestCase {
private class MyComparator implements CompareCallback
{
public int m_counter;
public int compare(byte[] b1, byte[] b2) {
m_counter++;
if (b1.length < b2.length)
return (-1);
if (b1.length > b2.length)
return (+1);
for (int i = b1.length; --i >= 0; ) {
if (b1[i] < b2[i])
return (-1);
if (b1[i] > b2[i])
return (+1);
}
return 0;
}
}
private Environment m_env;
private Database m_db;
protected void setUp() throws Exception {
super.setUp();
try {
m_env = new Environment();
m_env.create("jtest.db");
m_db = m_env.createDatabase((short)1, Const.UPS_ENABLE_DUPLICATE_KEYS);
}
catch (DatabaseException err) {
fail("DatabaseException " + err.getMessage());
}
}
protected void tearDown() throws Exception {
super.tearDown();
m_db.close();
m_env.close();
}
public void testCursorDatabaseLong() {
Cursor c = new Cursor(m_db, 0x13);
assertEquals(0x13, c.getHandle());
}
public void testCreate() {
Cursor c;
try {
c = new Cursor(m_db);
c.close();
}
catch (DatabaseException err) {
fail("DatabaseException " + err.getMessage());
}
}
public void testCloneCursor() {
Cursor c1, c2;
try {
c1 = new Cursor(m_db);
c2 = c1.cloneCursor();
c1.close();
c2.close();
}
catch (DatabaseException err) {
fail("DatabaseException " + err.getMessage());
}
}
public void testMove() {
byte[] key = new byte[10];
byte[] record = new byte[10];
try {
Cursor c = new Cursor(m_db);
key[0] = 0;
m_db.insert(key, record);
key[0] = 1;
m_db.insert(key, record);
key[0] = 2;
m_db.insert(key, record);
key[0] = 3;
m_db.insert(key, record);
key[0] = 4;
m_db.insert(key, record);
c.move(Const.UPS_CURSOR_NEXT);
c.move(Const.UPS_CURSOR_NEXT);
c.move(Const.UPS_CURSOR_PREVIOUS);
c.move(Const.UPS_CURSOR_LAST);
c.move(Const.UPS_CURSOR_FIRST);
}
catch (DatabaseException err) {
fail("DatabaseException " + err.getMessage());
}
}
public void testMoveNegative() {
try {
Cursor c = new Cursor(m_db);
c.move(Const.UPS_CURSOR_NEXT);
}
catch (DatabaseException err) {
assertEquals(Const.UPS_KEY_NOT_FOUND, err.getErrno());
}
}
public void testMoveFirst() {
byte[] key = new byte[10];
byte[] record = new byte[10];
try {
Cursor c = new Cursor(m_db);
m_db.insert(key, record);
c.moveFirst();
}
catch (DatabaseException err) {
fail("DatabaseException "+err.getMessage());
}
}
public void testMoveLast() {
byte[] key = new byte[10];
byte[] record = new byte[10];
try {
Cursor c = new Cursor(m_db);
m_db.insert(key, record);
c.moveLast();
}
catch (DatabaseException err) {
fail("DatabaseException "+err.getMessage());
}
}
public void testMoveNext() {
byte[] key = new byte[10];
byte[] record = new byte[10];
try {
Cursor c = new Cursor(m_db);
m_db.insert(key, record);
c.moveNext();
}
catch (DatabaseException err) {
fail("DatabaseException "+err.getMessage());
}
}
public void testMovePrevious() {
byte[] key = new byte[10];
byte[] record = new byte[10];
try {
Cursor c = new Cursor(m_db);
m_db.insert(key, record);
c.movePrevious();
}
catch (DatabaseException err) {
fail("DatabaseException "+err.getMessage());
}
}
public void assertByteArrayEquals(byte[] r1, byte[] r2) {
assertEquals(r1.length, r2.length);
for (int i = 0; i < r1.length; i++) {
assertEquals(r1[i], r2[i]);
}
}
public void testGetKey() {
byte[] key = new byte[10];
key[0] = 0x13;
byte[] record = new byte[10];
try {
Cursor c = new Cursor(m_db);
m_db.insert(key, record);
c.moveFirst();
byte[] k = c.getKey();
assertByteArrayEquals(key, k);
}
catch (DatabaseException err) {
fail("DatabaseException " + err.getMessage());
}
}
public void testGetRecord() {
byte[] key = new byte[10];
byte[] record = new byte[10];
record[0] = 0x14;
try {
Cursor c = new Cursor(m_db);
m_db.insert(key, record);
c.moveFirst();
byte[] r = c.getRecord();
assertByteArrayEquals(record, r);
}
catch (DatabaseException err) {
fail("DatabaseException " + err.getMessage());
}
}
public void testOverwrite() {
byte[] key = new byte[10];
byte[] record1 = new byte[10];
byte[] record2 = new byte[10];
record1[0] = 0x14;
record2[0] = 0x15;
try {
Cursor c = new Cursor(m_db);
m_db.insert(key, record1);
c.moveFirst();
c.overwrite(record2);
byte[] r2 = c.getRecord();
assertByteArrayEquals(record2, r2);
}
catch (DatabaseException err) {
fail("DatabaseException "+err.getMessage());
}
}
public void testFind() {
byte[] key = new byte[10];
byte[] record = new byte[10];
key[0] = 0x14;
try {
Cursor c = new Cursor(m_db);
m_db.insert(key, record);
c.find(key);
byte[] k = c.getKey();
byte[] r = c.getRecord();
assertByteArrayEquals(key, k);
assertByteArrayEquals(record, r);
}
catch (DatabaseException err) {
fail("DatabaseException " + err.getMessage());
}
}
public void testInsertByteArrayByteArray() {
byte[] key = new byte[10];
byte[] record = new byte[10];
key[0] = 0x14;
try {
Cursor c = new Cursor(m_db);
c.insert(key, record);
c.find(key);
byte[] k = c.getKey();
byte[] r = c.getRecord();
assertByteArrayEquals(key, k);
assertByteArrayEquals(record, r);
}
catch (DatabaseException err) {
fail("DatabaseException "+err.getMessage());
}
}
public void testInsertByteArrayByteArrayInt() {
byte[] key = new byte[10];
byte[] record = new byte[10];
key[0] = 0x14;
try {
Cursor c = new Cursor(m_db);
c.insert(key, record);
record[0] = 0x14;
c.insert(key, record, Const.UPS_OVERWRITE);
record[0] = 0x15;
c.insert(key, record, Const.UPS_OVERWRITE);
byte[] r = c.getRecord();
assertByteArrayEquals(record, r);
}
catch (DatabaseException err) {
fail("DatabaseException " + err.getMessage());
}
}
public void testErase() {
byte[] key = new byte[10];
byte[] record = new byte[10];
Cursor c = null;
try {
c = new Cursor(m_db);
m_db.insert(key, record);
c.find(key);
c.erase();
}
catch (DatabaseException err) {
fail("DatabaseException " + err.getMessage());
}
try {
c.find(key);
}
catch (DatabaseException err) {
assertEquals(Const.UPS_KEY_NOT_FOUND, err.getErrno());
}
}
public void testGetDuplicateCount() {
byte[] key = new byte[10];
byte[] record = new byte[10];
try {
Cursor c = new Cursor(m_db);
c.insert(key, record, Const.UPS_DUPLICATE);
assertEquals(1, c.getDuplicateCount());
record[0] = 0x14;
c.insert(key, record, Const.UPS_DUPLICATE);
assertEquals(2, c.getDuplicateCount());
record[0] = 0x15;
c.insert(key, record, Const.UPS_DUPLICATE);
assertEquals(3, c.getDuplicateCount());
}
catch (DatabaseException err) {
fail("DatabaseException " + err.getMessage());
}
}
public void testSetComparator() throws Exception {
byte[] k = new byte[5];
byte[] r = new byte[5];
MyComparator cmp = new MyComparator();
Cursor c;
Parameter[] params = new Parameter[1];
params[0] = new Parameter();
params[0].name = Const.UPS_PARAM_KEY_TYPE;
params[0].value = Const.UPS_TYPE_CUSTOM;
try {
tearDown();
m_env = new Environment();
m_env.create("jtest.db");
m_db = m_env.createDatabase((short)1, Const.UPS_ENABLE_DUPLICATE_KEYS,
params);
c = new Cursor(m_db);
m_db.setComparator(cmp);
c.insert(k, r);
k[0] = 1;
c.insert(k, r);
k[0] = 2;
c.insert(k, r);
k[0] = 3;
c.insert(k, r);
k[0] = 4;
}
catch (DatabaseException err) {
fail("Exception " + err);
}
assertEquals(4, cmp.m_counter);
}
}
| |
package hudson.plugins.warnings.parser;
import static org.junit.Assert.*;
import hudson.plugins.analysis.util.model.FileAnnotation;
import hudson.plugins.analysis.util.model.Priority;
import java.io.IOException;
import java.util.Collection;
import java.util.Iterator;
import org.junit.Test;
/**
* Tests the class {@link Gcc4CompilerParser}.
*/
public class Gcc4CompilerParserTest extends ParserTester {
// Assertion message used when warnings are unexpectedly present.
// (Not referenced in the methods visible here; presumably used further down the file.)
private static final String THERE_ARE_WARNINGS_FOUND = "There are warnings found";
// Expected annotation categories produced by the parser.
private static final String WARNING_CATEGORY = "Warning";
private static final String ERROR_CATEGORY = "Error";
// Expected annotation type: the parser group name of Gcc4CompilerParser.
private static final String WARNING_TYPE = new Gcc4CompilerParser().getGroup();
/**
* Parses a file with one warning that are started by ant.
*
* @throws IOException
* if the file could not be read
* @see <a href="http://issues.jenkins-ci.org/browse/JENKINS-9926">Issue 9926</a>
*/
@Test
public void issue9926() throws IOException {
    Collection<FileAnnotation> warnings = new Gcc4CompilerParser().parse(openFile("issue9926.txt"));

    // Exactly one warning, with file, line, message, category and priority intact.
    assertEquals(WRONG_NUMBER_OF_WARNINGS_DETECTED, 1, warnings.size());
    FileAnnotation annotation = warnings.iterator().next();
    checkWarning(annotation, 52, "large integer implicitly truncated to unsigned type",
            "src/test_simple_sgs_message.cxx",
            WARNING_TYPE, WARNING_CATEGORY, Priority.NORMAL);
}
/**
* Parses a warning log with 1 warning.
*
* @throws IOException
* if the file could not be read
* @see <a href="http://issues.jenkins-ci.org/browse/JENKINS-6563">Issue 6563</a>
*/
@Test
public void issue6563() throws IOException {
Collection<FileAnnotation> warnings = new Gcc4CompilerParser().parse(openFile("issue6563.txt"));
assertEquals(WRONG_NUMBER_OF_WARNINGS_DETECTED, 10, warnings.size());
}
/**
* Parses a file with GCC warnings.
*
* @throws IOException
* if the file could not be read
*/
@Test
public void testWarningsParser() throws IOException {
Collection<FileAnnotation> warnings = new Gcc4CompilerParser().parse(openFile());
assertEquals(WRONG_NUMBER_OF_WARNINGS_DETECTED, 13, warnings.size());
Iterator<FileAnnotation> iterator = warnings.iterator();
checkWarning(iterator.next(),
451,
"'void yyunput(int, char*)' defined but not used",
"testhist.l",
WARNING_TYPE, WARNING_CATEGORY, Priority.NORMAL);
checkWarning(iterator.next(),
73,
"implicit typename is deprecated, please see the documentation for details",
"/u1/drjohn/bfdist/packages/RegrTest/V00-03-01/RgtAddressLineScan.cc",
WARNING_TYPE, ERROR_CATEGORY, Priority.HIGH);
checkWarning(iterator.next(),
4,
"foo.h: No such file or directory",
"foo.cc",
WARNING_TYPE, ERROR_CATEGORY, Priority.HIGH);
checkWarning(iterator.next(),
678,
"missing initializer for member sigaltstack::ss_sp",
"../../lib/linux-i686/include/boost/test/impl/execution_monitor.ipp",
WARNING_TYPE, WARNING_CATEGORY, Priority.NORMAL);
checkWarning(iterator.next(),
678,
"missing initializer for member sigaltstack::ss_flags",
"../../lib/linux-i686/include/boost/test/impl/execution_monitor.ipp",
WARNING_TYPE, WARNING_CATEGORY, Priority.NORMAL);
checkWarning(iterator.next(),
678,
"missing initializer for member sigaltstack::ss_size",
"../../lib/linux-i686/include/boost/test/impl/execution_monitor.ipp",
WARNING_TYPE, WARNING_CATEGORY, Priority.NORMAL);
checkWarning(iterator.next(),
52,
"large integer implicitly truncated to unsigned type",
"src/test_simple_sgs_message.cxx",
WARNING_TYPE, WARNING_CATEGORY, Priority.NORMAL);
checkWarning(iterator.next(),
352,
"'s2.mepSector2::lubrications' may be used uninitialized in this function",
"main/mep.cpp",
WARNING_TYPE, WARNING_CATEGORY, Priority.NORMAL);
checkWarning(iterator.next(),
6,
"passing 'Test' chooses 'int' over 'unsigned int'",
"warnings.cc",
WARNING_TYPE, WARNING_CATEGORY, Priority.NORMAL);
checkWarning(iterator.next(),
6,
"in call to 'std::basic_ostream<_CharT, _Traits>& std::basic_ostream<_CharT, _Traits>::operator<<(int) [with _CharT = char, _Traits = std::char_traits<char>]'",
"warnings.cc",
WARNING_TYPE, WARNING_CATEGORY, Priority.NORMAL);
checkWarning(iterator.next(),
33,
"#warning This file includes at least one deprecated or antiquated header which may be removed without further notice at a future date. Please use a non-deprecated interface with equivalent functionality instead. For a listing of replacement headers and interfaces, consult the file backward_warning.h. To disable this warning use -Wno-deprecated.",
"/usr/include/c++/4.3/backward/backward_warning.h",
WARNING_TYPE, WARNING_CATEGORY, Priority.NORMAL);
checkWarning(iterator.next(),
8,
"'bar' was not declared in this scope",
"fo:oo.cpp",
WARNING_TYPE, ERROR_CATEGORY, Priority.HIGH);
checkWarning(iterator.next(),
12,
"expected ';' before 'return'",
"fo:oo.cpp",
WARNING_TYPE, ERROR_CATEGORY, Priority.HIGH);
}
/**
* Parses a warning log with 10 template warnings.
*
* @throws IOException
* if the file could not be read
* @see <a href="http://issues.jenkins-ci.org/browse/JENKINS-5606">Issue 5606</a>
*/
@Test
public void issue5606() throws IOException {
Collection<FileAnnotation> warnings = new Gcc4CompilerParser().parse(openFile("issue5606.txt"));
assertEquals(WRONG_NUMBER_OF_WARNINGS_DETECTED, 10, warnings.size());
}
/**
* Parses a warning log with multi line warnings.
*
* @throws IOException
* if the file could not be read
* @see <a href="http://issues.jenkins-ci.org/browse/JENKINS-5605">Issue 5605</a>
*/
@Test
public void issue5605() throws IOException {
Collection<FileAnnotation> warnings = new Gcc4CompilerParser().parse(openFile("issue5605.txt"));
assertEquals(WRONG_NUMBER_OF_WARNINGS_DETECTED, 6, warnings.size());
}
/**
* Parses a warning log with multi line warnings.
*
* @throws IOException
* if the file could not be read
* @see <a href="http://issues.jenkins-ci.org/browse/JENKINS-5445">Issue 5445</a>
*/
@Test
public void issue5445() throws IOException {
Collection<FileAnnotation> warnings = new Gcc4CompilerParser().parse(openFile("issue5445.txt"));
assertEquals(THERE_ARE_WARNINGS_FOUND, 0, warnings.size());
}
/**
* Parses a warning log with autoconf messages. There should be no warning.
*
* @throws IOException
* if the file could not be read
* @see <a href="http://issues.jenkins-ci.org/browse/JENKINS-5870">Issue 5870</a>
*/
@Test
public void issue5870() throws IOException {
Collection<FileAnnotation> warnings = new Gcc4CompilerParser().parse(openFile("issue5870.txt"));
assertEquals(THERE_ARE_WARNINGS_FOUND, 0, warnings.size());
}
/**
* Classify warnings by gcc 4.6 or later.
*
* @throws IOException
* if the file could not be read
* @see <a href="https://issues.jenkins-ci.org/browse/JENKINS-11799">Issue 11799</a>
*/
@Test
public void issue11799() throws IOException {
Collection<FileAnnotation> warnings = new Gcc4CompilerParser().parse(openFile("issue11799.txt"));
assertEquals(WRONG_NUMBER_OF_WARNINGS_DETECTED, 4, warnings.size());
Iterator<FileAnnotation> iterator = warnings.iterator();
checkWarning(iterator.next(),
4,
"implicit declaration of function 'undeclared_function' [-Wimplicit-function-declaration]",
"gcc4warning.c",
WARNING_TYPE, WARNING_CATEGORY + ":implicit-function-declaration", Priority.NORMAL);
checkWarning(iterator.next(),
3,
"unused variable 'unused_local' [-Wunused-variable]",
"gcc4warning.c",
WARNING_TYPE, WARNING_CATEGORY + ":unused-variable", Priority.NORMAL);
checkWarning(iterator.next(),
1,
"unused parameter 'unused_parameter' [-Wunused-parameter]",
"gcc4warning.c",
WARNING_TYPE, WARNING_CATEGORY + ":unused-parameter", Priority.NORMAL);
checkWarning(iterator.next(),
5,
"control reaches end of non-void function [-Wreturn-type]",
"gcc4warning.c",
WARNING_TYPE, WARNING_CATEGORY + ":return-type", Priority.NORMAL);
}
@Override
protected String getWarningsFile() {
return "gcc4.txt";
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams.integration;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.IntegerDeserializer;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.common.utils.MockTime;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.KafkaStreams.State;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.integration.utils.EmbeddedKafkaCluster;
import org.apache.kafka.streams.integration.utils.IntegrationTestUtils;
import org.apache.kafka.streams.integration.utils.IntegrationTestUtils.TrackingStateRestoreListener;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.processor.ProcessorContext;
import org.apache.kafka.streams.processor.StateRestoreListener;
import org.apache.kafka.streams.processor.TaskId;
import org.apache.kafka.streams.processor.internals.StateDirectory;
import org.apache.kafka.streams.state.KeyValueBytesStoreSupplier;
import org.apache.kafka.streams.state.KeyValueStore;
import org.apache.kafka.streams.state.StoreBuilder;
import org.apache.kafka.streams.state.Stores;
import org.apache.kafka.streams.state.internals.InMemoryKeyValueStore;
import org.apache.kafka.streams.state.internals.KeyValueStoreBuilder;
import org.apache.kafka.streams.state.internals.OffsetCheckpoint;
import org.apache.kafka.test.IntegrationTest;
import org.apache.kafka.test.TestUtils;
import org.hamcrest.CoreMatchers;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import java.io.File;
import java.io.IOException;
import java.time.Duration;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import org.junit.rules.TestName;
import static java.util.Arrays.asList;
import static java.util.Collections.singletonList;
import static org.apache.kafka.streams.integration.utils.IntegrationTestUtils.purgeLocalStreamsState;
import static org.apache.kafka.streams.integration.utils.IntegrationTestUtils.safeUniqueTestName;
import static org.apache.kafka.streams.integration.utils.IntegrationTestUtils.startApplicationAndWaitUntilRunning;
import static org.apache.kafka.streams.integration.utils.IntegrationTestUtils.waitForApplicationState;
import static org.apache.kafka.streams.integration.utils.IntegrationTestUtils.waitForCompletion;
import static org.apache.kafka.streams.integration.utils.IntegrationTestUtils.waitForStandbyCompletion;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.IsEqual.equalTo;
import static org.junit.Assert.assertTrue;
@Category({IntegrationTest.class})
public class RestoreIntegrationTest {
    private static final int NUM_BROKERS = 1;

    public static final EmbeddedKafkaCluster CLUSTER = new EmbeddedKafkaCluster(NUM_BROKERS);

    @BeforeClass
    public static void startCluster() throws IOException {
        CLUSTER.start();
    }

    @AfterClass
    public static void closeCluster() {
        CLUSTER.stop();
    }

    @Rule
    public final TestName testName = new TestName();

    private String appId;
    private String inputStream;

    private final int numberOfKeys = 10000;
    private KafkaStreams kafkaStreams;

    @Before
    public void createTopics() throws InterruptedException {
        appId = safeUniqueTestName(RestoreIntegrationTest.class, testName);
        inputStream = appId + "-input-stream";
        CLUSTER.createTopic(inputStream, 2, 1);
    }

    /** Builds the common streams configuration shared by all tests. */
    private Properties props() {
        final Properties streamsConfiguration = new Properties();
        streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, appId);
        streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
        streamsConfiguration.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, 0);
        streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory(appId).getPath());
        streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.Integer().getClass());
        streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.Integer().getClass());
        streamsConfiguration.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 1000L);
        streamsConfiguration.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        return streamsConfiguration;
    }

    @After
    public void shutdown() {
        if (kafkaStreams != null) {
            kafkaStreams.close(Duration.ofSeconds(30));
        }
    }

    @Test
    public void shouldRestoreStateFromSourceTopic() throws Exception {
        final AtomicInteger numReceived = new AtomicInteger(0);
        final StreamsBuilder builder = new StreamsBuilder();

        final Properties props = props();
        props.put(StreamsConfig.TOPOLOGY_OPTIMIZATION_CONFIG, StreamsConfig.OPTIMIZE);

        // restoring from 1000 to 4000 (committed), and then process from 4000 to 5000 on each of the two partitions
        final int offsetLimitDelta = 1000;
        final int offsetCheckpointed = 1000;
        createStateForRestoration(inputStream, 0);
        setCommittedOffset(inputStream, offsetLimitDelta);

        final StateDirectory stateDirectory = new StateDirectory(new StreamsConfig(props), new MockTime(), true, false);
        // note here the checkpointed offset is the last processed record's offset, so without control message we should write this offset - 1
        new OffsetCheckpoint(new File(stateDirectory.getOrCreateDirectoryForTask(new TaskId(0, 0)), ".checkpoint"))
                .write(Collections.singletonMap(new TopicPartition(inputStream, 0), (long) offsetCheckpointed - 1));
        new OffsetCheckpoint(new File(stateDirectory.getOrCreateDirectoryForTask(new TaskId(0, 1)), ".checkpoint"))
                .write(Collections.singletonMap(new TopicPartition(inputStream, 1), (long) offsetCheckpointed - 1));

        final CountDownLatch startupLatch = new CountDownLatch(1);
        final CountDownLatch shutdownLatch = new CountDownLatch(1);

        builder.table(inputStream, Materialized.<Integer, Integer, KeyValueStore<Bytes, byte[]>>as("store").withKeySerde(Serdes.Integer()).withValueSerde(Serdes.Integer()))
                .toStream()
                .foreach((key, value) -> {
                    if (numReceived.incrementAndGet() == offsetLimitDelta * 2) {
                        shutdownLatch.countDown();
                    }
                });

        kafkaStreams = new KafkaStreams(builder.build(props), props);
        kafkaStreams.setStateListener((newState, oldState) -> {
            if (newState == KafkaStreams.State.RUNNING && oldState == KafkaStreams.State.REBALANCING) {
                startupLatch.countDown();
            }
        });

        final AtomicLong restored = new AtomicLong(0);
        kafkaStreams.setGlobalStateRestoreListener(new StateRestoreListener() {
            @Override
            public void onRestoreStart(final TopicPartition topicPartition, final String storeName, final long startingOffset, final long endingOffset) {
            }

            @Override
            public void onBatchRestored(final TopicPartition topicPartition, final String storeName, final long batchEndOffset, final long numRestored) {
            }

            @Override
            public void onRestoreEnd(final TopicPartition topicPartition, final String storeName, final long totalRestored) {
                restored.addAndGet(totalRestored);
            }
        });
        kafkaStreams.start();

        assertTrue(startupLatch.await(30, TimeUnit.SECONDS));
        assertThat(restored.get(), equalTo((long) numberOfKeys - offsetLimitDelta * 2 - offsetCheckpointed * 2));

        assertTrue(shutdownLatch.await(30, TimeUnit.SECONDS));
        assertThat(numReceived.get(), equalTo(offsetLimitDelta * 2));
    }

    @Test
    public void shouldRestoreStateFromChangelogTopic() throws Exception {
        final String changelog = appId + "-store-changelog";
        CLUSTER.createTopic(changelog, 2, 1);

        final AtomicInteger numReceived = new AtomicInteger(0);
        final StreamsBuilder builder = new StreamsBuilder();

        final Properties props = props();

        // restoring from 1000 to 5000, and then process from 5000 to 10000 on each of the two partitions
        final int offsetCheckpointed = 1000;
        createStateForRestoration(changelog, 0);
        createStateForRestoration(inputStream, 10000);

        final StateDirectory stateDirectory = new StateDirectory(new StreamsConfig(props), new MockTime(), true, false);
        // note here the checkpointed offset is the last processed record's offset, so without control message we should write this offset - 1
        new OffsetCheckpoint(new File(stateDirectory.getOrCreateDirectoryForTask(new TaskId(0, 0)), ".checkpoint"))
                .write(Collections.singletonMap(new TopicPartition(changelog, 0), (long) offsetCheckpointed - 1));
        new OffsetCheckpoint(new File(stateDirectory.getOrCreateDirectoryForTask(new TaskId(0, 1)), ".checkpoint"))
                .write(Collections.singletonMap(new TopicPartition(changelog, 1), (long) offsetCheckpointed - 1));

        final CountDownLatch startupLatch = new CountDownLatch(1);
        final CountDownLatch shutdownLatch = new CountDownLatch(1);

        builder.table(inputStream, Consumed.with(Serdes.Integer(), Serdes.Integer()), Materialized.as("store"))
                .toStream()
                .foreach((key, value) -> {
                    if (numReceived.incrementAndGet() == numberOfKeys) {
                        shutdownLatch.countDown();
                    }
                });

        kafkaStreams = new KafkaStreams(builder.build(), props);
        kafkaStreams.setStateListener((newState, oldState) -> {
            if (newState == KafkaStreams.State.RUNNING && oldState == KafkaStreams.State.REBALANCING) {
                startupLatch.countDown();
            }
        });

        final AtomicLong restored = new AtomicLong(0);
        kafkaStreams.setGlobalStateRestoreListener(new StateRestoreListener() {
            @Override
            public void onRestoreStart(final TopicPartition topicPartition, final String storeName, final long startingOffset, final long endingOffset) {
            }

            @Override
            public void onBatchRestored(final TopicPartition topicPartition, final String storeName, final long batchEndOffset, final long numRestored) {
            }

            @Override
            public void onRestoreEnd(final TopicPartition topicPartition, final String storeName, final long totalRestored) {
                restored.addAndGet(totalRestored);
            }
        });
        kafkaStreams.start();

        assertTrue(startupLatch.await(30, TimeUnit.SECONDS));
        assertThat(restored.get(), equalTo((long) numberOfKeys - 2 * offsetCheckpointed));

        assertTrue(shutdownLatch.await(30, TimeUnit.SECONDS));
        assertThat(numReceived.get(), equalTo(numberOfKeys));
    }

    @Test
    public void shouldSuccessfullyStartWhenLoggingDisabled() throws InterruptedException {
        final StreamsBuilder builder = new StreamsBuilder();

        final KStream<Integer, Integer> stream = builder.stream(inputStream);
        stream.groupByKey()
                .reduce(
                        (value1, value2) -> value1 + value2,
                        Materialized.<Integer, Integer, KeyValueStore<Bytes, byte[]>>as("reduce-store").withLoggingDisabled());

        final CountDownLatch startupLatch = new CountDownLatch(1);
        kafkaStreams = new KafkaStreams(builder.build(), props());
        kafkaStreams.setStateListener((newState, oldState) -> {
            if (newState == KafkaStreams.State.RUNNING && oldState == KafkaStreams.State.REBALANCING) {
                startupLatch.countDown();
            }
        });
        kafkaStreams.start();

        assertTrue(startupLatch.await(30, TimeUnit.SECONDS));
    }

    @SuppressWarnings("deprecation") // Old PAPI. Needs to be migrated.
    @Test
    public void shouldProcessDataFromStoresWithLoggingDisabled() throws InterruptedException {
        IntegrationTestUtils.produceKeyValuesSynchronously(inputStream,
                asList(KeyValue.pair(1, 1),
                        KeyValue.pair(2, 2),
                        KeyValue.pair(3, 3)),
                TestUtils.producerConfig(CLUSTER.bootstrapServers(),
                        IntegerSerializer.class,
                        IntegerSerializer.class),
                CLUSTER.time);

        final KeyValueBytesStoreSupplier lruMapSupplier = Stores.lruMap(inputStream, 10);
        final StoreBuilder<KeyValueStore<Integer, Integer>> storeBuilder = new KeyValueStoreBuilder<>(lruMapSupplier,
                Serdes.Integer(),
                Serdes.Integer(),
                CLUSTER.time)
                .withLoggingDisabled();

        final StreamsBuilder streamsBuilder = new StreamsBuilder();
        streamsBuilder.addStateStore(storeBuilder);

        final KStream<Integer, Integer> stream = streamsBuilder.stream(inputStream);
        final CountDownLatch processorLatch = new CountDownLatch(3);
        stream.process(() -> new KeyValueStoreProcessor(inputStream, processorLatch), inputStream);

        final Topology topology = streamsBuilder.build();

        kafkaStreams = new KafkaStreams(topology, props());

        final CountDownLatch latch = new CountDownLatch(1);
        kafkaStreams.setStateListener((newState, oldState) -> {
            if (newState == KafkaStreams.State.RUNNING && oldState == KafkaStreams.State.REBALANCING) {
                latch.countDown();
            }
        });
        kafkaStreams.start();
        latch.await(30, TimeUnit.SECONDS);

        assertTrue(processorLatch.await(30, TimeUnit.SECONDS));
    }

    @Test
    public void shouldRecycleStateFromStandbyTaskPromotedToActiveTaskAndNotRestore() throws Exception {
        final StreamsBuilder builder = new StreamsBuilder();
        builder.table(
                inputStream,
                Consumed.with(Serdes.Integer(), Serdes.Integer()), Materialized.as(getCloseCountingStore("store"))
        );
        createStateForRestoration(inputStream, 0);

        final Properties props1 = props();
        props1.put(StreamsConfig.NUM_STANDBY_REPLICAS_CONFIG, 1);
        props1.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory(appId + "-1").getPath());
        purgeLocalStreamsState(props1);
        final KafkaStreams client1 = new KafkaStreams(builder.build(), props1);

        final Properties props2 = props();
        props2.put(StreamsConfig.NUM_STANDBY_REPLICAS_CONFIG, 1);
        props2.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory(appId + "-2").getPath());
        purgeLocalStreamsState(props2);
        final KafkaStreams client2 = new KafkaStreams(builder.build(), props2);

        final TrackingStateRestoreListener restoreListener = new TrackingStateRestoreListener();
        client1.setGlobalStateRestoreListener(restoreListener);

        startApplicationAndWaitUntilRunning(asList(client1, client2), Duration.ofSeconds(60));

        waitForCompletion(client1, 1, 30 * 1000L);
        waitForCompletion(client2, 1, 30 * 1000L);
        waitForStandbyCompletion(client1, 1, 30 * 1000L);
        waitForStandbyCompletion(client2, 1, 30 * 1000L);

        // Sometimes the store happens to have already been closed sometime during startup, so just keep track
        // of where it started and make sure it doesn't happen more times from there
        final int initialStoreCloseCount = CloseCountingInMemoryStore.numStoresClosed();
        final long initialNumRestoredCount = restoreListener.totalNumRestored();

        client2.close();
        waitForApplicationState(singletonList(client2), State.NOT_RUNNING, Duration.ofSeconds(60));
        waitForApplicationState(singletonList(client1), State.REBALANCING, Duration.ofSeconds(60));
        waitForApplicationState(singletonList(client1), State.RUNNING, Duration.ofSeconds(60));

        waitForCompletion(client1, 1, 30 * 1000L);
        waitForStandbyCompletion(client1, 1, 30 * 1000L);

        // the standby was promoted to active without re-restoring any records
        assertThat(restoreListener.totalNumRestored(), CoreMatchers.equalTo(initialNumRestoredCount));

        // After stopping instance 2 and letting instance 1 take over its tasks, we should have closed just two stores
        // total: the active and standby tasks on instance 2
        assertThat(CloseCountingInMemoryStore.numStoresClosed(), equalTo(initialStoreCloseCount + 2));

        client1.close();
        // BUG FIX: previously waited on client2 (already NOT_RUNNING above),
        // so client1's shutdown was never awaited before the final assertion.
        waitForApplicationState(singletonList(client1), State.NOT_RUNNING, Duration.ofSeconds(60));

        assertThat(CloseCountingInMemoryStore.numStoresClosed(), CoreMatchers.equalTo(initialStoreCloseCount + 4));
    }

    /** Wraps an in-memory store so tests can count how often it is closed. */
    private static KeyValueBytesStoreSupplier getCloseCountingStore(final String name) {
        return new KeyValueBytesStoreSupplier() {
            @Override
            public String name() {
                return name;
            }

            @Override
            public KeyValueStore<Bytes, byte[]> get() {
                return new CloseCountingInMemoryStore(name);
            }

            @Override
            public String metricsScope() {
                return "close-counting";
            }
        };
    }

    static class CloseCountingInMemoryStore extends InMemoryKeyValueStore {
        static AtomicInteger numStoresClosed = new AtomicInteger(0);

        CloseCountingInMemoryStore(final String name) {
            super(name);
        }

        @Override
        public void close() {
            numStoresClosed.incrementAndGet();
            super.close();
        }

        static int numStoresClosed() {
            return numStoresClosed.get();
        }
    }

    @SuppressWarnings("deprecation") // Old PAPI. Needs to be migrated.
    public static class KeyValueStoreProcessor implements org.apache.kafka.streams.processor.Processor<Integer, Integer> {

        private final String topic;
        private final CountDownLatch processorLatch;

        private KeyValueStore<Integer, Integer> store;

        KeyValueStoreProcessor(final String topic, final CountDownLatch processorLatch) {
            this.topic = topic;
            this.processorLatch = processorLatch;
        }

        @SuppressWarnings("unchecked")
        @Override
        public void init(final ProcessorContext context) {
            this.store = (KeyValueStore<Integer, Integer>) context.getStateStore(topic);
        }

        @Override
        public void process(final Integer key, final Integer value) {
            if (key != null) {
                store.put(key, value);
                processorLatch.countDown();
            }
        }

        @Override
        public void close() { }
    }

    /** Produces {@code numberOfKeys} records into the given topic starting at the given value. */
    private void createStateForRestoration(final String changelogTopic, final int startingOffset) {
        final Properties producerConfig = new Properties();
        producerConfig.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());

        try (final KafkaProducer<Integer, Integer> producer =
                     new KafkaProducer<>(producerConfig, new IntegerSerializer(), new IntegerSerializer())) {
            for (int i = 0; i < numberOfKeys; i++) {
                final int offset = startingOffset + i;
                producer.send(new ProducerRecord<>(changelogTopic, offset, offset));
            }
        }
    }

    /** Commits offsets {@code limitDelta} records before the log end for both partitions of the topic. */
    private void setCommittedOffset(final String topic, final int limitDelta) {
        final Properties consumerConfig = new Properties();
        consumerConfig.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
        consumerConfig.put(ConsumerConfig.GROUP_ID_CONFIG, appId);
        consumerConfig.put(ConsumerConfig.CLIENT_ID_CONFIG, "commit-consumer");
        consumerConfig.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class);
        consumerConfig.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class);

        // try-with-resources so the consumer is closed even if seeking/committing throws
        try (final Consumer<Integer, Integer> consumer = new KafkaConsumer<>(consumerConfig)) {
            final List<TopicPartition> partitions = asList(
                    new TopicPartition(topic, 0),
                    new TopicPartition(topic, 1));

            consumer.assign(partitions);
            consumer.seekToEnd(partitions);

            for (final TopicPartition partition : partitions) {
                final long position = consumer.position(partition);
                consumer.seek(partition, position - limitDelta);
            }

            consumer.commitSync();
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.vfs2.provider.sftp;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.Vector;
import org.apache.commons.vfs2.FileNotFoundException;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystemException;
import org.apache.commons.vfs2.FileType;
import org.apache.commons.vfs2.NameScope;
import org.apache.commons.vfs2.RandomAccessContent;
import org.apache.commons.vfs2.VFS;
import org.apache.commons.vfs2.provider.AbstractFileName;
import org.apache.commons.vfs2.provider.AbstractFileObject;
import org.apache.commons.vfs2.provider.UriParser;
import org.apache.commons.vfs2.util.FileObjectUtils;
import org.apache.commons.vfs2.util.MonitorInputStream;
import org.apache.commons.vfs2.util.MonitorOutputStream;
import org.apache.commons.vfs2.util.RandomAccessMode;
import com.jcraft.jsch.ChannelSftp;
import com.jcraft.jsch.ChannelSftp.LsEntry;
import com.jcraft.jsch.SftpATTRS;
import com.jcraft.jsch.SftpException;
/**
* An SFTP file.
*
* @author <a href="http://commons.apache.org/vfs/team-list.html">Commons VFS team</a>
* @version $Revision$ $Date: 2005-10-14 19:59:47 +0200 (Fr, 14 Okt
* 2005) $
*/
public class SftpFileObject extends AbstractFileObject implements FileObject
{
private final SftpFileSystem fileSystem;
private SftpATTRS attrs;
private final String relPath;
private boolean inRefresh;
/**
 * Creates the file object and caches this file's path relative to the
 * file-system root (URI-decoded, since JSch works on raw path strings).
 */
protected SftpFileObject(final AbstractFileName name,
        final SftpFileSystem fileSystem) throws FileSystemException
{
    super(name, fileSystem);
    this.fileSystem = fileSystem;
    relPath = UriParser.decode(fileSystem.getRootName().getRelativeName(
            name));
}
/** @since 2.0 */
@Override
protected void doDetach() throws Exception
{
    // drop the cached attributes; they are re-fetched on the next stat
    attrs = null;
}
/** @since 2.0 */
@Override
public void refresh() throws FileSystemException
{
    // Guard clause: getType() below may trigger a nested refresh; the flag
    // makes such re-entrant calls a no-op.
    if (inRefresh)
    {
        return;
    }
    try
    {
        inRefresh = true;
        super.refresh();
        try
        {
            // invalidate the cached attributes and force a fresh stat
            attrs = null;
            getType();
        }
        catch (IOException e)
        {
            throw new FileSystemException(e);
        }
    }
    finally
    {
        inRefresh = false;
    }
}
/**
 * Determines the type of this file, returns null if the file does not
 * exist.
 */
@Override
protected FileType doGetType() throws Exception
{
    if (attrs == null)
    {
        statSelf();
    }

    // still no attributes after the stat: the file does not exist
    if (attrs == null)
    {
        return FileType.IMAGINARY;
    }

    // without the permissions flag we cannot tell file from folder
    final boolean hasPermissions =
        (attrs.getFlags() & SftpATTRS.SSH_FILEXFER_ATTR_PERMISSIONS) != 0;
    if (!hasPermissions)
    {
        throw new FileSystemException(
            "vfs.provider.sftp/unknown-permissions.error");
    }

    return attrs.isDir() ? FileType.FOLDER : FileType.FILE;
}
/**
 * Called when the type or content of this file changes.
 */
@Override
protected void onChange() throws Exception
{
    // re-fetch the attributes so cached type/size/modtime stay accurate
    statSelf();
}
/**
 * Fetches file attrs from server.
 *
 * On a stat failure that is not "no such file", the channel is assumed
 * broken: it is disconnected, a fresh channel is obtained and the stat is
 * retried once. Any remaining failure leaves {@code attrs} as null, which
 * {@link #doGetType} interprets as a non-existent file.
 */
private void statSelf() throws Exception
{
    ChannelSftp channel = fileSystem.getChannel();
    try
    {
        setStat(channel.stat(relPath));
    }
    catch (final SftpException e)
    {
        try
        {
            // maybe the channel has some problems, so recreate the channel and retry
            if (e.id != ChannelSftp.SSH_FX_NO_SUCH_FILE)
            {
                channel.disconnect();
                channel = fileSystem.getChannel();
                setStat(channel.stat(relPath));
            }
            else
            {
                // Really does not exist
                attrs = null;
            }
        }
        catch (final SftpException e2)
        {
            // TODO - not strictly true, but jsch 0.1.2 does not give us
            // enough info in the exception. Should be using:
            // if ( e.id == ChannelSftp.SSH_FX_NO_SUCH_FILE )
            // However, sometimes the exception has the correct id, and
            // sometimes
            // it does not. Need to look into why.

            // Does not exist
            attrs = null;
        }
    }
    finally
    {
        // always return the (possibly recreated) channel to the pool
        fileSystem.putChannel(channel);
    }
}
/**
 * Set attrs from listChildrenResolved
 *
 * Caches the server-reported attributes (the parameter intentionally
 * shadows the field).
 */
private void setStat(SftpATTRS attrs)
{
    this.attrs = attrs;
}
/**
 * Creates this file as a folder.
 */
@Override
protected void doCreateFolder() throws Exception
{
    final ChannelSftp channel = fileSystem.getChannel();
    try
    {
        channel.mkdir(relPath);
    }
    finally
    {
        // return the channel to the pool in every case
        fileSystem.putChannel(channel);
    }
}
/**
 * Returns the last-modified time in milliseconds, as reported by the
 * server (SFTP transmits seconds).
 */
@Override
protected long doGetLastModifiedTime() throws Exception
{
    // The modification time is only meaningful when the server reported
    // the ACMODTIME attribute flag.
    final boolean modTimeKnown = attrs != null
        && (attrs.getFlags() & SftpATTRS.SSH_FILEXFER_ATTR_ACMODTIME) != 0;
    if (!modTimeKnown)
    {
        throw new FileSystemException(
            "vfs.provider.sftp/unknown-modtime.error");
    }
    // seconds -> milliseconds
    return attrs.getMTime() * 1000L;
}
/**
 * Sets the last modified time of this file. Is only called if
 * {@link #doGetType} does not return {@link FileType#IMAGINARY}. <p/>
 *
 * @param modtime
 *            is modification time in milliseconds. SFTP protocol can send
 *            times with nanosecond precision but at the moment jsch send
 *            them with second precision.
 */
@Override
protected boolean doSetLastModifiedTime(final long modtime) throws Exception
{
    final ChannelSftp channel = fileSystem.getChannel();
    try
    {
        // SFTP stores seconds; truncate the millisecond value.
        // NOTE(review): assumes attrs is non-null here (file was stat'ed,
        // per the doGetType contract above) — otherwise this NPEs; confirm.
        int newMTime = (int) (modtime / 1000L);

        attrs.setACMODTIME(attrs.getATime(), newMTime);
        channel.setStat(relPath, attrs);
    }
    finally
    {
        fileSystem.putChannel(channel);
    }
    return true;
}
/**
 * Deletes the file.
 */
@Override
protected void doDelete() throws Exception
{
    final ChannelSftp channel = fileSystem.getChannel();
    try
    {
        // plain files and directories require different SFTP requests
        final boolean isPlainFile = getType() == FileType.FILE;
        if (isPlainFile)
        {
            channel.rm(relPath);
        }
        else
        {
            channel.rmdir(relPath);
        }
    }
    finally
    {
        // return the channel to the pool in every case
        fileSystem.putChannel(channel);
    }
}
/**
 * Rename the file.
 */
@Override
protected void doRename(FileObject newfile) throws Exception
{
    final ChannelSftp channel = fileSystem.getChannel();
    try
    {
        // the target is another SftpFileObject on the same file system,
        // so its decoded relative path can be used directly
        channel.rename(relPath, ((SftpFileObject) newfile).relPath);
    }
    finally
    {
        fileSystem.putChannel(channel);
    }
}
/**
 * Lists the children of this file.
 *
 * Fetches the whole directory listing in one ls() round-trip and caches the
 * returned attributes on each resolved child via setStat(), so children do
 * not each need a separate stat() call later.
 *
 * @return the resolved children, or null when this file turns out not to be
 *         a directory (VFS-210).
 */
@Override
protected FileObject[] doListChildrenResolved() throws Exception
{
    // List the contents of the folder
    Vector<?> vector = null;
    final ChannelSftp channel = fileSystem.getChannel();
    try
    {
        // try the direct way to list the directory on the server to avoid too many roundtrips
        vector = channel.ls(relPath);
    }
    catch (SftpException e)
    {
        // Fallback: some servers refuse ls(path). cd into the directory,
        // list ".", then restore the previous working directory.
        String workingDirectory = null;
        try
        {
            if (relPath != null)
            {
                workingDirectory = channel.pwd();
                channel.cd(relPath);
            }
        }
        catch (SftpException ex)
        {
            // VFS-210: seems not to be a directory
            return null;
        }
        SftpException lsEx = null;
        try
        {
            vector = channel.ls(".");
        }
        catch (SftpException ex)
        {
            // Remember the failure; it is rethrown only after the working
            // directory has been restored in the finally block below.
            lsEx = ex;
        }
        finally
        {
            try
            {
                if (relPath != null)
                {
                    channel.cd(workingDirectory);
                }
            }
            catch (SftpException xe)
            {
                throw new FileSystemException("vfs.provider.sftp/change-work-directory-back.error",
                    workingDirectory, lsEx);
            }
        }
        if (lsEx != null)
        {
            throw lsEx;
        }
    }
    finally
    {
        fileSystem.putChannel(channel);
    }
    if (vector == null)
    {
        throw new FileSystemException(
            "vfs.provider.sftp/list-children.error");
    }
    // Extract the child names
    final ArrayList<FileObject> children = new ArrayList<FileObject>();
    for (@SuppressWarnings("unchecked") // OK because ChannelSftp.ls() is documented to return Vector<LsEntry>
        Iterator<LsEntry> iterator = (Iterator<LsEntry>) vector.iterator(); iterator.hasNext();)
    {
        final LsEntry stat = iterator.next();
        String name = stat.getFilename();
        if (VFS.isUriStyle())
        {
            // In URI style, directory names must carry a trailing slash.
            if (stat.getAttrs().isDir()
                && name.charAt(name.length() - 1) != '/')
            {
                name = name + "/";
            }
        }
        // Skip the self/parent entries the server includes in the listing.
        if (name.equals(".") || name.equals("..") || name.equals("./")
            || name.equals("../"))
        {
            continue;
        }
        FileObject fo =
            getFileSystem()
                .resolveFile(
                    getFileSystem().getFileSystemManager().resolveName(
                        getName(), UriParser.encode(name),
                        NameScope.CHILD));
        // Push the attributes we already have onto the child, avoiding a
        // later stat() round-trip.
        ((SftpFileObject) FileObjectUtils.getAbstractFileObject(fo)).setStat(stat.getAttrs());
        children.add(fo);
    }
    return children.toArray(new FileObject[children.size()]);
}
/**
 * Lists the children of this file.
 *
 * @return null always: children are produced (together with their attributes)
 *         by {@link #doListChildrenResolved()} instead, which saves one
 *         round-trip per child.
 */
@Override
protected String[] doListChildren() throws Exception
{
    // use doListChildrenResolved for performance
    return null;
}
/**
 * Returns the size of the file content (in bytes).
 *
 * @throws FileSystemException if no attributes are cached, or the server did
 *         not include the SIZE flag in them.
 */
@Override
protected long doGetContentSize() throws Exception
{
    final boolean haveSize = attrs != null
        && (attrs.getFlags() & SftpATTRS.SSH_FILEXFER_ATTR_SIZE) != 0;
    if (!haveSize)
    {
        throw new FileSystemException(
            "vfs.provider.sftp/unknown-size.error");
    }
    return attrs.getSize();
}
/**
 * Creates a random-access view of this file's content.
 *
 * @param mode the requested access mode.
 */
@Override
protected RandomAccessContent doGetRandomAccessContent(
    final RandomAccessMode mode) throws Exception
{
    return new SftpRandomAccessContent(this, mode);
}
/**
 * Creates an input stream positioned at {@code filePointer}.
 *
 * The remainder of the remote file is downloaded into memory via
 * ChannelSftp.get(..., RESUME, filePointer) and then served from a
 * ByteArrayInputStream.
 *
 * @param filePointer byte offset at which reading starts.
 * @throws IOException if the transfer fails.
 */
InputStream getInputStream(long filePointer) throws IOException
{
    final ChannelSftp channel = fileSystem.getChannel();
    try
    {
        // hmmm - using the in memory method is soooo much faster ...
        // TODO - Don't read the entire file into memory. Use the
        // stream-based methods on ChannelSftp once they work properly final
        // .... no stream based method with resume???
        ByteArrayOutputStream outstr = new ByteArrayOutputStream();
        try
        {
            // RESUME with a non-zero offset makes the server start sending
            // at filePointer instead of at the start of the file.
            channel.get(getName().getPathDecoded(), outstr, null,
                ChannelSftp.RESUME, filePointer);
        }
        catch (SftpException e)
        {
            throw new FileSystemException(e);
        }
        outstr.close();
        return new ByteArrayInputStream(outstr.toByteArray());
    }
    finally
    {
        fileSystem.putChannel(channel);
    }
}
/**
 * Creates an input stream to read the file content from.
 *
 * The borrowed channel is handed to the returned SftpInputStream, which
 * returns it to the pool when the stream is closed — hence the deliberately
 * empty finally block below.
 */
@Override
protected InputStream doGetInputStream() throws Exception
{
    // VFS-113: avoid npe
    synchronized (fileSystem)
    {
        final ChannelSftp channel = fileSystem.getChannel();
        try
        {
            // return channel.get(getName().getPath());
            // hmmm - using the in memory method is soooo much faster ...
            // TODO - Don't read the entire file into memory. Use the
            // stream-based methods on ChannelSftp once they work properly
            /*
            final ByteArrayOutputStream outstr = new ByteArrayOutputStream();
            channel.get(relPath, outstr);
            outstr.close();
            return new ByteArrayInputStream(outstr.toByteArray());
            */
            InputStream is;
            try
            {
                // VFS-210: sftp allows to gather an input stream even from a directory and will
                // fail on first read. So we need to check the type anyway
                if (!getType().hasContent())
                {
                    throw new FileSystemException("vfs.provider/read-not-file.error", getName());
                }
                is = channel.get(relPath);
            }
            catch (SftpException e)
            {
                if (e.id == ChannelSftp.SSH_FX_NO_SUCH_FILE)
                {
                    throw new FileNotFoundException(getName());
                }
                throw new FileSystemException(e);
            }
            return new SftpInputStream(channel, is);
        }
        finally
        {
            // Channel ownership has passed to the SftpInputStream; its
            // onClose() calls fileSystem.putChannel(channel). Returning it
            // here as well would release it twice.
            // fileSystem.putChannel(channel);
        }
    }
}
/**
 * Creates an output stream to write the file content to.
 *
 * The returned stream owns the borrowed channel; SftpOutputStream returns it
 * to the pool when the stream is closed.
 *
 * @param bAppend true to append to existing content, false to overwrite.
 */
@Override
protected OutputStream doGetOutputStream(boolean bAppend) throws Exception
{
    // TODO - Don't write the entire file into memory. Use the stream-based
    // methods on ChannelSftp once the work properly
    final ChannelSftp channel = fileSystem.getChannel();
    try
    {
        // Fix: honour the append flag. Previously bAppend was ignored and
        // the remote file was always overwritten.
        return new SftpOutputStream(channel,
            channel.put(relPath,
                bAppend ? ChannelSftp.APPEND : ChannelSftp.OVERWRITE));
    }
    catch (final Exception e)
    {
        // put() failed, so no stream will ever release this channel —
        // return it to the pool instead of leaking it.
        fileSystem.putChannel(channel);
        throw e;
    }
}
/**
 * An InputStream that monitors for end-of-file.
 *
 * The stream takes ownership of the pooled channel that was used to open it
 * and returns the channel to the file system's pool when closed.
 */
private class SftpInputStream extends MonitorInputStream
{
    // Channel the wrapped stream reads from; released on close.
    private final ChannelSftp channel;
    public SftpInputStream(final ChannelSftp channel, final InputStream in)
    {
        super(in);
        this.channel = channel;
    }
    /**
     * Called after the stream has been closed.
     */
    @Override
    protected void onClose() throws IOException
    {
        fileSystem.putChannel(channel);
    }
}
/**
 * An OutputStream that wraps an sftp OutputStream, and closes the channel
 * when the stream is closed.
 *
 * As with SftpInputStream, the stream owns the pooled channel used to open
 * it and releases it back to the file system's pool on close.
 */
private class SftpOutputStream extends MonitorOutputStream
{
    // Channel the wrapped stream writes to; released on close.
    private final ChannelSftp channel;
    public SftpOutputStream(final ChannelSftp channel, OutputStream out)
    {
        super(out);
        this.channel = channel;
    }
    /**
     * Called after this stream is closed.
     */
    @Override
    protected void onClose() throws IOException
    {
        fileSystem.putChannel(channel);
    }
}
}
| |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
/**
* InstanceStatusDetailsSetType.java
*
* This file was auto-generated from WSDL
* by the Apache Axis2 version: 1.5.6 Built on : Aug 30, 2011 (10:01:01 CEST)
*/
package com.amazon.ec2;
/**
 * InstanceStatusDetailsSetType bean class.
 *
 * Auto-generated ADB (Axis2 Data Binding) bean produced from the EC2 WSDL by
 * Apache Axis2 1.5.6. Do not hand-edit — changes will be lost when the stubs
 * are regenerated. The bean wraps an array of
 * InstanceStatusDetailsSetItemType "item" elements and knows how to serialize
 * itself to XML and parse itself back (see the inner Factory class).
 */
public class InstanceStatusDetailsSetType
    implements org.apache.axis2.databinding.ADBBean{
    /* This type was generated from the piece of schema that had
    name = InstanceStatusDetailsSetType
    Namespace URI = http://ec2.amazonaws.com/doc/2012-08-15/
    Namespace Prefix = ns1
    */

    // Returns the canonical prefix for the EC2 namespace, or a fresh unique
    // prefix for any other namespace.
    private static java.lang.String generatePrefix(java.lang.String namespace) {
        if(namespace.equals("http://ec2.amazonaws.com/doc/2012-08-15/")){
            return "ns1";
        }
        return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
    }

    /**
     * field for Item
     * This was an Array!
     */
    protected com.amazon.ec2.InstanceStatusDetailsSetItemType[] localItem ;

    /* This tracker boolean wil be used to detect whether the user called the set method
     * for this attribute. It will be used to determine whether to include this field
     * in the serialized XML
     */
    protected boolean localItemTracker = false ;

    /**
     * Auto generated getter method
     * @return com.amazon.ec2.InstanceStatusDetailsSetItemType[]
     */
    public com.amazon.ec2.InstanceStatusDetailsSetItemType[] getItem(){
        return localItem;
    }

    /**
     * validate the array for Item
     */
    protected void validateItem(com.amazon.ec2.InstanceStatusDetailsSetItemType[] param){
    }

    /**
     * Auto generated setter method
     * @param param Item
     */
    public void setItem(com.amazon.ec2.InstanceStatusDetailsSetItemType[] param){
        validateItem(param);
        if (param != null){
            //update the setting tracker
            localItemTracker = true;
        } else {
            localItemTracker = false;
        }
        this.localItem=param;
    }

    /**
     * Auto generated add method for the array for convenience
     * @param param com.amazon.ec2.InstanceStatusDetailsSetItemType
     */
    public void addItem(com.amazon.ec2.InstanceStatusDetailsSetItemType param){
        if (localItem == null){
            localItem = new com.amazon.ec2.InstanceStatusDetailsSetItemType[]{};
        }
        //update the setting tracker
        localItemTracker = true;
        java.util.List list =
            org.apache.axis2.databinding.utils.ConverterUtil.toList(localItem);
        list.add(param);
        this.localItem =
            (com.amazon.ec2.InstanceStatusDetailsSetItemType[])list.toArray(
                new com.amazon.ec2.InstanceStatusDetailsSetItemType[list.size()]);
    }

    /**
     * isReaderMTOMAware
     * @return true if the reader supports MTOM
     */
    public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) {
        boolean isReaderMTOMAware = false;
        try{
            isReaderMTOMAware = java.lang.Boolean.TRUE.equals(reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE));
        }catch(java.lang.IllegalArgumentException e){
            // Reader does not know the property at all; treat as not MTOM-aware.
            isReaderMTOMAware = false;
        }
        return isReaderMTOMAware;
    }

    /**
     * Returns a lazily-serialized OM element backed by this bean.
     *
     * @param parentQName
     * @param factory
     * @return org.apache.axiom.om.OMElement
     */
    public org.apache.axiom.om.OMElement getOMElement (
        final javax.xml.namespace.QName parentQName,
        final org.apache.axiom.om.OMFactory factory) throws org.apache.axis2.databinding.ADBException{
        org.apache.axiom.om.OMDataSource dataSource =
            new org.apache.axis2.databinding.ADBDataSource(this,parentQName){
                public void serialize(org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
                    InstanceStatusDetailsSetType.this.serialize(parentQName,factory,xmlWriter);
                }
            };
        return new org.apache.axiom.om.impl.llom.OMSourcedElementImpl(
            parentQName,factory,dataSource);
    }

    // Convenience overload: serialize without an xsi:type attribute.
    public void serialize(final javax.xml.namespace.QName parentQName,
        final org.apache.axiom.om.OMFactory factory,
        org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
        throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
        serialize(parentQName,factory,xmlWriter,false);
    }

    // Writes this bean as an XML element named parentQName; when
    // serializeType is true an xsi:type attribute is emitted as well.
    public void serialize(final javax.xml.namespace.QName parentQName,
        final org.apache.axiom.om.OMFactory factory,
        org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter,
        boolean serializeType)
        throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
        java.lang.String prefix = null;
        java.lang.String namespace = null;
        prefix = parentQName.getPrefix();
        namespace = parentQName.getNamespaceURI();
        if ((namespace != null) && (namespace.trim().length() > 0)) {
            java.lang.String writerPrefix = xmlWriter.getPrefix(namespace);
            if (writerPrefix != null) {
                xmlWriter.writeStartElement(namespace, parentQName.getLocalPart());
            } else {
                if (prefix == null) {
                    prefix = generatePrefix(namespace);
                }
                xmlWriter.writeStartElement(prefix, parentQName.getLocalPart(), namespace);
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            }
        } else {
            xmlWriter.writeStartElement(parentQName.getLocalPart());
        }
        if (serializeType){
            java.lang.String namespacePrefix = registerPrefix(xmlWriter,"http://ec2.amazonaws.com/doc/2012-08-15/");
            if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)){
                writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
                    namespacePrefix+":InstanceStatusDetailsSetType",
                    xmlWriter);
            } else {
                writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
                    "InstanceStatusDetailsSetType",
                    xmlWriter);
            }
        }
        if (localItemTracker){
            if (localItem!=null){
                for (int i = 0;i < localItem.length;i++){
                    if (localItem[i] != null){
                        localItem[i].serialize(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/","item"),
                            factory,xmlWriter);
                    } else {
                        // we don't have to do any thing since minOccures is zero
                    }
                }
            } else {
                throw new org.apache.axis2.databinding.ADBException("item cannot be null!!");
            }
        }
        xmlWriter.writeEndElement();
    }

    /**
     * Util method to write an attribute with the ns prefix
     */
    private void writeAttribute(java.lang.String prefix,java.lang.String namespace,java.lang.String attName,
        java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
        if (xmlWriter.getPrefix(namespace) == null) {
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        xmlWriter.writeAttribute(namespace,attName,attValue);
    }

    /**
     * Util method to write an attribute without the ns prefix
     */
    private void writeAttribute(java.lang.String namespace,java.lang.String attName,
        java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
        if (namespace.equals(""))
        {
            xmlWriter.writeAttribute(attName,attValue);
        }
        else
        {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace,attName,attValue);
        }
    }

    /**
     * Util method to write a QName-valued attribute without the ns prefix
     */
    private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName,
        javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        java.lang.String attributeNamespace = qname.getNamespaceURI();
        java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
        if (attributePrefix == null) {
            attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
        }
        java.lang.String attributeValue;
        if (attributePrefix.trim().length() > 0) {
            attributeValue = attributePrefix + ":" + qname.getLocalPart();
        } else {
            attributeValue = qname.getLocalPart();
        }
        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName, attributeValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace, attName, attributeValue);
        }
    }

    /**
     * method to handle Qnames
     */
    private void writeQName(javax.xml.namespace.QName qname,
        javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        java.lang.String namespaceURI = qname.getNamespaceURI();
        if (namespaceURI != null) {
            java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
            if (prefix == null) {
                prefix = generatePrefix(namespaceURI);
                xmlWriter.writeNamespace(prefix, namespaceURI);
                xmlWriter.setPrefix(prefix,namespaceURI);
            }
            if (prefix.trim().length() > 0){
                xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            } else {
                // i.e this is the default namespace
                xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            }
        } else {
            xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
        }
    }

    // Writes a space-separated list of QNames as character data, registering
    // any namespaces first (namespace events cannot follow character data).
    private void writeQNames(javax.xml.namespace.QName[] qnames,
        javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        if (qnames != null) {
            // we have to store this data until last moment since it is not possible to write any
            // namespace data after writing the charactor data
            java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
            java.lang.String namespaceURI = null;
            java.lang.String prefix = null;
            for (int i = 0; i < qnames.length; i++) {
                if (i > 0) {
                    stringToWrite.append(" ");
                }
                namespaceURI = qnames[i].getNamespaceURI();
                if (namespaceURI != null) {
                    prefix = xmlWriter.getPrefix(namespaceURI);
                    if ((prefix == null) || (prefix.length() == 0)) {
                        prefix = generatePrefix(namespaceURI);
                        xmlWriter.writeNamespace(prefix, namespaceURI);
                        xmlWriter.setPrefix(prefix,namespaceURI);
                    }
                    if (prefix.trim().length() > 0){
                        stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    } else {
                        stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    }
                } else {
                    stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                }
            }
            xmlWriter.writeCharacters(stringToWrite.toString());
        }
    }

    /**
     * Register a namespace prefix
     */
    private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace) throws javax.xml.stream.XMLStreamException {
        java.lang.String prefix = xmlWriter.getPrefix(namespace);
        if (prefix == null) {
            prefix = generatePrefix(namespace);
            // Keep generating until the prefix is unused in the current context.
            while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) {
                prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
            }
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        return prefix;
    }

    /**
     * databinding method to get an XML representation of this object
     *
     */
    public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName)
        throws org.apache.axis2.databinding.ADBException{
        java.util.ArrayList elementList = new java.util.ArrayList();
        java.util.ArrayList attribList = new java.util.ArrayList();
        if (localItemTracker){
            if (localItem!=null) {
                for (int i = 0;i < localItem.length;i++){
                    if (localItem[i] != null){
                        elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/",
                            "item"));
                        elementList.add(localItem[i]);
                    } else {
                        // nothing to do
                    }
                }
            } else {
                throw new org.apache.axis2.databinding.ADBException("item cannot be null!!");
            }
        }
        return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(qName, elementList.toArray(), attribList.toArray());
    }

    /**
     * Factory class that keeps the parse method
     */
    public static class Factory{
        /**
         * static method to create the object
         * Precondition: If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
         * If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
         * Postcondition: If this object is an element, the reader is positioned at its end element
         * If this object is a complex type, the reader is positioned at the end element of its outer element
         */
        public static InstanceStatusDetailsSetType parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception{
            InstanceStatusDetailsSetType object =
                new InstanceStatusDetailsSetType();
            int event;
            java.lang.String nillableValue = null;
            java.lang.String prefix ="";
            java.lang.String namespaceuri ="";
            try {
                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();
                // An xsi:type attribute may redirect parsing to a subtype
                // via the generated ExtensionMapper.
                if (reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance","type")!=null){
                    java.lang.String fullTypeName = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance",
                        "type");
                    if (fullTypeName!=null){
                        java.lang.String nsPrefix = null;
                        if (fullTypeName.indexOf(":") > -1){
                            nsPrefix = fullTypeName.substring(0,fullTypeName.indexOf(":"));
                        }
                        nsPrefix = nsPrefix==null?"":nsPrefix;
                        java.lang.String type = fullTypeName.substring(fullTypeName.indexOf(":")+1);
                        if (!"InstanceStatusDetailsSetType".equals(type)){
                            //find namespace for the prefix
                            java.lang.String nsUri = reader.getNamespaceContext().getNamespaceURI(nsPrefix);
                            return (InstanceStatusDetailsSetType)com.amazon.ec2.ExtensionMapper.getTypeObject(
                                nsUri,type,reader);
                        }
                    }
                }
                // Note all attributes that were handled. Used to differ normal attributes
                // from anyAttributes.
                java.util.Vector handledAttributes = new java.util.Vector();
                reader.next();
                java.util.ArrayList list1 = new java.util.ArrayList();
                while (!reader.isStartElement() && !reader.isEndElement()) reader.next();
                if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/","item").equals(reader.getName())){
                    // Process the array and step past its final element's end.
                    list1.add(com.amazon.ec2.InstanceStatusDetailsSetItemType.Factory.parse(reader));
                    //loop until we find a start element that is not part of this array
                    boolean loopDone1 = false;
                    while(!loopDone1){
                        // We should be at the end element, but make sure
                        while (!reader.isEndElement())
                            reader.next();
                        // Step out of this element
                        reader.next();
                        // Step to next element event.
                        while (!reader.isStartElement() && !reader.isEndElement())
                            reader.next();
                        if (reader.isEndElement()){
                            //two continuous end elements means we are exiting the xml structure
                            loopDone1 = true;
                        } else {
                            if (new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/","item").equals(reader.getName())){
                                list1.add(com.amazon.ec2.InstanceStatusDetailsSetItemType.Factory.parse(reader));
                            }else{
                                loopDone1 = true;
                            }
                        }
                    }
                    // call the converter utility to convert and set the array
                    object.setItem((com.amazon.ec2.InstanceStatusDetailsSetItemType[])
                        org.apache.axis2.databinding.utils.ConverterUtil.convertToArray(
                            com.amazon.ec2.InstanceStatusDetailsSetItemType.class,
                            list1));
                } // End of if for expected property start element
                else {
                }
                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();
                if (reader.isStartElement())
                    // A start element we are not expecting indicates a trailing invalid property
                    throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
            } catch (javax.xml.stream.XMLStreamException e) {
                throw new java.lang.Exception(e);
            }
            return object;
        }
    }//end of factory class
}
| |
package com.qualcomm.ftcrobotcontroller.opmodes;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorManager;
import android.hardware.SensorEventListener;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.util.Range;
import com.qualcomm.robotcore.hardware.Servo;
import com.qualcomm.robotcore.hardware.ColorSensor;
/**
* Created by Delta on 9/30/2015.
*/
public class DR_Tank_Test extends OpMode{
public float accelX = 0;
public float accelY = 0;
public float accelZ = 0;
public float deltaX;
public float deltaY;
public float deltaZ;
public boolean sweeperF;
public boolean sweeperR;
public boolean skeeball = false;
public boolean drive = true;
DcMotor motorLeftRear;
DcMotor motorRightRear;
DcMotor motorLeftFront;
DcMotor motorRightFront;
Servo leftLever;
Servo rightLever;
DcMotor motorWinchLeft;
DcMotor motorWinchRight;
DcMotor motorWinchHelp;
//DcMotor motorSweeper;
Servo plowLeft;
Servo plowRight;
Servo catLeft;
Servo winchAngle;
Servo armVert;
Servo armGrab;
//ColorSensor RGBSensor;
//ColorSensor colorSensor;
double armVertPosition = 0.988;
double armGrabPosition = 0.878;
double armVertDelta = 0.005;
double armGrabDelta = 0.005;
double leftLeverPosition = 0.019;
double rightLeverPosition = 0.878;
double leftLeverDelta = 0.01;
double rightLeverDelta = 0.01;
double catLeftPosition = 0.800;
double catLeftDelta = 0.002;
double catRightPosition = 0.005;
double catRightDelta = 0.002;
double winchAngleDelta = 0.0005;
double winchAnglePosition = .78;
//Servo plowInOut;
//Servo armColorSensor;
double plowDelta = 0.005;
double plowDeltaLeft = plowDelta;
double plowDeltaRight = -plowDelta;
double plowPositionLeft = 0.5;
double plowPositionRight = 0.5;
//double inOutPosition = 0.0;
//double inOutDelta = 0.005;
int rev = 3;
int count = 0;
//double armColorSensorPosition = 0.8;
boolean speed_mode;
//Initialize the Accelerometer
private SensorManager mSensorManager;
private Sensor accelerometer;
public DR_Tank_Test() {
}
public void init() {
motorLeftRear = hardwareMap.dcMotor.get("Drive_Left_Rear");
motorRightRear = hardwareMap.dcMotor.get("Drive_Right_Rear");
//motorLeftRear.setDirection(DcMotor.Direction.REVERSE);
motorLeftFront = hardwareMap.dcMotor.get("Drive_Left_Front");
motorRightFront = hardwareMap.dcMotor.get("Drive_Right_Front");
//motorLeftFront.setDirection(DcMotor.Direction.REVERSE);
motorWinchLeft = hardwareMap.dcMotor.get("Winch_Motor_Left");
motorWinchRight = hardwareMap.dcMotor.get("Winch_Motor_Right");
motorWinchHelp = hardwareMap.dcMotor.get("Winch_Help");
winchAngle = hardwareMap.servo.get("Winch_Angle");
winchAngle.setPosition(winchAnglePosition);
//motorSweeper = hardwareMap.dcMotor.get("Sweeper");
catLeft = hardwareMap.servo.get("CatLeft");
catLeft.setPosition(0.79);
catLeftPosition = 0.79;
leftLever = hardwareMap.servo.get("Left_Lever");
leftLever.setPosition(leftLeverPosition);
rightLever = hardwareMap.servo.get("Right_Lever");
rightLever.setPosition(rightLeverPosition);
plowLeft = hardwareMap.servo.get("Left_Plow");
plowRight = hardwareMap.servo.get("Right_Plow");
armVert = hardwareMap.servo.get("Vertical_Arm");
armVert.setPosition(armVertPosition);
armGrab = hardwareMap.servo.get("Grabber_Arm");
armGrab.setPosition(armGrabPosition);
plowLeft.setPosition(0.957);
plowRight.setPosition(0.047);
//plowInOut.setPosition(.455);
plowPositionLeft = 0.957;
plowPositionRight = 0.047;
speed_mode = true;
//mSensorManager = (SensorManager) hardwareMap.appContext.getSystemService(Context.SENSOR_SERVICE);
//accelerometer = mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
//sweeperF = false;
//sweeperR = false;
}
public void start() {
//mSensorManager.registerListener(this, accelerometer, SensorManager.SENSOR_DELAY_NORMAL);
//}
//}
// @Override
//public void onAccuracyChanged(Sensor sensor, int accuracy) {
}
public void loop() {
plowPositionLeft = Range.clip(plowPositionLeft, 0.1, 0.937);
plowPositionRight = Range.clip(plowPositionRight, 0.047, 0.867);
//inOutPosition = Range.clip(inOutPosition, 0.18, 0.78);
catLeftPosition = Range.clip(catLeftPosition, 0.184, 0.800);
//catRightPosition = Range.clip(catRightPosition, 0.09, 0.862);
winchAnglePosition = Range.clip(winchAnglePosition, 0.002, .997);
leftLeverPosition = Range.clip(leftLeverPosition, 0.076,0.924);
rightLeverPosition = Range.clip(rightLeverPosition, 0.076,0.878);
armGrabPosition = Range.clip(armGrabPosition, 0.01, 0.99);
armVertPosition = Range.clip(armVertPosition, 0.0313, 0.99);
double throttleLB = gamepad1.left_stick_y;
double throttleRB = gamepad1.right_stick_y;
double throttleLF = gamepad1.left_stick_y;
double throttleRF = gamepad1.right_stick_y;
double throttleWinch = gamepad2.right_stick_y;
if (gamepad1.y) {
speed_mode = true;
}
if (gamepad1.a) {
speed_mode = false;
}
throttleLB = Range.clip(throttleLB, -1.0, 1.0);
throttleRB = Range.clip(throttleRB, -1.0, 1.0);
throttleLF = Range.clip(throttleLF, -1.0, 1.0);
throttleRF = Range.clip(throttleRF, -1.0, 1.0);
throttleWinch = Range.clip(throttleWinch, -1.0, 1.0);
motorWinchLeft.setPower(-throttleWinch);
motorWinchRight.setPower(throttleWinch);
motorWinchHelp.setPower(-throttleWinch/3);
//rightLever.setPower(-rightLeverThrottle);
if(drive)
{
motorLeftRear.setPower(-throttleLB);
motorLeftFront.setPower(-throttleLF);
motorRightRear.setPower(-throttleRB);
motorRightFront.setPower(-throttleRF);
}
else
{
motorLeftRear.setPower(throttleLB);
motorLeftFront.setPower(throttleLF);
motorRightRear.setPower(throttleRB);
motorRightFront.setPower(throttleRF);
}
if (gamepad2.left_trigger > .5) {
catLeftPosition -= catLeftDelta;
}
if (gamepad2.left_bumper) {
catLeftPosition += catLeftDelta;
}
if (gamepad2.y) {
rightLeverPosition -= rightLeverDelta;
}
if (gamepad2.a) {
rightLeverPosition += rightLeverDelta;
}
if (gamepad2.dpad_up) {
leftLeverPosition += leftLeverDelta;
}
if (gamepad2.dpad_down) {
leftLeverPosition -= leftLeverDelta;
}
if (gamepad2.right_trigger > .2) {
winchAnglePosition += winchAngleDelta;
}
if (gamepad2.right_bumper) {
winchAnglePosition -= winchAngleDelta;
}
//if (count > 10)
//{
// if(gamepad1.a) {
// sweeperF = !sweeperF;
// count = 0;
// }
// if(gamepad1.b) {
// sweeperR = !sweeperR;
// count = 0;
// }
//}
if (gamepad1.right_bumper)
{
plowPositionRight += plowDeltaRight;
}
if (gamepad1.right_trigger > .5)
{
plowPositionRight -= plowDeltaRight;
}
if (gamepad1.left_bumper)
{
plowPositionLeft += plowDeltaLeft;
}
if (gamepad1.left_trigger > .5)
{
plowPositionLeft -= plowDeltaLeft;
}
if(gamepad2.left_stick_y > 0.2)
{
armVertPosition += armVertDelta;
}
if(gamepad2.left_stick_y < -0.2)
{
armVertPosition -= armVertDelta;
}
if(gamepad2.x)
{
armGrabPosition += armGrabDelta;
}
if(gamepad2.b)
{
armGrabPosition -= armGrabDelta;
}
if(gamepad2.guide)
{
skeeball = !skeeball;
}
if(skeeball)
{
leftLeverDelta = 0.075;
rightLeverDelta = 0.075;
}
else
{
leftLeverDelta = 0.01;
rightLeverDelta = 0.01;
}
if(gamepad1.guide)
{
drive = !drive;
}
plowLeft.setPosition(plowPositionLeft);
plowRight.setPosition(plowPositionRight);
catLeft.setPosition(catLeftPosition);
winchAngle.setPosition(winchAnglePosition);
leftLever.setPosition(leftLeverPosition);
rightLever.setPosition(rightLeverPosition);
armGrab.setPosition(armGrabPosition);
armVert.setPosition(armVertPosition);
//if(sweeperF)
//{
// motorSweeper.setPower(1.0);
//}
// else
//{
// motorSweeper.setPower(0.0);
//}
//if(!sweeperF)
//{
// if(sweeperR)
// {
// motorSweeper.setPower(-1.0);
// }
// else
// {
// motorSweeper.setPower(0.0);
// }
//}
//telemetry.addData("Left Rear:"
//, +motorLeftRear.getPower()
//+ motorLeftRear.getCurrentPosition());
//telemetry.addData("Right Rear:"
//, +motorRightRear.getPower()
// + motorRightRear.getCurrentPosition());
//telemetry.addData("Left Front:"
//, + motorLeftFront.getPower ()
/// + motorLeftFront.getCurrentPosition());
//telemetry.addData("Right Front:"
//, + motorRightFront.getPower()
// + motorRightFront.getCurrentPosition()); */
//telemetry.addData("Left Plow:", plowLeft.getPosition());
//telemetry.addData("Right Plow:", plowRight.getPosition());
//telemetry.addData("InOut Plow:", plowInOut.getPosition());
count++;
telemetry.addData("Left Lever", leftLever.getPosition());
telemetry.addData("Right Lever", rightLever.getPosition());
telemetry.addData("CatLeft:", catLeft.getPosition());
telemetry.addData("Left Plow", plowLeft.getPosition());
telemetry.addData("Right Plow", plowRight.getPosition());
telemetry.addData("Arm Grabber", armGrab.getPosition());
telemetry.addData("Arm Vertical", armVert.getPosition());
telemetry.addData("Winch Angle", winchAngle.getPosition());
telemetry.addData("rev", rev);
telemetry.addData("skeeball", skeeball);
telemetry.addData("drive", drive);
//telemetry.addData("RBG",RGBSensor.argb());
//telemetry.addData("X Accelerometer", accelX);
//telemetry.addData("Y Accelerometer", accelY);
//telemetry.addData("Z Accelerometer", accelZ);
}
//possible issue
//@Override
//public void onSensorChanged(SensorEvent event)
//{
// telemetry.addData ("ASensor", "Sensor Hit");
//if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER)
//{
//get the change of the x,y,z values of the accelerometer
//deltaX = Math.abs(lastX - event.values[0]);
//deltaY = Math.abs(lastY - event.values[1]);
//deltaZ = Math.abs(lastZ - event.values[2]);
//if the change is below 2, it is just plain noise
//if (deltaX < 2)
// deltaX = 0;
//if (deltaY < 2)
// deltaY = 0;
//if (deltaZ < 2)
// deltaZ = 0;
//accelX = deltaX;
//accelY = deltaY;
//accelZ = deltaZ;
//}
// }
double scaleInputRearHigh(double dVal) {
double[] scaleArray = {0.0, 0.05, 0.09, 0.10, 0.12, 0.15, 0.18, 0.24,
0.30, 0.36, 0.43, 0.50, 0.60, 0.72, 0.85, 1.00, 1.00};
// get the corresponding index for the scaleInput array.
int index = (int) (dVal * 16.0);
if (index < 0) {
index = -index;
} else if (index > 16) {
index = 16;
}
double dScale = 0.0;
if (dVal < 0) {
dScale = -scaleArray[index];
} else {
dScale = scaleArray[index];
}
return dScale;
}
double scaleInputRearLow(double dVal){
double[] scaleArray = {0.0, 0.0375, 0.0675, 0.075, 0.09, 0.1125, 0.135, 0.18,
0.225, 0.27, 0.3225, 0.375, 0.45, 0.54, 0.6375, 0.75, 0.75};
int index = (int) (dVal * 16.0);
if (index < 0) {
index = -index;
}
else if (index > 16) {
index = 16;
}
double dScale = 0.0;
if (dVal < 0) {
dScale = -scaleArray[index];
}
else {
dScale = scaleArray[index];
}
return dScale;
}
double scaleInputFrontHigh(double dVal){
double[] scaleArray = {0.0, 0.025, 0.045, 0.05, 0.06, 0.075, 0.09, 0.12,
0.15, 0.18, 0.215, 0.25, 0.30, 0.36, 0.425, 0.5, 0.5};
int index = (int) (dVal * 16.0);
if (index < 0) {
index = -index;
}
else if (index > 16) {
index = 16;
}
double dScale = 0.0;
if (dVal < 0) {
dScale = -scaleArray[index];
}
else {
dScale = scaleArray[index];
}
return dScale;
}
double scaleInputFrontLow(double dVal){
double[] scaleArray = {0.0, 0.01875, 0.03375, 0.0375, 0.045, 0.05625, 0.0675, 0.09,
0.1125, 0.135, 0.16125, 0.1875, 0.225, 0.27, 0.31875, 0.375, 0.375};
int index = (int) (dVal * 16.0);
if (index < 0) {
index = -index;
}
else if (index > 16) {
index = 16;
}
double dScale = 0.0;
if (dVal < 0) {
dScale = -scaleArray[index];
}
else {
dScale = scaleArray[index];
}
return dScale;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.stream.kafka;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.message.MessageAndMetadata;
import kafka.serializer.Decoder;
import org.apache.ignite.IgniteDataStreamer;
import org.apache.ignite.IgniteException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.internal.util.typedef.internal.A;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.stream.StreamAdapter;
/**
* Server that subscribes to topic messages from Kafka broker and streams its to key-value pairs into
* {@link IgniteDataStreamer} instance.
* <p>
* Uses Kafka's High Level Consumer API to read messages from Kafka.
*
* @see <a href="https://cwiki.apache.org/confluence/display/KAFKA/Consumer+Group+Example">Consumer Consumer Group
* Example</a>
*/
public class KafkaStreamer<T, K, V> extends StreamAdapter<T, K, V> {
    /** Default retry timeout in milliseconds. */
    private static final long DFLT_RETRY_TIMEOUT = 10000;

    /** Logger. */
    private IgniteLogger log;

    /** Executor used to submit kafka streams. */
    private ExecutorService executor;

    /** Topic. */
    private String topic;

    /** Number of threads to process kafka streams. */
    private int threads;

    /** Kafka consumer config. */
    private ConsumerConfig consumerCfg;

    /** Key decoder. */
    private Decoder<K> keyDecoder;

    /** Value decoder. */
    private Decoder<V> valDecoder;

    /** Kafka consumer connector. */
    private ConsumerConnector consumer;

    /** Retry timeout. */
    private long retryTimeout = DFLT_RETRY_TIMEOUT;

    /** Stopped flag. Volatile so consumer threads observe {@link #stop()} promptly. */
    private volatile boolean stopped;

    /**
     * Sets the topic name.
     *
     * @param topic Topic name.
     */
    public void setTopic(String topic) {
        this.topic = topic;
    }

    /**
     * Sets the threads.
     *
     * @param threads Number of threads.
     */
    public void setThreads(int threads) {
        this.threads = threads;
    }

    /**
     * Sets the consumer config.
     *
     * @param consumerCfg Consumer configuration.
     */
    public void setConsumerConfig(ConsumerConfig consumerCfg) {
        this.consumerCfg = consumerCfg;
    }

    /**
     * Sets the key decoder.
     *
     * @param keyDecoder Key decoder.
     */
    public void setKeyDecoder(Decoder<K> keyDecoder) {
        this.keyDecoder = keyDecoder;
    }

    /**
     * Sets the value decoder.
     *
     * @param valDecoder Value decoder.
     */
    public void setValueDecoder(Decoder<V> valDecoder) {
        this.valDecoder = valDecoder;
    }

    /**
     * Sets the retry timeout used when stream consumption fails.
     *
     * @param retryTimeout Retry timeout in milliseconds, must be positive.
     */
    public void setRetryTimeout(long retryTimeout) {
        A.ensure(retryTimeout > 0, "retryTimeout > 0");

        this.retryTimeout = retryTimeout;
    }

    /**
     * Starts streamer: validates configuration, connects to Kafka and launches
     * {@code threads} worker threads, each draining one Kafka stream into the
     * configured {@link IgniteDataStreamer}.
     *
     * @throws IgniteException If failed.
     */
    public void start() {
        A.notNull(getStreamer(), "streamer");
        A.notNull(getIgnite(), "ignite");
        A.notNull(topic, "topic");
        A.notNull(keyDecoder, "key decoder");
        A.notNull(valDecoder, "value decoder");
        A.notNull(consumerCfg, "kafka consumer config");
        A.ensure(threads > 0, "threads > 0");

        log = getIgnite().log();

        consumer = kafka.consumer.Consumer.createJavaConsumerConnector(consumerCfg);

        Map<String, Integer> topicCntMap = new HashMap<>();

        topicCntMap.put(topic, threads);

        Map<String, List<KafkaStream<K, V>>> consumerMap =
            consumer.createMessageStreams(topicCntMap, keyDecoder, valDecoder);

        List<KafkaStream<K, V>> streams = consumerMap.get(topic);

        // Now launch all the consumer threads.
        executor = Executors.newFixedThreadPool(threads);

        stopped = false;

        // Now create an object to consume the messages.
        for (final KafkaStream<K, V> stream : streams) {
            executor.submit(new Runnable() {
                @Override public void run() {
                    while (!stopped) {
                        try {
                            for (ConsumerIterator<K, V> it = stream.iterator(); it.hasNext() && !stopped; ) {
                                MessageAndMetadata<K, V> msg = it.next();

                                try {
                                    getStreamer().addData(msg.key(), msg.message());
                                }
                                catch (Exception e) {
                                    // A single bad message must not kill the worker.
                                    U.warn(log, "Message is ignored due to an error [msg=" + msg + ']', e);
                                }
                            }
                        }
                        catch (Exception e) {
                            U.warn(log, "Message can't be consumed from stream. Retry after " +
                                retryTimeout + " ms.", e);

                            try {
                                Thread.sleep(retryTimeout);
                            }
                            catch (InterruptedException ie) {
                                // Fix: previously the interrupt was silently swallowed and the
                                // worker kept retrying. An interrupt here means the executor is
                                // shutting down - restore the interrupt status and exit.
                                Thread.currentThread().interrupt();

                                return;
                            }
                        }
                    }
                }
            });
        }
    }

    /**
     * Stops streamer: signals worker threads, shuts down the Kafka consumer
     * connector and the executor, escalating to {@link ExecutorService#shutdownNow()}
     * if workers do not terminate in time.
     */
    public void stop() {
        stopped = true;

        if (consumer != null)
            consumer.shutdown();

        if (executor != null) {
            executor.shutdown();

            try {
                if (!executor.awaitTermination(5000, TimeUnit.MILLISECONDS)) {
                    // Fix: workers may be blocked in Thread.sleep(retryTimeout);
                    // interrupt them instead of leaving them running.
                    executor.shutdownNow();

                    if (log.isDebugEnabled())
                        log.debug("Timed out waiting for consumer threads to shut down, exiting uncleanly.");
                }
            }
            catch (InterruptedException e) {
                executor.shutdownNow();

                // Fix: preserve the interrupt status for the caller.
                Thread.currentThread().interrupt();

                if (log.isDebugEnabled())
                    log.debug("Interrupted during shutdown, exiting uncleanly.");
            }
        }
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gateway;
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.RoutingNode;
import org.elasticsearch.cluster.routing.RoutingNodes;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.cluster.routing.allocation.decider.Decision;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexSettings;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
* The primary shard allocator allocates primary shard that were not created as
* a result of an API to a node that held them last to be recovered.
*/
public abstract class PrimaryShardAllocator extends AbstractComponent {
    // Validates an "initial shards" setting value: either one of the symbolic
    // names below, or a string parseable as an integer count.
    private static final Function<String, String> INITIAL_SHARDS_PARSER = (value) -> {
        switch (value) {
            case "quorum":
            case "quorum-1":
            case "half":
            case "one":
            case "full":
            case "full-1":
            case "all-1":
            case "all":
                return value;
            default:
                Integer.parseInt(value); // it can be parsed that's all we care here?
                return value;
        }
    };

    public static final Setting<String> NODE_INITIAL_SHARDS_SETTING = new Setting<>("gateway.initial_shards", (settings) -> settings.get("gateway.local.initial_shards", "quorum"), INITIAL_SHARDS_PARSER, true, Setting.Scope.CLUSTER);
    @Deprecated
    public static final Setting<String> INDEX_RECOVERY_INITIAL_SHARDS_SETTING = new Setting<>("index.recovery.initial_shards", (settings) -> NODE_INITIAL_SHARDS_SETTING.get(settings) , INITIAL_SHARDS_PARSER, true, Setting.Scope.INDEX);

    public PrimaryShardAllocator(Settings settings) {
        super(settings);
        logger.debug("using initial_shards [{}]", NODE_INITIAL_SHARDS_SETTING.get(settings));
    }

    /**
     * Walks all unassigned primary shards and, where enough shard copies have
     * been located on nodes, assigns each to the best candidate node. Uses
     * allocation-id based selection when the index has active allocation ids,
     * falling back to version-based selection for pre-3.0 indices.
     *
     * @return {@code true} if the routing table was changed.
     */
    public boolean allocateUnassigned(RoutingAllocation allocation) {
        boolean changed = false;
        final RoutingNodes routingNodes = allocation.routingNodes();
        final MetaData metaData = routingNodes.metaData();

        final RoutingNodes.UnassignedShards.UnassignedIterator unassignedIterator = routingNodes.unassigned().iterator();
        while (unassignedIterator.hasNext()) {
            final ShardRouting shard = unassignedIterator.next();

            // This allocator only handles primaries; replicas are someone else's job.
            if (shard.primary() == false) {
                continue;
            }

            final IndexMetaData indexMetaData = metaData.index(shard.getIndex());
            // don't go wild here and create a new IndexSetting object for every shard this could cause a lot of garbage
            // on cluster restart if we allocate a boat load of shards
            if (shard.allocatedPostIndexCreate(indexMetaData) == false) {
                // when we create a fresh index
                continue;
            }

            final AsyncShardFetch.FetchResult<TransportNodesListGatewayStartedShards.NodeGatewayStartedShards> shardState = fetchData(shard, allocation);
            if (shardState.hasData() == false) {
                // Shard-started state is fetched asynchronously; skip this shard
                // for now and revisit once the fetch completes.
                logger.trace("{}: ignoring allocation, still fetching shard started state", shard);
                allocation.setHasPendingAsyncFetch();
                unassignedIterator.removeAndIgnore();
                continue;
            }

            final Set<String> lastActiveAllocationIds = indexMetaData.activeAllocationIds(shard.id());
            final boolean snapshotRestore = shard.restoreSource() != null;
            final boolean recoverOnAnyNode = recoverOnAnyNode(indexMetaData);

            final NodesAndVersions nodesAndVersions;
            final boolean enoughAllocationsFound;

            if (lastActiveAllocationIds.isEmpty()) {
                assert Version.indexCreated(indexMetaData.getSettings()).before(Version.V_3_0_0) : "trying to allocated a primary with an empty allocation id set, but index is new";
                // when we load an old index (after upgrading cluster) or restore a snapshot of an old index
                // fall back to old version-based allocation mode
                // Note that once the shard has been active, lastActiveAllocationIds will be non-empty
                nodesAndVersions = buildNodesAndVersions(shard, snapshotRestore || recoverOnAnyNode, allocation.getIgnoreNodes(shard.shardId()), shardState);
                if (snapshotRestore || recoverOnAnyNode) {
                    enoughAllocationsFound = nodesAndVersions.allocationsFound > 0;
                } else {
                    enoughAllocationsFound = isEnoughVersionBasedAllocationsFound(shard, indexMetaData, nodesAndVersions);
                }
                logger.debug("[{}][{}]: version-based allocation for pre-{} index found {} allocations of {}, highest version: [{}]", shard.index(), shard.id(), Version.V_3_0_0, nodesAndVersions.allocationsFound, shard, nodesAndVersions.highestVersion);
            } else {
                assert lastActiveAllocationIds.isEmpty() == false;
                // use allocation ids to select nodes
                nodesAndVersions = buildAllocationIdBasedNodes(shard, snapshotRestore || recoverOnAnyNode,
                        allocation.getIgnoreNodes(shard.shardId()), lastActiveAllocationIds, shardState);
                enoughAllocationsFound = nodesAndVersions.allocationsFound > 0;
                logger.debug("[{}][{}]: found {} allocations of {} based on allocation ids: [{}]", shard.index(), shard.id(), nodesAndVersions.allocationsFound, shard, lastActiveAllocationIds);
            }

            if (enoughAllocationsFound == false){
                if (snapshotRestore) {
                    // let BalancedShardsAllocator take care of allocating this shard
                    logger.debug("[{}][{}]: missing local data, will restore from [{}]", shard.index(), shard.id(), shard.restoreSource());
                } else if (recoverOnAnyNode) {
                    // let BalancedShardsAllocator take care of allocating this shard
                    logger.debug("[{}][{}]: missing local data, recover from any node", shard.index(), shard.id());
                } else {
                    // we can't really allocate, so ignore it and continue
                    unassignedIterator.removeAndIgnore();
                    logger.debug("[{}][{}]: not allocating, number_of_allocated_shards_found [{}]", shard.index(), shard.id(), nodesAndVersions.allocationsFound);
                }
                continue;
            }

            final NodesToAllocate nodesToAllocate = buildNodesToAllocate(shard, allocation, nodesAndVersions.nodes);
            if (nodesToAllocate.yesNodes.isEmpty() == false) {
                DiscoveryNode node = nodesToAllocate.yesNodes.get(0);
                logger.debug("[{}][{}]: allocating [{}] to [{}] on primary allocation", shard.index(), shard.id(), shard, node);
                changed = true;
                unassignedIterator.initialize(node.id(), nodesAndVersions.highestVersion, ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE);
            } else if (nodesToAllocate.throttleNodes.isEmpty() == true && nodesToAllocate.noNodes.isEmpty() == false) {
                // No node said yes and none throttled: force onto the first "no" node
                // rather than leaving the primary unassigned.
                DiscoveryNode node = nodesToAllocate.noNodes.get(0);
                logger.debug("[{}][{}]: forcing allocating [{}] to [{}] on primary allocation", shard.index(), shard.id(), shard, node);
                changed = true;
                unassignedIterator.initialize(node.id(), nodesAndVersions.highestVersion, ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE);
            } else {
                // we are throttling this, but we have enough to allocate to this node, ignore it for now
                logger.debug("[{}][{}]: throttling allocation [{}] to [{}] on primary allocation", shard.index(), shard.id(), shard, nodesToAllocate.throttleNodes);
                unassignedIterator.removeAndIgnore();
            }
        }
        return changed;
    }

    /**
     * Builds a list of nodes. If matchAnyShard is set to false, only nodes that have an allocation id matching
     * lastActiveAllocationIds are added to the list. Otherwise, any node that has a shard is added to the list, but
     * entries with matching allocation id are always at the front of the list.
     */
    protected NodesAndVersions buildAllocationIdBasedNodes(ShardRouting shard, boolean matchAnyShard, Set<String> ignoreNodes,
                                                           Set<String> lastActiveAllocationIds, AsyncShardFetch.FetchResult<TransportNodesListGatewayStartedShards.NodeGatewayStartedShards> shardState) {
        LinkedList<DiscoveryNode> matchingNodes = new LinkedList<>();
        LinkedList<DiscoveryNode> nonMatchingNodes = new LinkedList<>();
        long highestVersion = -1;
        for (TransportNodesListGatewayStartedShards.NodeGatewayStartedShards nodeShardState : shardState.getData().values()) {
            DiscoveryNode node = nodeShardState.getNode();
            String allocationId = nodeShardState.allocationId();

            if (ignoreNodes.contains(node.id())) {
                continue;
            }

            if (nodeShardState.storeException() == null) {
                if (allocationId == null && nodeShardState.version() != -1) {
                    // old shard with no allocation id, assign dummy value so that it gets added below in case of matchAnyShard
                    allocationId = "_n/a_";
                }
                logger.trace("[{}] on node [{}] has allocation id [{}] of shard", shard, nodeShardState.getNode(), allocationId);
            } else {
                // A copy whose store cannot be opened is treated as if it had no
                // allocation id at all.
                logger.trace("[{}] on node [{}] has allocation id [{}] but the store can not be opened, treating as no allocation id", nodeShardState.storeException(), shard, nodeShardState.getNode(), allocationId);
                allocationId = null;
            }

            if (allocationId != null) {
                if (lastActiveAllocationIds.contains(allocationId)) {
                    // Nodes that held the primary copy go to the head of the list.
                    if (nodeShardState.primary()) {
                        matchingNodes.addFirst(node);
                    } else {
                        matchingNodes.addLast(node);
                    }
                    highestVersion = Math.max(highestVersion, nodeShardState.version());
                } else if (matchAnyShard) {
                    if (nodeShardState.primary()) {
                        nonMatchingNodes.addFirst(node);
                    } else {
                        nonMatchingNodes.addLast(node);
                    }
                    highestVersion = Math.max(highestVersion, nodeShardState.version());
                }
            }
        }

        // Matching nodes are preferred over non-matching ones (they come first).
        List<DiscoveryNode> nodes = new ArrayList<>();
        nodes.addAll(matchingNodes);
        nodes.addAll(nonMatchingNodes);

        if (logger.isTraceEnabled()) {
            logger.trace("{} candidates for allocation: {}", shard, nodes.stream().map(DiscoveryNode::name).collect(Collectors.joining(", ")));
        }

        return new NodesAndVersions(nodes, nodes.size(), highestVersion);
    }

    /**
     * used by old version-based allocation
     */
    private boolean isEnoughVersionBasedAllocationsFound(ShardRouting shard, IndexMetaData indexMetaData, NodesAndVersions nodesAndVersions) {
        // check if the counts meets the minimum set
        int requiredAllocation = 1;
        // if we restore from a repository one copy is more then enough
        String initialShards = INDEX_RECOVERY_INITIAL_SHARDS_SETTING.get(indexMetaData.getSettings(), settings);
        if ("quorum".equals(initialShards)) {
            if (indexMetaData.getNumberOfReplicas() > 1) {
                requiredAllocation = ((1 + indexMetaData.getNumberOfReplicas()) / 2) + 1;
            }
        } else if ("quorum-1".equals(initialShards) || "half".equals(initialShards)) {
            if (indexMetaData.getNumberOfReplicas() > 2) {
                requiredAllocation = ((1 + indexMetaData.getNumberOfReplicas()) / 2);
            }
        } else if ("one".equals(initialShards)) {
            requiredAllocation = 1;
        } else if ("full".equals(initialShards) || "all".equals(initialShards)) {
            requiredAllocation = indexMetaData.getNumberOfReplicas() + 1;
        } else if ("full-1".equals(initialShards) || "all-1".equals(initialShards)) {
            if (indexMetaData.getNumberOfReplicas() > 1) {
                requiredAllocation = indexMetaData.getNumberOfReplicas();
            }
        } else {
            // Numeric value; INITIAL_SHARDS_PARSER guarantees this parses.
            requiredAllocation = Integer.parseInt(initialShards);
        }

        return nodesAndVersions.allocationsFound >= requiredAllocation;
    }

    /**
     * Split the list of nodes to lists of yes/no/throttle nodes based on allocation deciders
     */
    private NodesToAllocate buildNodesToAllocate(ShardRouting shard, RoutingAllocation allocation, List<DiscoveryNode> nodes) {
        List<DiscoveryNode> yesNodes = new ArrayList<>();
        List<DiscoveryNode> throttledNodes = new ArrayList<>();
        List<DiscoveryNode> noNodes = new ArrayList<>();
        for (DiscoveryNode discoNode : nodes) {
            RoutingNode node = allocation.routingNodes().node(discoNode.id());
            if (node == null) {
                // Candidate node is no longer part of the cluster.
                continue;
            }

            Decision decision = allocation.deciders().canAllocate(shard, node, allocation);
            if (decision.type() == Decision.Type.THROTTLE) {
                throttledNodes.add(discoNode);
            } else if (decision.type() == Decision.Type.NO) {
                noNodes.add(discoNode);
            } else {
                yesNodes.add(discoNode);
            }
        }
        return new NodesToAllocate(Collections.unmodifiableList(yesNodes), Collections.unmodifiableList(throttledNodes), Collections.unmodifiableList(noNodes));
    }

    /**
     * Builds a list of nodes. If matchAnyShard is set to false, only nodes that have the highest shard version
     * are added to the list. Otherwise, any node that has a shard is added to the list, but entries with highest
     * version are always at the front of the list.
     */
    NodesAndVersions buildNodesAndVersions(ShardRouting shard, boolean matchAnyShard, Set<String> ignoreNodes,
                                           AsyncShardFetch.FetchResult<TransportNodesListGatewayStartedShards.NodeGatewayStartedShards> shardState) {
        final Map<DiscoveryNode, Long> nodesWithVersion = new HashMap<>();
        int numberOfAllocationsFound = 0;
        long highestVersion = -1;
        for (TransportNodesListGatewayStartedShards.NodeGatewayStartedShards nodeShardState : shardState.getData().values()) {
            long version = nodeShardState.version();
            DiscoveryNode node = nodeShardState.getNode();

            if (ignoreNodes.contains(node.id())) {
                continue;
            }

            // -1 version means it does not exists, which is what the API returns, and what we expect to
            if (nodeShardState.storeException() == null) {
                logger.trace("[{}] on node [{}] has version [{}] of shard", shard, nodeShardState.getNode(), version);
            } else {
                // when there is an store exception, we disregard the reported version and assign it as -1 (same as shard does not exist)
                logger.trace("[{}] on node [{}] has version [{}] but the store can not be opened, treating as version -1", nodeShardState.storeException(), shard, nodeShardState.getNode(), version);
                version = -1;
            }

            if (version != -1) {
                numberOfAllocationsFound++;
                // If we've found a new "best" candidate, clear the
                // current candidates and add it
                if (version > highestVersion) {
                    highestVersion = version;
                    if (matchAnyShard == false) {
                        nodesWithVersion.clear();
                    }
                    nodesWithVersion.put(node, version);
                } else if (version == highestVersion) {
                    // If the candidate is the same, add it to the
                    // list, but keep the current candidate
                    nodesWithVersion.put(node, version);
                }
            }
        }
        // Now that we have a map of nodes to versions along with the
        // number of allocations found (and not ignored), we need to sort
        // it so the node with the highest version is at the beginning
        List<DiscoveryNode> nodesWithHighestVersion = new ArrayList<>();
        nodesWithHighestVersion.addAll(nodesWithVersion.keySet());
        CollectionUtil.timSort(nodesWithHighestVersion, new Comparator<DiscoveryNode>() {
            @Override
            public int compare(DiscoveryNode o1, DiscoveryNode o2) {
                // Descending by version: highest-version node first.
                return Long.compare(nodesWithVersion.get(o2), nodesWithVersion.get(o1));
            }
        });

        if (logger.isTraceEnabled()) {
            StringBuilder sb = new StringBuilder("[");
            for (DiscoveryNode n : nodesWithVersion.keySet()) {
                sb.append("[").append(n.getName()).append("]").append(" -> ").append(nodesWithVersion.get(n)).append(", ");
            }
            sb.append("]");
            logger.trace("{} candidates for allocation: {}", shard, sb.toString());
        }

        return new NodesAndVersions(Collections.unmodifiableList(nodesWithHighestVersion), numberOfAllocationsFound, highestVersion);
    }

    /**
     * Return {@code true} if the index is configured to allow shards to be
     * recovered on any node
     */
    private boolean recoverOnAnyNode(IndexMetaData metaData) {
        return (IndexMetaData.isOnSharedFilesystem(metaData.getSettings()) || IndexMetaData.isOnSharedFilesystem(this.settings))
                && IndexMetaData.INDEX_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE_SETTING.get(metaData.getSettings(), this.settings);
    }

    protected abstract AsyncShardFetch.FetchResult<TransportNodesListGatewayStartedShards.NodeGatewayStartedShards> fetchData(ShardRouting shard, RoutingAllocation allocation);

    // Result holder: candidate nodes (best first), how many shard copies were
    // found, and the highest shard version seen among them.
    static class NodesAndVersions {
        public final List<DiscoveryNode> nodes;
        public final int allocationsFound;
        public final long highestVersion;

        public NodesAndVersions(List<DiscoveryNode> nodes, int allocationsFound, long highestVersion) {
            this.nodes = nodes;
            this.allocationsFound = allocationsFound;
            this.highestVersion = highestVersion;
        }
    }

    // Candidate nodes partitioned by the allocation deciders' verdict.
    static class NodesToAllocate {
        final List<DiscoveryNode> yesNodes;
        final List<DiscoveryNode> throttleNodes;
        final List<DiscoveryNode> noNodes;

        public NodesToAllocate(List<DiscoveryNode> yesNodes, List<DiscoveryNode> throttleNodes, List<DiscoveryNode> noNodes) {
            this.yesNodes = yesNodes;
            this.throttleNodes = throttleNodes;
            this.noNodes = noNodes;
        }
    }
}
| |
/*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.cxx;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import com.facebook.buck.rules.DefaultTargetNodeToBuildRuleTransformer;
import com.facebook.buck.io.MorePaths;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.model.ImmutableFlavor;
import com.facebook.buck.parser.NoSuchBuildTargetException;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.FakeSourcePath;
import com.facebook.buck.rules.PathSourcePath;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.rules.TestCellBuilder;
import com.facebook.buck.rules.args.Arg;
import com.facebook.buck.rules.args.SourcePathArg;
import com.facebook.buck.rules.coercer.FrameworkPath;
import com.facebook.buck.rules.coercer.PatternMatchedCollection;
import com.facebook.buck.rules.coercer.SourceList;
import com.facebook.buck.shell.GenruleBuilder;
import com.facebook.buck.testutil.AllExistingProjectFilesystem;
import com.facebook.buck.testutil.FakeProjectFilesystem;
import com.facebook.buck.testutil.TargetGraphFactory;
import com.google.common.base.Function;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedMap;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Iterables;
import org.hamcrest.Matchers;
import org.junit.Test;
import java.io.File;
import java.nio.file.Path;
import java.util.Map;
import java.util.regex.Pattern;
public class PrebuiltCxxLibraryDescriptionTest {
private static final BuildTarget TARGET = BuildTargetFactory.newInstance("//:target");
private static final BuildTarget TARGET_TWO = BuildTargetFactory.newInstance("//two/:target");
private static final CxxPlatform CXX_PLATFORM = PrebuiltCxxLibraryBuilder.createDefaultPlatform();
private static Path getStaticLibraryPath(PrebuiltCxxLibraryDescription.Arg arg) {
String libDir = arg.libDir.or("lib");
String libName = arg.libName.or(TARGET.getShortName());
return TARGET.getBasePath().resolve(libDir).resolve(
String.format("lib%s.a", libName));
}
private static Path getStaticPicLibraryPath(PrebuiltCxxLibraryDescription.Arg arg) {
String libDir = arg.libDir.or("lib");
String libName = arg.libName.or(TARGET.getShortName());
return TARGET.getBasePath().resolve(libDir).resolve(
String.format("lib%s_pic.a", libName));
}
private static Path getSharedLibraryPath(PrebuiltCxxLibraryDescription.Arg arg) {
String libDir = arg.libDir.or("lib");
String libName = arg.libName.or(TARGET.getShortName());
return TARGET.getBasePath().resolve(libDir).resolve(
String.format("lib%s.%s", libName, CXX_PLATFORM.getSharedLibraryExtension()));
}
private static String getSharedLibrarySoname(PrebuiltCxxLibraryDescription.Arg arg) {
String libName = arg.libName.or(TARGET.getShortName());
return arg.soname.or(
String.format("lib%s.%s", libName, CXX_PLATFORM.getSharedLibraryExtension()));
}
private static ImmutableList<Path> getIncludeDirs(PrebuiltCxxLibraryDescription.Arg arg) {
return FluentIterable
.from(arg.includeDirs.or(ImmutableList.of("include")))
.transform(
new Function<String, Path>() {
@Override
public Path apply(String input) {
return TARGET.getBasePath().resolve(input);
}
})
.toList();
}
private static ImmutableSet<BuildTarget> getInputRules(BuildRule buildRule) {
return ImmutableSet.of(
BuildTarget.builder()
.from(buildRule.getBuildTarget())
.addFlavors(CXX_PLATFORM.getFlavor())
.addFlavors(CxxDescriptionEnhancer.EXPORTED_HEADER_SYMLINK_TREE_FLAVOR)
.build());
}
private static ImmutableSet<Path> getHeaderNames(Iterable<CxxHeaders> includes) {
ImmutableSet.Builder<Path> names = ImmutableSet.builder();
for (CxxHeaders headers : includes) {
CxxSymlinkTreeHeaders symlinkTreeHeaders = (CxxSymlinkTreeHeaders) headers;
names.addAll(symlinkTreeHeaders.getNameToPathMap().keySet());
}
return names.build();
}
  /**
   * With no explicit attributes, a prebuilt C++ library should expose its default
   * include dirs and link against the conventionally-named static/shared libs.
   */
  @Test
  public void createBuildRuleDefault() throws Exception {
    ProjectFilesystem filesystem = new AllExistingProjectFilesystem();
    PrebuiltCxxLibraryBuilder libBuilder = new PrebuiltCxxLibraryBuilder(TARGET);
    TargetGraph targetGraph = TargetGraphFactory.newInstance(libBuilder.build());
    BuildRuleResolver resolver =
        new BuildRuleResolver(targetGraph, new DefaultTargetNodeToBuildRuleTransformer());
    SourcePathResolver pathResolver = new SourcePathResolver(resolver);
    PrebuiltCxxLibrary lib = (PrebuiltCxxLibrary) libBuilder
        .build(resolver, filesystem, targetGraph);
    PrebuiltCxxLibraryDescription.Arg arg = libBuilder.build().getConstructorArg();
    // Verify the preprocessable input is as expected.
    CxxPreprocessorInput expectedCxxPreprocessorInput = CxxPreprocessorInput.builder()
        .addAllSystemIncludeRoots(getIncludeDirs(arg))
        .build();
    assertThat(
        lib.getCxxPreprocessorInput(
            CXX_PLATFORM,
            HeaderVisibility.PUBLIC),
        Matchers.equalTo(expectedCxxPreprocessorInput));
    // Verify static native linkable input.
    NativeLinkableInput expectedStaticLinkableInput = NativeLinkableInput.of(
        ImmutableList.<Arg>of(
            new SourcePathArg(
                pathResolver,
                new PathSourcePath(filesystem, getStaticLibraryPath(arg)))),
        ImmutableSet.<FrameworkPath>of(),
        ImmutableSet.<FrameworkPath>of());
    assertEquals(
        expectedStaticLinkableInput,
        lib.getNativeLinkableInput(CXX_PLATFORM, Linker.LinkableDepType.STATIC));
    // Verify shared native linkable input.
    NativeLinkableInput expectedSharedLinkableInput = NativeLinkableInput.of(
        ImmutableList.<Arg>of(
            new SourcePathArg(
                pathResolver,
                new PathSourcePath(filesystem, getSharedLibraryPath(arg)))),
        ImmutableSet.<FrameworkPath>of(),
        ImmutableSet.<FrameworkPath>of());
    assertEquals(
        expectedSharedLinkableInput,
        lib.getNativeLinkableInput(CXX_PLATFORM, Linker.LinkableDepType.SHARED));
  }
  /**
   * A header-only prebuilt library should still publish its include dirs but
   * contribute no linker args for either static or shared linking.
   */
  @Test
  public void headerOnly() throws Exception {
    ProjectFilesystem filesystem = new AllExistingProjectFilesystem();
    PrebuiltCxxLibraryBuilder libBuilder = new PrebuiltCxxLibraryBuilder(TARGET)
        .setHeaderOnly(true);
    TargetGraph targetGraph = TargetGraphFactory.newInstance(libBuilder.build());
    BuildRuleResolver resolver =
        new BuildRuleResolver(targetGraph, new DefaultTargetNodeToBuildRuleTransformer());
    PrebuiltCxxLibrary lib = (PrebuiltCxxLibrary) libBuilder
        .build(resolver, filesystem, targetGraph);
    PrebuiltCxxLibraryDescription.Arg arg = libBuilder.build().getConstructorArg();
    // Verify the preprocessable input is as expected.
    CxxPreprocessorInput expectedCxxPreprocessorInput =
        CxxPreprocessorInput.builder()
            .addAllSystemIncludeRoots(getIncludeDirs(arg))
            .build();
    assertThat(
        lib.getCxxPreprocessorInput(CXX_PLATFORM, HeaderVisibility.PUBLIC),
        Matchers.equalTo(expectedCxxPreprocessorInput));
    // Verify static native linkable input: empty args, since there is no archive.
    NativeLinkableInput expectedStaticLinkableInput = NativeLinkableInput.of(
        ImmutableList.<Arg>of(),
        ImmutableSet.<FrameworkPath>of(),
        ImmutableSet.<FrameworkPath>of());
    assertEquals(
        expectedStaticLinkableInput,
        lib.getNativeLinkableInput(CXX_PLATFORM, Linker.LinkableDepType.STATIC));
    // Verify shared native linkable input: likewise empty.
    NativeLinkableInput expectedSharedLinkableInput = NativeLinkableInput.of(
        ImmutableList.<Arg>of(),
        ImmutableSet.<FrameworkPath>of(),
        ImmutableSet.<FrameworkPath>of());
    assertEquals(
        expectedSharedLinkableInput,
        lib.getNativeLinkableInput(CXX_PLATFORM, Linker.LinkableDepType.SHARED));
  }
  /**
   * A "provided" prebuilt library is linked only in shared mode; verify its
   * include dirs and shared-link args.
   */
  @Test
  public void createBuildRuleExternal() throws Exception {
    ProjectFilesystem filesystem = new AllExistingProjectFilesystem();
    PrebuiltCxxLibraryBuilder libBuilder = new PrebuiltCxxLibraryBuilder(TARGET)
        .setProvided(true);
    TargetGraph targetGraph = TargetGraphFactory.newInstance(libBuilder.build());
    BuildRuleResolver resolver =
        new BuildRuleResolver(targetGraph, new DefaultTargetNodeToBuildRuleTransformer());
    SourcePathResolver pathResolver = new SourcePathResolver(resolver);
    PrebuiltCxxLibrary lib = (PrebuiltCxxLibrary) libBuilder
        .build(resolver, filesystem, targetGraph);
    PrebuiltCxxLibraryDescription.Arg arg = libBuilder.build().getConstructorArg();
    // Verify the preprocessable input is as expected.
    CxxPreprocessorInput expectedCxxPreprocessorInput = CxxPreprocessorInput.builder()
        .addAllSystemIncludeRoots(getIncludeDirs(arg))
        .build();
    assertThat(
        lib.getCxxPreprocessorInput(CXX_PLATFORM, HeaderVisibility.PUBLIC),
        Matchers.equalTo(expectedCxxPreprocessorInput));
    // Verify shared native linkable input.
    NativeLinkableInput expectedSharedLinkableInput = NativeLinkableInput.of(
        ImmutableList.<Arg>of(
            new SourcePathArg(
                pathResolver,
                new PathSourcePath(filesystem, getSharedLibraryPath(arg)))),
        ImmutableSet.<FrameworkPath>of(),
        ImmutableSet.<FrameworkPath>of());
    assertEquals(
        expectedSharedLinkableInput,
        lib.getNativeLinkableInput(CXX_PLATFORM, Linker.LinkableDepType.SHARED));
  }
  @Test
  public void missingSharedLibsAreAutoBuilt() throws Exception {
    // On a FakeProjectFilesystem the prebuilt .so does not exist, so requesting
    // shared linkage should synthesize a CxxLink rule that builds it, reachable
    // through the deps of the first linker arg.
    ProjectFilesystem filesystem = new FakeProjectFilesystem();
    PrebuiltCxxLibraryBuilder libBuilder = new PrebuiltCxxLibraryBuilder(TARGET);
    TargetGraph targetGraph = TargetGraphFactory.newInstance(libBuilder.build());
    BuildRuleResolver resolver =
        new BuildRuleResolver(targetGraph, new DefaultTargetNodeToBuildRuleTransformer());
    SourcePathResolver pathResolver = new SourcePathResolver(resolver);
    PrebuiltCxxLibrary lib = (PrebuiltCxxLibrary) libBuilder
        .build(resolver, filesystem, targetGraph);
    NativeLinkableInput nativeLinkableInput = lib.getNativeLinkableInput(
        CXX_PLATFORM,
        Linker.LinkableDepType.SHARED);
    BuildRule rule =
        FluentIterable.from(nativeLinkableInput.getArgs())
            .transformAndConcat(Arg.getDepsFunction(pathResolver))
            .toList()
            .get(0);
    assertTrue(rule instanceof CxxLink);
  }
  @Test
  public void missingSharedLibsAreNotAutoBuiltForHeaderOnlyRules() throws Exception {
    // A header-only library has no code to link, so even with the .so missing no
    // auto-build link rule should appear in the linkable input's deps.
    BuildRuleResolver resolver =
        new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
    SourcePathResolver pathResolver = new SourcePathResolver(resolver);
    ProjectFilesystem filesystem = new FakeProjectFilesystem();
    PrebuiltCxxLibraryBuilder libBuilder = new PrebuiltCxxLibraryBuilder(TARGET)
        .setHeaderOnly(true);
    TargetGraph targetGraph = TargetGraphFactory.newInstance(libBuilder.build());
    PrebuiltCxxLibrary lib = (PrebuiltCxxLibrary) libBuilder
        .build(resolver, filesystem, targetGraph);
    NativeLinkableInput nativeLinkableInput = lib.getNativeLinkableInput(
        CXX_PLATFORM,
        Linker.LinkableDepType.SHARED);
    assertThat(
        FluentIterable.from(nativeLinkableInput.getArgs())
            .transformAndConcat(Arg.getDepsFunction(pathResolver))
            .toList(),
        Matchers.empty());
  }
  @Test
  public void addsLibsToAndroidPackageableCollector() throws Exception {
    // Asserts that getSharedLibraries() maps the rule's soname to its shared
    // library path. NOTE(review): the method name mentions the Android packageable
    // collector, but only getSharedLibraries() is exercised here — confirm intent.
    BuildRuleResolver resolver =
        new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
    ProjectFilesystem filesystem = new AllExistingProjectFilesystem();
    PrebuiltCxxLibraryBuilder libBuilder = new PrebuiltCxxLibraryBuilder(TARGET);
    TargetGraph targetGraph = TargetGraphFactory.newInstance(libBuilder.build());
    PrebuiltCxxLibrary lib = (PrebuiltCxxLibrary) libBuilder
        .build(resolver, filesystem, targetGraph);
    PrebuiltCxxLibraryDescription.Arg arg = libBuilder.build().getConstructorArg();
    assertEquals(
        ImmutableMap.<String, SourcePath>of(
            getSharedLibrarySoname(arg),
            new PathSourcePath(filesystem, getSharedLibraryPath(arg))),
        lib.getSharedLibraries(CXX_PLATFORM));
  }
  @Test
  public void locationMacro() throws NoSuchBuildTargetException {
    // A $(location //other:gen_lib) macro in lib_dir should make the static
    // library path resolve under the referenced genrule's output directory.
    ProjectFilesystem filesystem = new AllExistingProjectFilesystem();
    Function<Optional<String>, Path> cellRoots = TestCellBuilder.createCellRoots(filesystem);
    Optional<String> libName = Optional.of("test");
    Optional<String> libDir = Optional.of("$(location //other:gen_lib)/");
    BuildRuleResolver resolver =
        new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
    SourcePathResolver pathResolver = new SourcePathResolver(resolver);
    BuildTarget genTarget = BuildTargetFactory.newInstance("//other:gen_lib");
    GenruleBuilder genruleBuilder = GenruleBuilder
        .newGenruleBuilder(genTarget)
        .setOut("lib_dir");
    BuildRule genRule = genruleBuilder.build(resolver);
    CxxPlatform platform =
        CxxPlatformUtils.DEFAULT_PLATFORM
            .withFlavor(ImmutableFlavor.of("PLATFORM1"));
    // Absolute path of the genrule output that the macro should expand to.
    Path path = Preconditions.checkNotNull(genRule.getPathToOutput()).toAbsolutePath();
    final SourcePath staticLibraryPath = PrebuiltCxxLibraryDescription.getStaticLibraryPath(
        TARGET,
        cellRoots,
        filesystem,
        resolver,
        platform,
        libDir,
        libName);
    assertEquals(
        TARGET.getBasePath().resolve(String.format("%s/libtest.a", path)),
        pathResolver.getAbsolutePath(staticLibraryPath));
  }
  @Test
  public void goodPathNoLocation() {
    // A plain (macro-free) lib_dir resolves relative to the target's base path;
    // with no lib_name, the default "lib<shortname>.a" naming is used.
    ProjectFilesystem filesystem = new AllExistingProjectFilesystem();
    BuildRuleResolver resolver =
        new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
    SourcePathResolver pathResolver = new SourcePathResolver(resolver);
    CxxPlatform platform =
        CxxPlatformUtils.DEFAULT_PLATFORM
            .withFlavor(ImmutableFlavor.of("PLATFORM1"));
    final SourcePath staticLibraryPath = PrebuiltCxxLibraryDescription.getStaticLibraryPath(
        TARGET_TWO,
        TestCellBuilder.createCellRoots(filesystem),
        filesystem,
        resolver,
        platform,
        Optional.of("lib"),
        Optional.<String>absent());
    assertThat(
        MorePaths.pathWithUnixSeparators(pathResolver.getAbsolutePath(staticLibraryPath)),
        Matchers.containsString(String.format("two/%s/libtarget.a", "lib")));
  }
  @Test
  public void findDepsFromParamsWithLocation() throws NoSuchBuildTargetException {
    // A $(location) macro in lib_dir should surface the referenced genrule both
    // as an implicit (parse-time) dep and as a build-time dep of the library rule.
    BuildRuleResolver resolver =
        new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
    BuildTarget genTarget = BuildTargetFactory.newInstance("//other:gen_lib");
    GenruleBuilder genruleBuilder = GenruleBuilder
        .newGenruleBuilder(genTarget)
        .setOut("lib_dir");
    BuildRule genrule = genruleBuilder.build(resolver);
    PrebuiltCxxLibraryBuilder builder = new PrebuiltCxxLibraryBuilder(TARGET);
    builder.setSoname("test");
    builder.setLibDir("$(location //other:gen_lib)");
    PrebuiltCxxLibrary lib = (PrebuiltCxxLibrary) builder.build(resolver);
    Iterable<BuildTarget> implicit = builder.findImplicitDeps();
    assertEquals(1, Iterables.size(implicit));
    assertTrue(Iterables.contains(implicit, genTarget));
    assertThat(
        lib.getDeps(),
        Matchers.contains(genrule));
  }
@Test
public void findDepsFromParamsWithNone() throws NoSuchBuildTargetException {
BuildRuleResolver resolver =
new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
PrebuiltCxxLibraryBuilder builder = new PrebuiltCxxLibraryBuilder(TARGET);
builder.setSoname("test");
builder.setLibDir("lib");
builder.build(resolver);
assertEquals(0, Iterables.size(builder.findImplicitDeps()));
}
@Test
public void findDepsFromParamsWithPlatform() throws NoSuchBuildTargetException {
BuildRuleResolver resolver =
new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
PrebuiltCxxLibraryBuilder builder = new PrebuiltCxxLibraryBuilder(TARGET);
builder.setSoname("test");
builder.setLibDir("$(platform)");
builder.build(resolver);
assertEquals(0, Iterables.size(builder.findImplicitDeps()));
}
  @Test
  public void platformMacro() {
    // The $(platform) macro in both lib_dir and lib_name should expand to the
    // platform's flavor name, for shared and static paths alike, and for each
    // platform independently.
    Optional<String> libDir = Optional.of("libs/$(platform)");
    Optional<String> libName = Optional.of("test-$(platform)");
    BuildRuleResolver resolver =
        new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
    SourcePathResolver pathResolver = new SourcePathResolver(resolver);
    ProjectFilesystem filesystem = new AllExistingProjectFilesystem();
    Function<Optional<String>, Path> cellRoots = TestCellBuilder.createCellRoots(filesystem);
    CxxPlatform platform1 =
        CxxPlatformUtils.DEFAULT_PLATFORM
            .withFlavor(ImmutableFlavor.of("PLATFORM1"));
    CxxPlatform platform2 =
        CxxPlatformUtils.DEFAULT_PLATFORM
            .withFlavor(ImmutableFlavor.of("PLATFORM2"));
    // PLATFORM1, shared library path.
    assertEquals(
        filesystem.resolve(
            TARGET.getBasePath()
                .resolve(
                    String.format(
                        "libs/PLATFORM1/libtest-PLATFORM1.%s",
                        platform1.getSharedLibraryExtension()))),
        pathResolver.getAbsolutePath(PrebuiltCxxLibraryDescription.getSharedLibraryPath(
            TARGET,
            cellRoots,
            filesystem,
            resolver,
            platform1,
            libDir,
            libName)));
    // PLATFORM1, static library path.
    assertEquals(
        filesystem.resolve(TARGET.getBasePath()
            .resolve("libs/PLATFORM1/libtest-PLATFORM1.a")),
        pathResolver.getAbsolutePath(PrebuiltCxxLibraryDescription.getStaticLibraryPath(
            TARGET,
            cellRoots,
            filesystem,
            resolver,
            platform1,
            libDir,
            libName)));
    // PLATFORM2, shared library path.
    assertEquals(
        filesystem.resolve(TARGET.getBasePath()
            .resolve(
                String.format(
                    "libs/PLATFORM2/libtest-PLATFORM2.%s",
                    platform2.getSharedLibraryExtension()))),
        pathResolver.getAbsolutePath(PrebuiltCxxLibraryDescription.getSharedLibraryPath(
            TARGET,
            cellRoots,
            filesystem,
            resolver,
            platform2,
            libDir,
            libName)));
    // PLATFORM2, static library path.
    assertEquals(
        filesystem.resolve(TARGET.getBasePath()
            .resolve("libs/PLATFORM2/libtest-PLATFORM2.a")),
        pathResolver.getAbsolutePath(PrebuiltCxxLibraryDescription.getStaticLibraryPath(
            TARGET,
            cellRoots,
            filesystem,
            resolver,
            platform2,
            libDir,
            libName)));
  }
  @Test
  public void exportedHeaders() throws Exception {
    // Exported headers should appear in the public preprocessor input, and the
    // input's deps should be exactly the rules backing that input.
    ProjectFilesystem filesystem = new AllExistingProjectFilesystem();
    PrebuiltCxxLibraryBuilder libBuilder = new PrebuiltCxxLibraryBuilder(TARGET)
        .setExportedHeaders(
            SourceList.ofNamedSources(
                ImmutableSortedMap.<String, SourcePath>of(
                    "foo.h",
                    new FakeSourcePath("foo.h"))));
    TargetGraph targetGraph = TargetGraphFactory.newInstance(libBuilder.build());
    BuildRuleResolver resolver =
        new BuildRuleResolver(targetGraph, new DefaultTargetNodeToBuildRuleTransformer());
    SourcePathResolver pathResolver = new SourcePathResolver(resolver);
    PrebuiltCxxLibrary lib = (PrebuiltCxxLibrary) libBuilder
        .build(resolver, filesystem, targetGraph);
    // Verify the preprocessable input is as expected.
    CxxPreprocessorInput input = lib.getCxxPreprocessorInput(CXX_PLATFORM, HeaderVisibility.PUBLIC);
    assertThat(
        getHeaderNames(input.getIncludes()),
        Matchers.hasItem(filesystem.getRootPath().getFileSystem().getPath("foo.h")));
    assertThat(
        ImmutableSortedSet.copyOf(input.getDeps(resolver, pathResolver)),
        Matchers.equalTo(resolver.getAllRules(getInputRules(lib))));
  }
@Test
public void exportedPlatformHeaders() throws Exception {
ProjectFilesystem filesystem = new AllExistingProjectFilesystem();
PrebuiltCxxLibraryBuilder libBuilder = new PrebuiltCxxLibraryBuilder(TARGET)
.setExportedPlatformHeaders(
PatternMatchedCollection.<SourceList>builder()
.add(
Pattern.compile(CXX_PLATFORM.getFlavor().toString()),
SourceList.ofNamedSources(
ImmutableSortedMap.<String, SourcePath>of(
"foo.h",
new FakeSourcePath("foo.h"))))
.add(
Pattern.compile("DO NOT MATCH ANYTNING"),
SourceList.ofNamedSources(
ImmutableSortedMap.<String, SourcePath>of(
"bar.h",
new FakeSourcePath("bar.h"))))
.build());
TargetGraph targetGraph = TargetGraphFactory.newInstance(libBuilder.build());
BuildRuleResolver resolver =
new BuildRuleResolver(targetGraph, new DefaultTargetNodeToBuildRuleTransformer());
SourcePathResolver pathResolver = new SourcePathResolver(resolver);
PrebuiltCxxLibrary lib = (PrebuiltCxxLibrary) libBuilder
.build(resolver, filesystem, targetGraph);
// Verify the preprocessable input is as expected.
CxxPreprocessorInput input = lib.getCxxPreprocessorInput(CXX_PLATFORM, HeaderVisibility.PUBLIC);
assertThat(
getHeaderNames(input.getIncludes()),
Matchers.hasItem(filesystem.getRootPath().getFileSystem().getPath("foo.h")));
assertThat(
getHeaderNames(input.getIncludes()),
Matchers.not(Matchers.hasItem(filesystem.getRootPath().getFileSystem().getPath("bar.h"))));
assertThat(
ImmutableSortedSet.copyOf(input.getDeps(resolver, pathResolver)),
Matchers.equalTo(resolver.getAllRules(getInputRules(lib))));
}
  @Test
  public void testBuildSharedWithDep() throws Exception {
    // Building the SHARED flavor of a prebuilt library whose lib_dir comes from a
    // $(location) macro should depend on the referenced genrule.
    ProjectFilesystem filesystem = new FakeProjectFilesystem();
    BuildRuleResolver resolver =
        new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
    CxxPlatform platform = CxxLibraryBuilder.createDefaultPlatform();
    BuildRule genSrc =
        GenruleBuilder.newGenruleBuilder(BuildTargetFactory.newInstance("//:gen_libx"))
            .setOut("gen_libx")
            .setCmd("something")
            .build(resolver, filesystem);
    // Drop a placeholder file where libx.so is expected; its content is arbitrary.
    filesystem.writeContentsToPath(
        "class Test {}",
        new File(Preconditions.checkNotNull(genSrc.getPathToOutput()).toString(), "libx.so")
            .toPath());
    BuildTarget target = BuildTargetFactory.newInstance("//:x")
        .withFlavors(platform.getFlavor(), CxxDescriptionEnhancer.SHARED_FLAVOR);
    PrebuiltCxxLibraryBuilder builder = new PrebuiltCxxLibraryBuilder(target)
        .setLibName("x")
        .setLibDir("$(location //:gen_libx)");
    CxxLink lib = (CxxLink) builder.build(resolver, filesystem);
    assertNotNull(lib);
    assertThat(lib.getDeps(), Matchers.contains(genSrc));
  }
  @Test
  public void headerNamespace() throws Exception {
    // header_namespace should prefix exported header names, so "foo.h" is exposed
    // as "hello/foo.h".
    ProjectFilesystem filesystem = new AllExistingProjectFilesystem();
    PrebuiltCxxLibraryBuilder libBuilder = new PrebuiltCxxLibraryBuilder(TARGET)
        .setHeaderNamespace("hello")
        .setExportedHeaders(
            SourceList.ofUnnamedSources(
                ImmutableSortedSet.<SourcePath>of(new FakeSourcePath("foo.h"))));
    TargetGraph targetGraph = TargetGraphFactory.newInstance(libBuilder.build());
    BuildRuleResolver resolver =
        new BuildRuleResolver(targetGraph, new DefaultTargetNodeToBuildRuleTransformer());
    PrebuiltCxxLibrary lib = (PrebuiltCxxLibrary) libBuilder
        .build(resolver, filesystem, targetGraph);
    // Verify the preprocessable input is as expected.
    CxxPreprocessorInput input = lib.getCxxPreprocessorInput(CXX_PLATFORM, HeaderVisibility.PUBLIC);
    assertThat(
        getHeaderNames(input.getIncludes()),
        Matchers.contains(filesystem.getRootPath().getFileSystem().getPath("hello", "foo.h")));
  }
  @Test
  public void staticPicLibsUseCorrectPath() throws Exception {
    // STATIC_PIC linkage should reference the PIC variant of the static library
    // (everything "exists" on an AllExistingProjectFilesystem).
    BuildRuleResolver resolver =
        new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
    ProjectFilesystem filesystem = new AllExistingProjectFilesystem();
    PrebuiltCxxLibraryBuilder libBuilder = new PrebuiltCxxLibraryBuilder(TARGET);
    TargetGraph targetGraph = TargetGraphFactory.newInstance(libBuilder.build());
    PrebuiltCxxLibrary lib = (PrebuiltCxxLibrary) libBuilder
        .build(resolver, filesystem, targetGraph);
    NativeLinkableInput nativeLinkableInput =
        lib.getNativeLinkableInput(
            CXX_PLATFORM,
            Linker.LinkableDepType.STATIC_PIC);
    assertThat(
        Arg.stringify(nativeLinkableInput.getArgs()).get(0),
        Matchers.endsWith(
            getStaticPicLibraryPath(libBuilder.build().getConstructorArg()).toString()));
  }
  @Test
  public void missingStaticPicLibsUseStaticLibs() throws Exception {
    // NOTE(review): this test touches the static-PIC library file and then asserts
    // that the static-PIC path is used — but the name suggests it intended to touch
    // only the plain static lib and assert a fallback to it. Verify the fixture.
    BuildRuleResolver resolver =
        new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
    ProjectFilesystem filesystem = new FakeProjectFilesystem();
    PrebuiltCxxLibraryBuilder libBuilder = new PrebuiltCxxLibraryBuilder(TARGET);
    filesystem.touch(filesystem.getAbsolutifier().apply(
        getStaticPicLibraryPath(libBuilder.build().getConstructorArg())));
    TargetGraph targetGraph = TargetGraphFactory.newInstance(libBuilder.build());
    PrebuiltCxxLibrary lib = (PrebuiltCxxLibrary) libBuilder
        .build(resolver, filesystem, targetGraph);
    NativeLinkableInput nativeLinkableInput = lib.getNativeLinkableInput(
        CXX_PLATFORM,
        Linker.LinkableDepType.STATIC_PIC);
    assertThat(
        Arg.stringify(nativeLinkableInput.getArgs()).get(0),
        Matchers.endsWith(
            getStaticPicLibraryPath(
                libBuilder.build().getConstructorArg()).toString()));
  }
  @Test
  public void forceStatic() throws Exception {
    // force_static should make STATIC linkage resolve to a .a archive and leave
    // the rule with no shared libraries at all.
    BuildRuleResolver resolver =
        new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
    ProjectFilesystem filesystem = new FakeProjectFilesystem();
    PrebuiltCxxLibrary prebuiltCxxLibrary =
        (PrebuiltCxxLibrary) new PrebuiltCxxLibraryBuilder(TARGET)
            .setForceStatic(true)
            .build(resolver, filesystem);
    NativeLinkableInput nativeLinkableInput =
        prebuiltCxxLibrary.getNativeLinkableInput(
            CxxPlatformUtils.DEFAULT_PLATFORM,
            Linker.LinkableDepType.STATIC);
    assertThat(
        Arg.stringify(nativeLinkableInput.getArgs()).get(0),
        Matchers.endsWith(".a"));
    assertThat(
        prebuiltCxxLibrary.getSharedLibraries(
            CxxPlatformUtils.DEFAULT_PLATFORM)
            .entrySet(),
        Matchers.empty());
  }
  @Test
  public void exportedLinkerFlagsAreUsedToBuildSharedLibrary() throws Exception {
    // exported_linker_flags should be threaded into the args of the CxxLink rule
    // produced for the SHARED flavor.
    // NOTE(review): setForceStatic(true) while building the SHARED flavor looks
    // intentional (flags must apply regardless) but is worth confirming.
    BuildRuleResolver resolver =
        new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
    BuildTarget target =
        BuildTarget.builder(BuildTargetFactory.newInstance("//:lib"))
            .addFlavors(CxxDescriptionEnhancer.SHARED_FLAVOR)
            .addFlavors(CxxPlatformUtils.DEFAULT_PLATFORM.getFlavor())
            .build();
    CxxLink cxxLink =
        (CxxLink) new PrebuiltCxxLibraryBuilder(target)
            .setExportedLinkerFlags(ImmutableList.of("--some-flag"))
            .setForceStatic(true)
            .build(resolver);
    assertThat(
        Arg.stringify(cxxLink.getArgs()),
        Matchers.hasItem("--some-flag"));
  }
@Test
public void nativeLinkableDeps() throws Exception {
BuildRuleResolver resolver =
new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
PrebuiltCxxLibrary dep =
(PrebuiltCxxLibrary) new PrebuiltCxxLibraryBuilder(BuildTargetFactory.newInstance("//:dep"))
.build(resolver);
PrebuiltCxxLibrary rule =
(PrebuiltCxxLibrary) new PrebuiltCxxLibraryBuilder(BuildTargetFactory.newInstance("//:r"))
.setDeps(ImmutableSortedSet.of(dep.getBuildTarget()))
.build(resolver);
assertThat(
rule.getNativeLinkableDeps(CxxLibraryBuilder.createDefaultPlatform()),
Matchers.<NativeLinkable>contains(dep));
assertThat(
ImmutableList.copyOf(
rule.getNativeLinkableExportedDeps(CxxLibraryBuilder.createDefaultPlatform())),
Matchers.<NativeLinkable>empty());
}
@Test
public void nativeLinkableExportedDeps() throws Exception {
BuildRuleResolver resolver =
new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
PrebuiltCxxLibrary dep =
(PrebuiltCxxLibrary) new PrebuiltCxxLibraryBuilder(BuildTargetFactory.newInstance("//:dep"))
.build(resolver);
PrebuiltCxxLibrary rule =
(PrebuiltCxxLibrary) new PrebuiltCxxLibraryBuilder(BuildTargetFactory.newInstance("//:r"))
.setExportedDeps(ImmutableSortedSet.of(dep.getBuildTarget()))
.build(resolver);
assertThat(
ImmutableList.copyOf(rule.getNativeLinkableDeps(CxxLibraryBuilder.createDefaultPlatform())),
Matchers.<NativeLinkable>empty());
assertThat(
rule.getNativeLinkableExportedDeps(CxxLibraryBuilder.createDefaultPlatform()),
Matchers.<NativeLinkable>contains(dep));
}
  @Test
  public void includesDirs() throws Exception {
    // include_dirs entries should surface as SYSTEM include roots rooted at the
    // target's base path.
    ProjectFilesystem filesystem = new FakeProjectFilesystem();
    PrebuiltCxxLibraryBuilder prebuiltCxxLibraryBuilder =
        new PrebuiltCxxLibraryBuilder(BuildTargetFactory.newInstance("//:r"))
            .setIncludeDirs(ImmutableList.of("include"));
    BuildRuleResolver resolver =
        new BuildRuleResolver(
            TargetGraphFactory.newInstance(prebuiltCxxLibraryBuilder.build()),
            new DefaultTargetNodeToBuildRuleTransformer());
    PrebuiltCxxLibrary rule =
        (PrebuiltCxxLibrary) prebuiltCxxLibraryBuilder.build(resolver, filesystem);
    assertThat(
        rule.getCxxPreprocessorInput(CxxPlatformUtils.DEFAULT_PLATFORM, HeaderVisibility.PUBLIC)
            .getIncludes(),
        Matchers.<CxxHeaders>contains(
            CxxHeadersDir.of(
                CxxPreprocessables.IncludeType.SYSTEM,
                new PathSourcePath(
                    filesystem,
                    rule.getBuildTarget().getBasePath().resolve("include")))));
  }
  @Test
  public void ruleWithoutHeadersDoesNotUseSymlinkTree() throws Exception {
    // With no headers and no include dirs, the public preprocessor input should
    // be completely empty: no includes, no system roots, no deps (i.e. no header
    // symlink-tree rule is created).
    ProjectFilesystem filesystem = new FakeProjectFilesystem();
    PrebuiltCxxLibraryBuilder prebuiltCxxLibraryBuilder =
        new PrebuiltCxxLibraryBuilder(BuildTargetFactory.newInstance("//:rule"))
            .setIncludeDirs(ImmutableList.<String>of());
    BuildRuleResolver resolver =
        new BuildRuleResolver(
            TargetGraphFactory.newInstance(prebuiltCxxLibraryBuilder.build()),
            new DefaultTargetNodeToBuildRuleTransformer());
    SourcePathResolver pathResolver = new SourcePathResolver(resolver);
    PrebuiltCxxLibrary rule =
        (PrebuiltCxxLibrary) prebuiltCxxLibraryBuilder.build(resolver, filesystem);
    CxxPreprocessorInput input =
        rule.getCxxPreprocessorInput(CxxPlatformUtils.DEFAULT_PLATFORM, HeaderVisibility.PUBLIC);
    assertThat(
        getHeaderNames(input.getIncludes()),
        Matchers.<Path>empty());
    assertThat(
        input.getSystemIncludeRoots(),
        Matchers.<Path>empty());
    assertThat(
        ImmutableList.copyOf(input.getDeps(resolver, pathResolver)),
        Matchers.<BuildRule>empty());
  }
  @Test
  public void linkWithoutSoname() throws Exception {
    // link_without_soname should produce "-L<dir> -l<name>" style linker args
    // instead of referencing the shared library file by full path.
    ProjectFilesystem filesystem = new AllExistingProjectFilesystem();
    PrebuiltCxxLibraryBuilder prebuiltCxxLibraryBuilder =
        new PrebuiltCxxLibraryBuilder(BuildTargetFactory.newInstance("//:rule"))
            .setLinkWithoutSoname(true);
    BuildRuleResolver resolver =
        new BuildRuleResolver(
            TargetGraphFactory.newInstance(prebuiltCxxLibraryBuilder.build()),
            new DefaultTargetNodeToBuildRuleTransformer());
    PrebuiltCxxLibrary rule =
        (PrebuiltCxxLibrary) prebuiltCxxLibraryBuilder.build(resolver, filesystem);
    NativeLinkableInput input =
        rule.getNativeLinkableInput(CXX_PLATFORM, Linker.LinkableDepType.SHARED);
    assertThat(
        Arg.stringify(input.getArgs()),
        Matchers.contains(
            "-L" + filesystem.resolve(rule.getBuildTarget().getBasePath()).resolve("lib"),
            "-lrule"));
  }
@Test
public void missingStaticLibIsNotASharedNativeLinkTargetSoname() throws Exception {
ProjectFilesystem filesystem = new FakeProjectFilesystem();
PrebuiltCxxLibraryBuilder prebuiltCxxLibraryBuilder =
new PrebuiltCxxLibraryBuilder(BuildTargetFactory.newInstance("//:rule"));
BuildRuleResolver resolver =
new BuildRuleResolver(
TargetGraphFactory.newInstance(prebuiltCxxLibraryBuilder.build()),
new DefaultTargetNodeToBuildRuleTransformer());
PrebuiltCxxLibrary rule =
(PrebuiltCxxLibrary) prebuiltCxxLibraryBuilder.build(resolver, filesystem);
assertFalse(rule.getSharedNativeLinkTarget(CXX_PLATFORM).isPresent());
}
@Test
public void providedLibIsNotASharedNativeLinkTargetSoname() throws Exception {
ProjectFilesystem filesystem = new AllExistingProjectFilesystem();
PrebuiltCxxLibraryBuilder prebuiltCxxLibraryBuilder =
new PrebuiltCxxLibraryBuilder(BuildTargetFactory.newInstance("//:rule"))
.setProvided(true);
BuildRuleResolver resolver =
new BuildRuleResolver(
TargetGraphFactory.newInstance(prebuiltCxxLibraryBuilder.build()),
new DefaultTargetNodeToBuildRuleTransformer());
PrebuiltCxxLibrary rule =
(PrebuiltCxxLibrary) prebuiltCxxLibraryBuilder.build(resolver, filesystem);
assertFalse(rule.getSharedNativeLinkTarget(CXX_PLATFORM).isPresent());
}
@Test
public void existingStaticLibIsASharedNativeLinkTargetSoname() throws Exception {
ProjectFilesystem filesystem = new AllExistingProjectFilesystem();
PrebuiltCxxLibraryBuilder prebuiltCxxLibraryBuilder =
new PrebuiltCxxLibraryBuilder(BuildTargetFactory.newInstance("//:rule"));
BuildRuleResolver resolver =
new BuildRuleResolver(
TargetGraphFactory.newInstance(prebuiltCxxLibraryBuilder.build()),
new DefaultTargetNodeToBuildRuleTransformer());
PrebuiltCxxLibrary rule =
(PrebuiltCxxLibrary) prebuiltCxxLibraryBuilder.build(resolver, filesystem);
assertTrue(rule.getSharedNativeLinkTarget(CXX_PLATFORM).isPresent());
}
  @Test
  public void sharedNativeLinkTargetSoname() throws Exception {
    // An explicit soname should be reported verbatim by the shared native link
    // target.
    ProjectFilesystem filesystem = new AllExistingProjectFilesystem();
    PrebuiltCxxLibraryBuilder prebuiltCxxLibraryBuilder =
        new PrebuiltCxxLibraryBuilder(BuildTargetFactory.newInstance("//:rule"))
            .setSoname("libsoname.so");
    BuildRuleResolver resolver =
        new BuildRuleResolver(
            TargetGraphFactory.newInstance(prebuiltCxxLibraryBuilder.build()),
            new DefaultTargetNodeToBuildRuleTransformer());
    PrebuiltCxxLibrary rule =
        (PrebuiltCxxLibrary) prebuiltCxxLibraryBuilder.build(resolver, filesystem);
    assertThat(
        rule.getSharedNativeLinkTarget(CXX_PLATFORM).get()
            .getSharedNativeLinkTargetLibraryName(CXX_PLATFORM),
        Matchers.equalTo(Optional.of("libsoname.so")));
  }
  @Test
  public void sharedNativeLinkTargetDeps() throws Exception {
    // Deps of the rule should flow through to the shared native link target's deps.
    // NOTE(review): both //:dep and //:exported_dep are registered via
    // setExportedDeps here, despite the first local being named `dep` — possibly
    // setDeps was intended for it; verify.
    ProjectFilesystem filesystem = new AllExistingProjectFilesystem();
    BuildRuleResolver resolver =
        new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
    PrebuiltCxxLibrary dep =
        (PrebuiltCxxLibrary) new PrebuiltCxxLibraryBuilder(BuildTargetFactory.newInstance("//:dep"))
            .build(resolver, filesystem);
    PrebuiltCxxLibrary exportedDep =
        (PrebuiltCxxLibrary) new PrebuiltCxxLibraryBuilder(
            BuildTargetFactory.newInstance("//:exported_dep"))
            .build(resolver, filesystem);
    PrebuiltCxxLibrary rule =
        (PrebuiltCxxLibrary) new PrebuiltCxxLibraryBuilder(
            BuildTargetFactory.newInstance("//:rule"))
            .setExportedDeps(
                ImmutableSortedSet.of(dep.getBuildTarget(), exportedDep.getBuildTarget()))
            .build(resolver, filesystem);
    assertThat(
        ImmutableList.copyOf(
            rule.getSharedNativeLinkTarget(CXX_PLATFORM).get()
                .getSharedNativeLinkTargetDeps(CXX_PLATFORM)),
        Matchers.<NativeLinkable>hasItems(dep, exportedDep));
  }
  @Test
  public void sharedNativeLinkTargetInput() throws Exception {
    // exported_linker_flags should appear in the shared native link target's input.
    // NOTE(review): the link target is fetched for CXX_PLATFORM but its input for
    // CxxPlatformUtils.DEFAULT_PLATFORM — confirm the mix is intentional.
    ProjectFilesystem filesystem = new AllExistingProjectFilesystem();
    PrebuiltCxxLibraryBuilder ruleBuilder =
        new PrebuiltCxxLibraryBuilder(BuildTargetFactory.newInstance("//:rule"))
            .setExportedLinkerFlags(ImmutableList.of("--exported-flag"));
    BuildRuleResolver resolver =
        new BuildRuleResolver(
            TargetGraphFactory.newInstance(ruleBuilder.build()),
            new DefaultTargetNodeToBuildRuleTransformer());
    PrebuiltCxxLibrary rule = (PrebuiltCxxLibrary) ruleBuilder.build(resolver, filesystem);
    NativeLinkableInput input =
        rule.getSharedNativeLinkTarget(CXX_PLATFORM).get()
            .getSharedNativeLinkTargetInput(CxxPlatformUtils.DEFAULT_PLATFORM);
    assertThat(
        Arg.stringify(input.getArgs()),
        Matchers.hasItems("--exported-flag"));
  }
@Test
public void missingStaticLibPrefersSharedLinking() throws Exception {
ProjectFilesystem filesystem = new FakeProjectFilesystem();
PrebuiltCxxLibraryBuilder prebuiltCxxLibraryBuilder =
new PrebuiltCxxLibraryBuilder(BuildTargetFactory.newInstance("//:rule"));
BuildRuleResolver resolver =
new BuildRuleResolver(
TargetGraphFactory.newInstance(prebuiltCxxLibraryBuilder.build()),
new DefaultTargetNodeToBuildRuleTransformer());
PrebuiltCxxLibrary rule =
(PrebuiltCxxLibrary) prebuiltCxxLibraryBuilder.build(resolver, filesystem);
assertThat(
rule.getPreferredLinkage(CXX_PLATFORM),
Matchers.equalTo(NativeLinkable.Linkage.SHARED));
}
@Test
public void providedDoNotReturnSharedLibs() throws Exception {
ProjectFilesystem filesystem = new AllExistingProjectFilesystem();
PrebuiltCxxLibraryBuilder prebuiltCxxLibraryBuilder =
new PrebuiltCxxLibraryBuilder(BuildTargetFactory.newInstance("//:rule"))
.setProvided(true);
BuildRuleResolver resolver =
new BuildRuleResolver(
TargetGraphFactory.newInstance(prebuiltCxxLibraryBuilder.build()),
new DefaultTargetNodeToBuildRuleTransformer());
PrebuiltCxxLibrary rule =
(PrebuiltCxxLibrary) prebuiltCxxLibraryBuilder.build(resolver, filesystem);
assertThat(
rule.getSharedLibraries(CXX_PLATFORM).entrySet(),
Matchers.<Map.Entry<String, SourcePath>>empty());
}
@Test
public void headerOnlyLibPrefersAnyLinking() throws Exception {
ProjectFilesystem filesystem = new FakeProjectFilesystem();
PrebuiltCxxLibraryBuilder prebuiltCxxLibraryBuilder =
new PrebuiltCxxLibraryBuilder(BuildTargetFactory.newInstance("//:rule"))
.setHeaderOnly(true);
BuildRuleResolver resolver =
new BuildRuleResolver(
TargetGraphFactory.newInstance(prebuiltCxxLibraryBuilder.build()),
new DefaultTargetNodeToBuildRuleTransformer());
PrebuiltCxxLibrary rule =
(PrebuiltCxxLibrary) prebuiltCxxLibraryBuilder.build(resolver, filesystem);
assertThat(
rule.getPreferredLinkage(CXX_PLATFORM),
Matchers.equalTo(NativeLinkable.Linkage.ANY));
}
}
| |
/*L
* Copyright Duke Comprehensive Cancer Center
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/catrip/LICENSE.txt for details.
*/
package edu.pitt.cabig.cae.domain.general;
import java.sql.Date;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import org.hibernate.*;
import org.hibernate.cfg.*;
import edu.duke.catrip.cae.domain.general.Accession;
import edu.duke.catrip.cae.domain.general.Participant;
import edu.duke.catrip.cae.domain.general.Specimen;
import edu.pitt.cabig.cae.domain.breast.*;
import junit.framework.TestCase;
/**
 * This unit test verifies retrieval of all the CAE objects. It also tests the insertion of
 * Specimen, Accession, and Participant.
 * @testType unit
 */
public class HarmonizedObjectsTest extends TestCase {
private static int globalId = 44;
  // Unused command-line entry point; tests are executed through the JUnit runner.
  public static void main(String[] args) {
  }
  // No per-test fixture beyond what TestCase provides.
  protected void setUp() throws Exception {
    super.setUp();
  }
  // No per-test cleanup beyond what TestCase provides.
  protected void tearDown() throws Exception {
    super.tearDown();
  }
public static void testParticipant() {
Participant p = new Participant();
p.setId(Long.valueOf(globalId));
p.setLastName("last name");
p.setFirstName("first name");
p.setRace("race");
p.setEthnicity("ethnicity");
Set<edu.duke.catrip.cae.domain.general.Accession> accessionSet = new HashSet<Accession>();
for (int i = 0; i < 3; i++) {
edu.duke.catrip.cae.domain.general.Accession acc = new Accession();
acc.setAccessionDate(new Date(0));
acc.setDiseaseType("diseasetype");
acc.setSurgicalPathologyNumber("SurgicalPathologyNumber");
accessionSet.add(acc);
}
p.setAccessionCollection(accessionSet);
create(p);
}
public static void testBreastSpecimenCharacteristics() {
BreastSpecimenCharacteristics p = new BreastSpecimenCharacteristics();
p.setId(Long.valueOf(globalId));
p.setLaterality("laterality");
p.setLateralityMVR("lateralityMVR");
p.setLymphNodeSamplingProcedure("lymphNodeSamplingProcedure");
create(p);
}
public static void testSpecimen() {
edu.duke.catrip.cae.domain.general.Specimen p = new edu.duke.catrip.cae.domain.general.Specimen();
//p.setId(Long.valueOf(globalId));
p.setSurgicalLabel("SurgicalLabel");
Set<Specimen> objSet = new HashSet<Specimen>();
for (int i = 0; i < 2; i++) {
edu.duke.catrip.cae.domain.general.Specimen s = new edu.duke.catrip.cae.domain.general.Specimen();
s.setSurgicalLabel("SurgicalLabel " + i);
objSet.add(s);
}
p.setSpecimenCollection(objSet);
create(p);
}
public static void testEventParameters() {
Set<AnnotationSet> annSet = new HashSet<AnnotationSet>();
AnnotationEventParameters aep = new AnnotationEventParameters();
aep.setId(Long.valueOf(globalId));
aep.setTimeStamp(new Date(0));
aep.setSource("source");
aep.setSourceDate(new Date(0));
// create some annotations sets
for (int i = 0; i < 2; i++) {
ThreeDimensionalTumorSize set = new ThreeDimensionalTumorSize();
set.setId(Long.valueOf(i));
annSet.add(set);
}
aep.setAnnotationSetCollection(annSet);
create(aep);
}
public static void testAccession() {
Participant pp = new Participant();
pp.setId(Long.valueOf(globalId));
pp.setLastName("last name" );
pp.setFirstName("first name");
pp.setRace("race");
pp.setEthnicity("ethnicity");
Set<Accession> objSet = new HashSet<Accession>();
for (int i = 0; i < 3; i++) {
edu.duke.catrip.cae.domain.general.Accession p = new edu.duke.catrip.cae.domain.general.Accession();
p.setAccessionDate(new Date(0));
p.setDiseaseType("disease type " + i);
p.setSurgicalPathologyNumber("surgicalPathologyNumber");
objSet.add(p);
}
pp.setAccessionCollection(objSet);
create(pp);
}
public static void create(Object obj) {
Transaction tx = null;
Session session = currentSession();
try {
tx = session.beginTransaction();
session.save(obj);
tx.commit();
} catch (HibernateException e) {
e.printStackTrace();
if (tx != null )
tx.rollback();
assertTrue(false);
}
}
public static void testRetrieveOtherBreastCancerHistopathologicGrade() throws Exception {
Session session = currentSession();
Transaction tx = session.beginTransaction();
List result = new ArrayList();
result = session.createQuery("from OtherBreastCancerHistopathologicGrade").list();
tx.commit();
closeSession();
for (int i = 0; i<result.size(); i++) {
OtherBreastCancerHistopathologicGrade obj = (OtherBreastCancerHistopathologicGrade) result.get(i);
System.out.println("ID is " + obj.getId());
System.out.println("systemName is " + obj.getSystemName());
System.out.println("score is " + obj.getScore());
System.out.println("mitoticCount is " + obj.getMitoticCount());
System.out.println("systemName is " + obj.getScoreMVR());
}
}
/**
 * Loads every NottinghamHistopathologicGrade row and dumps its fields to
 * standard out.
 * Fixes: the totalScoreMVR line was mislabeled "systemName" (copy-paste),
 * and the initial ArrayList was allocated only to be discarded.
 */
public static void testRetrieveNottinghamHistopathologicGrade() throws Exception {
	Session session = currentSession();
	Transaction tx = session.beginTransaction();
	List result = session.createQuery("from NottinghamHistopathologicGrade").list();
	tx.commit();
	closeSession();
	for (int i = 0; i < result.size(); i++) {
		NottinghamHistopathologicGrade obj = (NottinghamHistopathologicGrade) result.get(i);
		System.out.println("ID is " + obj.getId());
		System.out.println("totalScoreMVR is " + obj.getTotalScoreMVR());
		System.out.println("MitoticCount is " + obj.getMitoticCount());
		System.out.println("NuclearPleomorphism is " + obj.getNuclearPleomorphism());
		System.out.println("total score is " + obj.getTotalScore());
		System.out.println("tubuleFormation " + obj.getTubuleFormation());
	}
}
/**
 * Loads every Participant, printing its scalar fields and iterating its
 * lazy/eager Accession collection.
 * Fixes: removed the pointless {@code int j; j = i;} indirection and the
 * ArrayList allocated only to be discarded.
 */
public static void testRetrieveParticipantAccession() throws Exception {
	Session session = currentSession();
	Transaction tx = session.beginTransaction();
	List result = session.createQuery("from edu.duke.catrip.cae.domain.general.Participant").list();
	tx.commit();
	closeSession();
	for (int i = 0; i < result.size(); i++) {
		Participant obj = (Participant) result.get(i);
		System.out.println("*********************");
		System.out.print("#" + (i + 1) + " - ");
		System.out.println("ID is : " + obj.getId());
		System.out.println("Last name is : " + obj.getLastName());
		System.out.println("First name is : " + obj.getFirstName());
		System.out.println("Accessions : ");
		System.out.println("is null ? : " + (obj.getAccessionCollection() == null));
		Collection sites = (Collection) obj.getAccessionCollection();
		Iterator itr = sites.iterator();
		while (itr.hasNext()) {
			Accession a = (Accession) itr.next();
			System.out.println(a.getId());
			System.out.println(a.getDiseaseType());
		}
	}
}
/**
 * Retrieves all InvasiveBreastCarcinoma rows and prints their scalar
 * properties plus the location and microcalcification-location collections.
 */
public static void testRetrieveInvasiveBreastCarcinoma() throws Exception {
	Session hibernateSession = currentSession();
	Transaction transaction = hibernateSession.beginTransaction();
	List rows = new ArrayList();
	rows = hibernateSession.createQuery("from InvasiveBreastCarcinoma").list();
	transaction.commit();
	closeSession();
	for (int index = 0; index < rows.size(); index++) {
		InvasiveBreastCarcinoma carcinoma = (InvasiveBreastCarcinoma) rows.get(index);
		System.out.println("ID is " + carcinoma.getId());
		System.out.println("LocationMVR is " + carcinoma.getLocationMVR());
		System.out.println("VenousLymphaticInvasion is " + carcinoma.getVenousLymphaticInvasion());
		List locations = (List) carcinoma.getLocation();
		for (Iterator cursor = locations.iterator(); cursor.hasNext();) {
			System.out.println(cursor.next());
		}
		List calcificationLocations = (List) carcinoma.getMicrocalcificationLocation();
		for (Iterator cursor = calcificationLocations.iterator(); cursor.hasNext();) {
			System.out.println(cursor.next());
		}
	}
}
/**
 * Retrieves all InvasiveBreastCarcinomaNeoplasmHistologicType rows and
 * prints their fields to standard out.
 */
public static void testRetrieveInvasiveBreastCarcinomaNeoplasmHistologicType() throws Exception {
	Session hibernateSession = currentSession();
	Transaction transaction = hibernateSession.beginTransaction();
	List rows = new ArrayList();
	rows = hibernateSession.createQuery("from InvasiveBreastCarcinomaNeoplasmHistologicType").list();
	transaction.commit();
	closeSession();
	for (int index = 0; index < rows.size(); index++) {
		InvasiveBreastCarcinomaNeoplasmHistologicType histologicType =
				(InvasiveBreastCarcinomaNeoplasmHistologicType) rows.get(index);
		System.out.println("ID is " + histologicType.getId());
		System.out.println("Name is " + histologicType.getName());
		System.out.println("name mvr is " + histologicType.getNameMVR());
		System.out.println("other name is " + histologicType.getOtherName());
	}
}
/**
 * Loads every ThreeDimensionalSize row (mapped to ThreeDimensionalTumorSize)
 * and dumps its fields to standard out.
 * Fixes: corrected the truncated "dditionalDimensionY" label, relabeled the
 * getMVR() line (it printed a stale "name mvr" caption), removed the useless
 * {@code int j; j = i;} indirection and the discarded ArrayList.
 * NOTE: the method name typo ("Retreive") is kept — renaming would change
 * the externally visible interface.
 */
public static void testRetreiveThreeDimensionalSize() throws Exception {
	Session session = currentSession();
	Transaction tx = session.beginTransaction();
	List result = session.createQuery("from ThreeDimensionalSize").list();
	tx.commit();
	closeSession();
	for (int i = 0; i < result.size(); i++) {
		ThreeDimensionalTumorSize obj = (ThreeDimensionalTumorSize) result.get(i);
		System.out.println("*********************");
		System.out.print("#" + (i + 1) + " - ");
		System.out.println("ID is : " + obj.getId());
		System.out.println("additionalDimensionY is : " + obj.getAdditionalDimensionY());
		System.out.println("MVR is : " + obj.getMVR());
		System.out.println("additionalDimensionZ : " + obj.getAdditionalDimensionZ());
		System.out.println("GreatestDimension : " + obj.getGreatestDimension());
	}
}
/**
 * Retrieves all AdditionalFindings rows and prints id and other-findings
 * text for each.
 */
public static void testRetrieveAdditionalFindings() throws Exception {
	Session hibernateSession = currentSession();
	Transaction transaction = hibernateSession.beginTransaction();
	List rows = new ArrayList();
	rows = hibernateSession.createQuery("from AdditionalFindings").list();
	transaction.commit();
	closeSession();
	for (int index = 0; index < rows.size(); index++) {
		AdditionalFindings findings = (AdditionalFindings) rows.get(index);
		System.out.println("ID is : " + findings.getId());
		System.out.println("other Findings is : " + findings.getOtherFindings());
	}
}
/**
 * Loads every BreastCancerTNMFinding row and dumps its fields to standard
 * out.
 * Fixes: deleted the commented-out dead code ({@code int j} and the stale
 * print) and the ArrayList allocated only to be discarded.
 */
public static void testRetrieveBreastCancerTNMFinding() throws Exception {
	Session session = currentSession();
	Transaction tx = session.beginTransaction();
	List result = session.createQuery("from BreastCancerTNMFinding").list();
	tx.commit();
	closeSession();
	for (int i = 0; i < result.size(); i++) {
		BreastCancerTNMFinding obj = (BreastCancerTNMFinding) result.get(i);
		System.out.println("ID is : " + obj.getId());
		System.out.println("DistantMetastasisFinding is : " + obj.getDistantMetastasisFinding());
		System.out.println("category is : " + obj.getCategory());
		System.out.println("OtherMetastaticAnatomicSite : " + obj.getOtherMetastaticAnatomicSite());
		System.out.println("PrimaryTumorFinding : " + obj.getPrimaryTumorFinding());
	}
}
// Shared Hibernate SessionFactory, built once on class load from the
// default configuration (hibernate.cfg.xml on the classpath).
private static final SessionFactory sessionFactory;
static {
	try {
		// Create the SessionFactory
		sessionFactory = new Configuration().configure().buildSessionFactory();
	} catch (Throwable ex) {
		// Make sure you log the exception, as it might be swallowed;
		// ExceptionInInitializerError makes the class unusable, which is intended.
		System.err.println("Initial SessionFactory creation failed." + ex);
		throw new ExceptionInInitializerError(ex);
	}
}
// One Hibernate Session per thread (classic thread-local session pattern).
public static final ThreadLocal<Session> session = new ThreadLocal<Session>();
/**
 * Returns this thread's Session, opening and caching a new one on first use.
 *
 * @return the thread-bound Hibernate session (never null)
 * @throws HibernateException if a new session cannot be opened
 */
public static Session currentSession() throws HibernateException {
	// The ThreadLocal is already parameterized with Session, so the old
	// explicit (Session) cast was redundant and has been dropped.
	Session s = session.get();
	// Open a new Session, if this Thread has none yet
	if (s == null) {
		s = sessionFactory.openSession();
		session.set(s);
	}
	return s;
}
/**
 * Closes and unbinds this thread's Session, if one is bound.
 *
 * @throws HibernateException if closing the session fails
 */
public static void closeSession() throws HibernateException {
	Session s = session.get();
	// remove() (rather than set(null)) deletes the ThreadLocal entry
	// entirely, avoiding a stale slot lingering on pooled threads.
	session.remove();
	if (s != null)
		s.close();
}
}
| |
/* Copyright 2004, 2005, 2006 Acegi Technology Pty Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.security.authentication.jaas;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
import java.io.File;
import java.io.FileOutputStream;
import java.io.PrintWriter;
import java.net.URL;
import java.security.Security;
import java.util.*;
import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;
import org.junit.Before;
import org.junit.Test;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import org.springframework.core.io.FileSystemResource;
import org.springframework.security.authentication.LockedException;
import org.springframework.security.authentication.TestingAuthenticationToken;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.AuthenticationException;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.authority.AuthorityUtils;
import org.springframework.security.core.authority.SimpleGrantedAuthority;
import org.springframework.security.core.context.SecurityContext;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.security.core.context.SecurityContextImpl;
import org.springframework.security.core.session.SessionDestroyedEvent;
/**
* Tests for the JaasAuthenticationProvider
*
* @author Ray Krueger
*/
/**
 * Tests for the JaasAuthenticationProvider: XML wiring, success/failure
 * event publication, configuration validation and logout handling.
 *
 * Fixes: the fail() message in detectsMissingLoginConfig named the wrong
 * exception type; spacesInLoginConfigPathAreAccepted leaked its streams on
 * a write failure (now try-with-resources); testNullDefaultAuthorities now
 * uses assertEquals for a useful failure message; MockLoginContext.logout
 * is annotated @Override.
 *
 * @author Ray Krueger
 */
public class JaasAuthenticationProviderTests {
	// ~ Instance fields
	// ================================================================================================
	/** Application context loaded from the XML file named after this class. */
	private ApplicationContext context;
	/** Provider under test, wired in the XML context. */
	private JaasAuthenticationProvider jaasProvider;
	/** Bean that records published success/failure authentication events. */
	private JaasEventCheck eventCheck;

	// ~ Methods
	// ========================================================================================================
	@Before
	public void setUp() throws Exception {
		String resName = "/" + getClass().getName().replace('.', '/') + ".xml";
		context = new ClassPathXmlApplicationContext(resName);
		eventCheck = (JaasEventCheck) context.getBean("eventCheck");
		jaasProvider = (JaasAuthenticationProvider) context
				.getBean("jaasAuthenticationProvider");
	}

	@Test
	public void testBadPassword() {
		try {
			jaasProvider.authenticate(new UsernamePasswordAuthenticationToken("user",
					"asdf"));
			fail("LoginException should have been thrown for the bad password");
		}
		catch (AuthenticationException e) {
			// expected
		}
		assertNotNull("Failure event not fired", eventCheck.failedEvent);
		assertNotNull("Failure event exception was null",
				eventCheck.failedEvent.getException());
		assertNull("Success event was fired", eventCheck.successEvent);
	}

	@Test
	public void testBadUser() {
		try {
			jaasProvider.authenticate(new UsernamePasswordAuthenticationToken("asdf",
					"password"));
			fail("LoginException should have been thrown for the bad user");
		}
		catch (AuthenticationException e) {
			// expected
		}
		assertNotNull("Failure event not fired", eventCheck.failedEvent);
		assertNotNull("Failure event exception was null",
				eventCheck.failedEvent.getException());
		assertNull("Success event was fired", eventCheck.successEvent);
	}

	@Test
	public void testConfigurationLoop() throws Exception {
		String resName = "/" + getClass().getName().replace('.', '/') + ".conf";
		URL url = getClass().getResource(resName);
		Security.setProperty("login.config.url.1", url.toString());
		setUp();
		testFull();
	}

	@Test
	public void detectsMissingLoginConfig() throws Exception {
		JaasAuthenticationProvider myJaasProvider = new JaasAuthenticationProvider();
		myJaasProvider.setApplicationEventPublisher(context);
		myJaasProvider.setAuthorityGranters(jaasProvider.getAuthorityGranters());
		myJaasProvider.setCallbackHandlers(jaasProvider.getCallbackHandlers());
		myJaasProvider.setLoginContextName(jaasProvider.getLoginContextName());
		try {
			myJaasProvider.afterPropertiesSet();
			// Message fixed: the catch below expects IllegalArgumentException,
			// not the ApplicationContextException the old text claimed.
			fail("Should have thrown IllegalArgumentException");
		}
		catch (IllegalArgumentException expected) {
			assertTrue(expected.getMessage().startsWith("loginConfig must be set on"));
		}
	}

	// SEC-1239
	@Test
	public void spacesInLoginConfigPathAreAccepted() throws Exception {
		File configFile;
		// Create temp directory with a space in the name
		File configDir = new File(System.getProperty("java.io.tmpdir") + File.separator
				+ "jaas test");
		configDir.deleteOnExit();
		if (configDir.exists()) {
			configDir.delete();
		}
		configDir.mkdir();
		configFile = File.createTempFile("login", "conf", configDir);
		configFile.deleteOnExit();
		// try-with-resources guarantees both streams are closed even when
		// writing fails (the old code leaked them on exception).
		try (FileOutputStream fos = new FileOutputStream(configFile);
				PrintWriter pw = new PrintWriter(fos)) {
			pw.append("JAASTestBlah {"
					+ "org.springframework.security.authentication.jaas.TestLoginModule required;"
					+ "};");
			pw.flush();
		}
		JaasAuthenticationProvider myJaasProvider = new JaasAuthenticationProvider();
		myJaasProvider.setApplicationEventPublisher(context);
		myJaasProvider.setLoginConfig(new FileSystemResource(configFile));
		myJaasProvider.setAuthorityGranters(jaasProvider.getAuthorityGranters());
		myJaasProvider.setCallbackHandlers(jaasProvider.getCallbackHandlers());
		myJaasProvider.setLoginContextName(jaasProvider.getLoginContextName());
		myJaasProvider.afterPropertiesSet();
	}

	@Test
	public void detectsMissingLoginContextName() throws Exception {
		JaasAuthenticationProvider myJaasProvider = new JaasAuthenticationProvider();
		myJaasProvider.setApplicationEventPublisher(context);
		myJaasProvider.setAuthorityGranters(jaasProvider.getAuthorityGranters());
		myJaasProvider.setCallbackHandlers(jaasProvider.getCallbackHandlers());
		myJaasProvider.setLoginConfig(jaasProvider.getLoginConfig());
		myJaasProvider.setLoginContextName(null);
		try {
			myJaasProvider.afterPropertiesSet();
			fail("Should have thrown IllegalArgumentException");
		}
		catch (IllegalArgumentException expected) {
			assertTrue(expected.getMessage()
					.startsWith("loginContextName must be set on"));
		}
		myJaasProvider.setLoginContextName("");
		try {
			myJaasProvider.afterPropertiesSet();
			fail("Should have thrown IllegalArgumentException");
		}
		catch (IllegalArgumentException expected) {
			assertTrue(expected.getMessage()
					.startsWith("loginContextName must be set on"));
		}
	}

	@Test
	public void testFull() throws Exception {
		UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(
				"user", "password", AuthorityUtils.createAuthorityList("ROLE_ONE"));
		assertTrue(jaasProvider.supports(UsernamePasswordAuthenticationToken.class));
		Authentication auth = jaasProvider.authenticate(token);
		assertNotNull(jaasProvider.getAuthorityGranters());
		assertNotNull(jaasProvider.getCallbackHandlers());
		assertNotNull(jaasProvider.getLoginConfig());
		assertNotNull(jaasProvider.getLoginContextName());
		Collection<? extends GrantedAuthority> list = auth.getAuthorities();
		Set<String> set = AuthorityUtils.authorityListToSet(list);
		assertFalse("GrantedAuthorities should not contain ROLE_1",
				set.contains("ROLE_ONE"));
		assertTrue("GrantedAuthorities should contain ROLE_TEST1",
				set.contains("ROLE_TEST1"));
		assertTrue("GrantedAuthorities should contain ROLE_TEST2",
				set.contains("ROLE_TEST2"));
		boolean foundit = false;
		for (GrantedAuthority a : list) {
			if (a instanceof JaasGrantedAuthority) {
				JaasGrantedAuthority grant = (JaasGrantedAuthority) a;
				assertNotNull("Principal was null on JaasGrantedAuthority",
						grant.getPrincipal());
				foundit = true;
			}
		}
		assertTrue("Could not find a JaasGrantedAuthority", foundit);
		assertNotNull("Success event should be fired", eventCheck.successEvent);
		assertEquals("Auth objects should be equal", auth,
				eventCheck.successEvent.getAuthentication());
		assertNull("Failure event should not be fired", eventCheck.failedEvent);
	}

	@Test
	public void testGetApplicationEventPublisher() throws Exception {
		assertNotNull(jaasProvider.getApplicationEventPublisher());
	}

	@Test
	public void testLoginExceptionResolver() {
		assertNotNull(jaasProvider.getLoginExceptionResolver());
		jaasProvider.setLoginExceptionResolver(new LoginExceptionResolver() {
			public AuthenticationException resolveException(LoginException e) {
				return new LockedException("This is just a test!");
			}
		});
		try {
			jaasProvider.authenticate(new UsernamePasswordAuthenticationToken("user",
					"password"));
		}
		catch (LockedException e) {
			// expected: the custom resolver maps every LoginException to LockedException
		}
		catch (Exception e) {
			fail("LockedException should have been thrown and caught");
		}
	}

	@Test
	public void testLogout() throws Exception {
		MockLoginContext loginContext = new MockLoginContext(
				jaasProvider.getLoginContextName());
		JaasAuthenticationToken token = new JaasAuthenticationToken(null, null,
				loginContext);
		SecurityContext context = SecurityContextHolder.createEmptyContext();
		context.setAuthentication(token);
		SessionDestroyedEvent event = mock(SessionDestroyedEvent.class);
		when(event.getSecurityContexts()).thenReturn(Arrays.asList(context));
		jaasProvider.handleLogout(event);
		assertTrue(loginContext.loggedOut);
	}

	@Test
	public void testNullDefaultAuthorities() {
		UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(
				"user", "password");
		assertTrue(jaasProvider.supports(UsernamePasswordAuthenticationToken.class));
		Authentication auth = jaasProvider.authenticate(token);
		// assertEquals reports expected/actual on failure, unlike the old
		// assertTrue(size() == 2).
		assertEquals("Only ROLE_TEST1 and ROLE_TEST2 should have been returned", 2,
				auth.getAuthorities().size());
	}

	@Test
	public void testUnsupportedAuthenticationObjectReturnsNull() {
		assertNull(jaasProvider.authenticate(new TestingAuthenticationToken("foo", "bar",
				AuthorityUtils.NO_AUTHORITIES)));
	}

	// ~ Inner Classes
	// ==================================================================================================

	/** LoginContext stub that only records whether logout() was invoked. */
	private static class MockLoginContext extends LoginContext {
		boolean loggedOut = false;

		public MockLoginContext(String loginModule) throws LoginException {
			super(loginModule);
		}

		@Override
		public void logout() throws LoginException {
			this.loggedOut = true;
		}
	}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.admob.v1.model;
/**
* The specification for generating an AdMob Mediation report. For example, the specification to get
* observed ECPM sliced by ad source and app for the 'US' and 'CN' countries can look like the
* following example: { "date_range": { "start_date": {"year": 2021, "month": 9, "day": 1},
* "end_date": {"year": 2021, "month": 9, "day": 30} }, "dimensions": ["AD_SOURCE", "APP",
* "COUNTRY"], "metrics": ["OBSERVED_ECPM"], "dimension_filters": [ { "dimension": "COUNTRY",
* "matches_any": {"values": [{"value": "US", "value": "CN"}]} } ], "sort_conditions": [
* {"dimension":"APP", order: "ASCENDING"} ], "localization_settings": { "currency_code": "USD",
* "language_code": "en-US" } } For a better understanding, you can treat the preceding
* specification like the following pseudo SQL: SELECT AD_SOURCE, APP, COUNTRY, OBSERVED_ECPM FROM
* MEDIATION_REPORT WHERE DATE >= '2021-09-01' AND DATE <= '2021-09-30' AND COUNTRY IN ('US', 'CN')
* GROUP BY AD_SOURCE, APP, COUNTRY ORDER BY APP ASC;
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the AdMob API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
// NOTE(review): generated model class (google-api-java-client-services) —
// code left untouched; only javadoc wording was tidied.
@SuppressWarnings("javadoc")
public final class MediationReportSpec extends com.google.api.client.json.GenericJson {

  /**
   * The date range for which the report is generated.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private DateRange dateRange;

  /**
   * Describes which report rows to match based on their dimension values.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<MediationReportSpecDimensionFilter> dimensionFilters;

  /**
   * List of dimensions of the report. The value combination of these dimensions determines the row
   * of the report. If no dimensions are specified, the report returns a single row of requested
   * metrics for the entire account.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<java.lang.String> dimensions;

  /**
   * Localization settings of the report.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private LocalizationSettings localizationSettings;

  /**
   * Maximum number of report data rows to return. If the value is not set, the API returns as many
   * rows as possible, up to 100000. Acceptable values are 1-100000, inclusive. Values larger than
   * 100000 return an error.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Integer maxReportRows;

  /**
   * List of metrics of the report. A report must specify at least one metric.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<java.lang.String> metrics;

  /**
   * Describes the sorting of report rows. The order of the condition in the list defines its
   * precedence; the earlier the condition, the higher its precedence. If no sort conditions are
   * specified, the row ordering is undefined.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<MediationReportSpecSortCondition> sortConditions;

  /**
   * A report time zone. Accepts IANA TZ name values, such as "America/Los_Angeles." If no time
   * zone is defined, the account default takes effect. Check the default value by the get account
   * action. **Warning:** "America/Los_Angeles" is the only supported value at the moment.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String timeZone;

  /**
   * The date range for which the report is generated.
   * @return value or {@code null} for none
   */
  public DateRange getDateRange() {
    return dateRange;
  }

  /**
   * The date range for which the report is generated.
   * @param dateRange dateRange or {@code null} for none
   */
  public MediationReportSpec setDateRange(DateRange dateRange) {
    this.dateRange = dateRange;
    return this;
  }

  /**
   * Describes which report rows to match based on their dimension values.
   * @return value or {@code null} for none
   */
  public java.util.List<MediationReportSpecDimensionFilter> getDimensionFilters() {
    return dimensionFilters;
  }

  /**
   * Describes which report rows to match based on their dimension values.
   * @param dimensionFilters dimensionFilters or {@code null} for none
   */
  public MediationReportSpec setDimensionFilters(java.util.List<MediationReportSpecDimensionFilter> dimensionFilters) {
    this.dimensionFilters = dimensionFilters;
    return this;
  }

  /**
   * List of dimensions of the report. The value combination of these dimensions determines the row
   * of the report. If no dimensions are specified, the report returns a single row of requested
   * metrics for the entire account.
   * @return value or {@code null} for none
   */
  public java.util.List<java.lang.String> getDimensions() {
    return dimensions;
  }

  /**
   * List of dimensions of the report. The value combination of these dimensions determines the row
   * of the report. If no dimensions are specified, the report returns a single row of requested
   * metrics for the entire account.
   * @param dimensions dimensions or {@code null} for none
   */
  public MediationReportSpec setDimensions(java.util.List<java.lang.String> dimensions) {
    this.dimensions = dimensions;
    return this;
  }

  /**
   * Localization settings of the report.
   * @return value or {@code null} for none
   */
  public LocalizationSettings getLocalizationSettings() {
    return localizationSettings;
  }

  /**
   * Localization settings of the report.
   * @param localizationSettings localizationSettings or {@code null} for none
   */
  public MediationReportSpec setLocalizationSettings(LocalizationSettings localizationSettings) {
    this.localizationSettings = localizationSettings;
    return this;
  }

  /**
   * Maximum number of report data rows to return. If the value is not set, the API returns as many
   * rows as possible, up to 100000. Acceptable values are 1-100000, inclusive. Values larger than
   * 100000 return an error.
   * @return value or {@code null} for none
   */
  public java.lang.Integer getMaxReportRows() {
    return maxReportRows;
  }

  /**
   * Maximum number of report data rows to return. If the value is not set, the API returns as many
   * rows as possible, up to 100000. Acceptable values are 1-100000, inclusive. Values larger than
   * 100000 return an error.
   * @param maxReportRows maxReportRows or {@code null} for none
   */
  public MediationReportSpec setMaxReportRows(java.lang.Integer maxReportRows) {
    this.maxReportRows = maxReportRows;
    return this;
  }

  /**
   * List of metrics of the report. A report must specify at least one metric.
   * @return value or {@code null} for none
   */
  public java.util.List<java.lang.String> getMetrics() {
    return metrics;
  }

  /**
   * List of metrics of the report. A report must specify at least one metric.
   * @param metrics metrics or {@code null} for none
   */
  public MediationReportSpec setMetrics(java.util.List<java.lang.String> metrics) {
    this.metrics = metrics;
    return this;
  }

  /**
   * Describes the sorting of report rows. The order of the condition in the list defines its
   * precedence; the earlier the condition, the higher its precedence. If no sort conditions are
   * specified, the row ordering is undefined.
   * @return value or {@code null} for none
   */
  public java.util.List<MediationReportSpecSortCondition> getSortConditions() {
    return sortConditions;
  }

  /**
   * Describes the sorting of report rows. The order of the condition in the list defines its
   * precedence; the earlier the condition, the higher its precedence. If no sort conditions are
   * specified, the row ordering is undefined.
   * @param sortConditions sortConditions or {@code null} for none
   */
  public MediationReportSpec setSortConditions(java.util.List<MediationReportSpecSortCondition> sortConditions) {
    this.sortConditions = sortConditions;
    return this;
  }

  /**
   * A report time zone. Accepts IANA TZ name values, such as "America/Los_Angeles." If no time
   * zone is defined, the account default takes effect. Check the default value by the get account
   * action. **Warning:** "America/Los_Angeles" is the only supported value at the moment.
   * @return value or {@code null} for none
   */
  public java.lang.String getTimeZone() {
    return timeZone;
  }

  /**
   * A report time zone. Accepts IANA TZ name values, such as "America/Los_Angeles." If no time
   * zone is defined, the account default takes effect. Check the default value by the get account
   * action. **Warning:** "America/Los_Angeles" is the only supported value at the moment.
   * @param timeZone timeZone or {@code null} for none
   */
  public MediationReportSpec setTimeZone(java.lang.String timeZone) {
    this.timeZone = timeZone;
    return this;
  }

  @Override
  public MediationReportSpec set(String fieldName, Object value) {
    return (MediationReportSpec) super.set(fieldName, value);
  }

  @Override
  public MediationReportSpec clone() {
    return (MediationReportSpec) super.clone();
  }

}
| |
package com.web.libraries.imdbquery.api;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.UUID;
import org.reflections.Reflections;
import org.reflections.scanners.SubTypesScanner;
import org.reflections.util.ClasspathHelper;
import org.reflections.util.ConfigurationBuilder;
import org.reflections.util.FilterBuilder;
import com.web.libraries.imdbquery.api.configuration.WebRestQueryConfiguration;
import com.web.libraries.imdbquery.api.configuration.WebRestQueryConfigurationBuilder;
import com.web.libraries.imdbquery.api.controller.APIEngineFactory;
import com.web.libraries.imdbquery.api.exception.WebRestQueryException;
import com.web.libraries.imdbquery.api.types.APIEngine;
import com.web.libraries.imdbquery.api.types.ResultCollector;
import com.web.libraries.imdbquery.api.types.ResultFormatter;
import com.web.libraries.imdbquery.api.utils.Constants;
import com.web.libraries.imdbquery.api.utils.Logger;
/**
* Web Crawler Helper Class
* @author Fabrizio Torelli (hellgate75@gmail.com)
* @since Java 1.8
*/
public class WebRestQueryHelper {
// Default engine properties, populated from application.properties in the static initializer below.
private static Properties webRestQueryProperties = new Properties();
// Per-project configuration cache; presumably keyed by project name — TODO confirm (no reads visible in this view).
private static Map<String, WebRestQueryConfiguration> projectsConfigurationMap = new LinkedHashMap<String, WebRestQueryConfiguration>(0);
// Classpath scanner limited to the result-collector implementations package.
private final static Reflections collectorsReflections = new Reflections(new ConfigurationBuilder()
		.setUrls(ClasspathHelper.forPackage("com.web.libraries.imdbquery.command.collectors"))
		.setScanners(new SubTypesScanner())
		.filterInputsBy(new FilterBuilder().includePackage("com.web.libraries.imdbquery.command.collectors")));
// Classpath scanner limited to the result-formatter implementations package.
private final static Reflections formattersReflections = new Reflections(new ConfigurationBuilder()
		.setUrls(ClasspathHelper.forPackage("com.web.libraries.imdbquery.command.formatters"))
		.setScanners(new SubTypesScanner())
		.filterInputsBy(new FilterBuilder().includePackage("com.web.libraries.imdbquery.command.formatters")));
static {
	// Load bundled defaults. The old code nested a second try block inside the
	// try-with-resources with an identical catch/log; a single multi-catch
	// (NullPointerException covers a missing resource, i.e. a null stream)
	// preserves the behavior without the duplication.
	try (InputStream propertiesInputStream = ClassLoader.getSystemResourceAsStream("application.properties")) {
		Logger.debug("InputStream = " + propertiesInputStream, WebRestQueryHelper.class);
		webRestQueryProperties.load(propertiesInputStream);
	} catch (IOException | NullPointerException e) {
		Logger.error("Error loading default properties",e, WebRestQueryHelper.class);
	}
}
// Factory resolving --api selectors to concrete APIEngine implementations.
private static APIEngineFactory defaultApiFactory = new APIEngineFactory();
/**
 * Retrieve the list of api query engine default properties.
 * NOTE(review): this returns the live internal {@link Properties} object,
 * so callers can mutate the defaults — confirm whether a defensive copy
 * is wanted.
 * @return list of default properties
 */
public static final Properties getDefaultApiQueryProperties() {
	return webRestQueryProperties;
}
/**
 * Parse the command line arguments and create and execute the crawler or print the help.
 * In case of needing the usage is available typing as argument : --help
 *
 * Refactor: the six copy-pasted "match flag / extract value / log parse error"
 * branches now share the {@code argumentValue} and {@code reportParameterError}
 * helpers, and {@code toLowerCase()} is computed once per argument. Matching
 * still uses {@code indexOf} (flag anywhere in the token), as before.
 *
 * @param arguments command line arguments
 * @throws WebRestQueryException Exception occurring during the Crawler execution
 */
public static final void parseParametersAndRunCrawler(String[] arguments) throws WebRestQueryException {
	WebRestQueryConfigurationBuilder builder = new WebRestQueryConfigurationBuilder();
	for(String argument: arguments) {
		String lowerCaseArgument = argument.toLowerCase();
		if (lowerCaseArgument.indexOf("--" + Constants.COMMAND_PROPERTY_HELP_REQUEST)>=0) {
			printWebRESTAPIUsage(System.out);
			System.exit(0);
		}
		else if (lowerCaseArgument.indexOf("--" + Constants.COMMAND_PROPERTY_API_DB + "=")>=0) {
			try {
				builder.apiDatabase( defaultApiFactory.bySelector(argumentValue(argument)) );
			} catch (NullPointerException e) {
				reportParameterError(Constants.COMMAND_PROPERTY_API_DB, e);
			}
		}
		else if (lowerCaseArgument.indexOf("--" + Constants.COMMAND_PROPERTY_MOVIE_TITLE + "=")>=0) {
			try {
				builder.movieTitle( argumentValue(argument) );
			} catch (NullPointerException e) {
				reportParameterError(Constants.COMMAND_PROPERTY_MOVIE_TITLE, e);
			}
		}
		else if (lowerCaseArgument.indexOf("--" + Constants.COMMAND_PROPERTY_EXACT_SEARCH + "=")>=0) {
			try {
				builder.exactSearch( Boolean.parseBoolean(argumentValue(argument)) );
			} catch (NullPointerException e) {
				reportParameterError(Constants.COMMAND_PROPERTY_EXACT_SEARCH, e);
			}
		}
		else if (lowerCaseArgument.indexOf("--" + Constants.COMMAND_PROPERTY_PARALLEL_THREADS + "=")>=0) {
			try {
				builder.thredExtends( Integer.parseInt(argumentValue(argument)) );
			} catch (NumberFormatException|NullPointerException e) {
				reportParameterError(Constants.COMMAND_PROPERTY_PARALLEL_THREADS, e);
			}
		}
		else if (lowerCaseArgument.indexOf("--" + Constants.COMMAND_PROPERTY_REPORT_FORMATTER + "=")>=0) {
			try {
				builder.formatter( argumentValue(argument) );
			} catch (NullPointerException e) {
				reportParameterError(Constants.COMMAND_PROPERTY_REPORT_FORMATTER, e);
			}
		}
		else if (lowerCaseArgument.indexOf("--" + Constants.COMMAND_PROPERTY_REPORT_COLLETOR + "=")>=0) {
			try {
				builder.outputType( argumentValue(argument) );
			} catch (NullPointerException e) {
				reportParameterError(Constants.COMMAND_PROPERTY_REPORT_COLLETOR, e);
			}
		}
	}
	// System properties (-D...) are applied after the command line, falling
	// back to the configured defaults when a value cannot be resolved.
	if (System.getProperties().containsKey(Constants.COMMAND_PROPERTY_API_DB)) {
		try {
			builder.apiDatabase(defaultApiFactory.bySelector((String)System.getProperties().get(Constants.COMMAND_PROPERTY_API_DB)));
		} catch (NullPointerException e) {
			builder.apiDatabase(getDefultApiDatabase());
		}
	}
	if (System.getProperties().containsKey(Constants.COMMAND_PROPERTY_EXACT_SEARCH)) {
		try {
			builder.exactSearch( Boolean.parseBoolean((String)System.getProperties().get(Constants.COMMAND_PROPERTY_EXACT_SEARCH)) );
		} catch (NullPointerException e) {
			builder.exactSearch(getDefultExactSearch());
		}
	}
	if (System.getProperties().containsKey(Constants.COMMAND_PROPERTY_MOVIE_TITLE)) {
		builder.movieTitle((String)System.getProperties().get(Constants.COMMAND_PROPERTY_MOVIE_TITLE));
	}
	WebRestQueryConfiguration configuration = builder.build();
	ResultCollector collector=getCollector(configuration.getOutputType(),configuration.getFormatter());
	Logger.debug("configuration api db = " + configuration.getAPIDatabase(), WebRestQueryHelper.class);
	Logger.debug("configuration search for = " + configuration.getMovieTitle(), WebRestQueryHelper.class);
	new WebRestQuery(collector, configuration).start();
}

/**
 * Extracts the value portion of a {@code --name=value} argument.
 */
private static String argumentValue(String argument) {
	return argument.substring(argument.indexOf("=")+1);
}

/**
 * Logs a parameter-parsing failure and prints the usage text, preserving
 * the original "Error parsing parameter '<name>'" message.
 */
private static void reportParameterError(String parameterName, Exception cause) {
	String message = "Error parsing parameter '"+parameterName+"'";
	Logger.error(message, cause, WebRestQueryHelper.class);
	printWebRESTAPIUsage(System.out, message);
}
/**
 * Print the help usage text in a specific output stream.
 * Convenience overload that delegates with no leading message.
 * @param output The select help stream
 */
public static final void printWebRESTAPIUsage(PrintStream output) {
	printWebRESTAPIUsage(output, null);
}
/**
 * Returns the logger configuration file path, as read from the
 * {@code logging.config.path} entry of the helper properties.
 * @return The logging configuration file path, or null when not configured
 */
public static final String getLoggerConfigFile() {
    Object path = webRestQueryProperties.get("logging.config.path");
    return (String) path;
}
/**
 * Prints the command-line usage/help text on the given stream, optionally
 * preceded by a message (typically an error description).
 * @param output The selected help stream
 * @param message Optional message shown before the usage text; skipped when null
 */
public static final void printWebRESTAPIUsage(PrintStream output, String message) {
    if (message != null) {
        output.println("Message: " + message);
    }
    output.println("Usage: api-search [options]");
    output.println("samples: api-search --format=simple --api imdb --movie \"Indiana Jones\"");
    // Fixed typo in the sample command: "Herry Potter" -> "Harry Potter".
    output.println(" api-search --movie \"Harry Potter\"");
    output.println("options:");
    output.println("--" + Constants.COMMAND_PROPERTY_REPORT_FORMATTER + "\t\tIdentifies the site map format");
    output.println(" available formats :");
    for (ResultFormatter formatter : getFormatters())
        output.println(" " + formatter.getSelector() + "\t\t" + formatter.getDescription());
    output.println("default : " + getDefultFormatter());
    output.println("--" + Constants.COMMAND_PROPERTY_REPORT_COLLETOR + "\t\tIdentifies output device");
    output.println(" available output :");
    for (ResultCollector collector : getCollectors())
        output.println(" " + collector.getSelector() + "\t\t" + collector.getDescription());
    output.println("default : " + getDefultOutputType());
    output.println("--" + Constants.COMMAND_PROPERTY_PARALLEL_THREADS + "\t\tIdentifies the number of threads extension on the surf. This is the minimum number of threads");
    output.println("\t\t\trunning on the site hierarchy discovery");
    output.println("default : " + getDefultThredExtends());
    output.println("-D" + Constants.COMMAND_PROPERTY_MOVIE_TITLE + "=<value>\t\tIdentifies the movie title query name");
    output.println("--" + Constants.COMMAND_PROPERTY_MOVIE_TITLE + "\t\tIdentifies the movie title query name");
    output.println("default : " + getDefultMovieTitle());
    output.println("-D" + Constants.COMMAND_PROPERTY_EXACT_SEARCH + "=<value>\t\tIdentifies the exact title search");
    output.println("--" + Constants.COMMAND_PROPERTY_EXACT_SEARCH + "\t\tIdentifies the exact title search");
    output.println("default : " + getDefultExactSearch());
    output.println("-D" + Constants.COMMAND_PROPERTY_API_DB + "=<value>\t\tIdentifies the default used API database");
    output.println("--" + Constants.COMMAND_PROPERTY_API_DB + "\t\tIdentifies the default used API database");
    output.println(" available engines :");
    for (APIEngine engine : defaultApiFactory.getAvailableEngines()) {
        output.println(" " + engine.getSelector() + "\t\t" + engine.getDescription());
    }
    output.println("default : " + getDefultApiDatabase());
}
/**
 * Looks up the default movie title in the helper properties.
 * @return The configured default movie title, or null when the property
 *         table is absent or has no such entry
 */
public static final String getDefultMovieTitle() {
    if (webRestQueryProperties == null)
        return null;
    if (!webRestQueryProperties.containsKey(Constants.PROPERTIES_DEFAULT_MOVIE_TITLE))
        return null;
    return (String) webRestQueryProperties.get(Constants.PROPERTIES_DEFAULT_MOVIE_TITLE);
}
/**
 * Resolves the default API database engine. The selector is taken from the
 * helper properties when present; otherwise the property-name constant
 * itself is passed to the factory as the selector.
 * @return The default API database engine
 */
public static final APIEngine getDefultApiDatabase() {
    String selector = Constants.PROPERTIES_DEFAULT_API_DATABASE;
    if (webRestQueryProperties != null && webRestQueryProperties.containsKey(Constants.PROPERTIES_DEFAULT_API_DATABASE)) {
        selector = (String) webRestQueryProperties.get(Constants.PROPERTIES_DEFAULT_API_DATABASE);
    }
    return defaultApiFactory.bySelector(selector);
}
/**
 * Returns the default report formatter selector, falling back to
 * {@code "plain"} when the property is not configured.
 * @return The default formatter selector
 */
public static final String getDefultFormatter() {
    Object selector = webRestQueryProperties.getOrDefault(Constants.PROPERTIES_DEFAULT_REPORT_FORMATTER, "plain");
    return (String) selector;
}
/**
 * Returns the default output device selector, falling back to
 * {@code "stdout"} when the property is not configured.
 * @return The default output device selector
 */
public static final String getDefultOutputType() {
    Object selector = webRestQueryProperties.getOrDefault(Constants.PROPERTIES_DEFAULT_REPORT_COLLECTOR, "stdout");
    return (String) selector;
}
/**
 * Returns the default number of parallel surfing threads; falls back to 10
 * when the property is missing or not a valid integer.
 * @return The default thread count
 */
public static final Integer getDefultThredExtends() {
    try {
        String raw = (String) webRestQueryProperties.getOrDefault(Constants.PROPERTIES_DEFAULT_PARALLEL_THREADS, "10");
        return Integer.valueOf(raw);
    } catch (NumberFormatException e) {
        // Malformed property value: use the hard-coded default.
        return 10;
    }
}
/**
 * Retrieves the default exact-search flag.
 * <p>
 * NOTE(review): this method reads {@code PROPERTIES_DEFAULT_PARALLEL_THREADS}
 * (the thread-count property) and tests it against 1, and its original javadoc
 * spoke about "thread extends" — it looks like a copy/paste from
 * {@link #getDefultThredExtends()}. Presumably it should read a dedicated
 * exact-search property and parse it as a boolean — TODO confirm against the
 * Constants class and the properties file before changing behavior.
 * @return The default exact-search flag; TRUE when the value cannot be parsed
 */
public static final Boolean getDefultExactSearch() {
try {
return (Integer.parseInt((String)webRestQueryProperties.getOrDefault(Constants.PROPERTIES_DEFAULT_PARALLEL_THREADS, "1"))) == 1;
} catch (NumberFormatException e) {
return Boolean.TRUE;
}
}
/**
 * Registers a crawler project configuration in the registry and hands back
 * its generated identifier.
 * This feature at the moment is a simple in-memory registry; a persistent
 * registry is planned to allow offline execution, rescheduling and access
 * from remote services.
 * @param configuration Web Crawler Project Configuration
 * @return The Project Unique ID
 */
public static String regiterWebCrawerProject(WebRestQueryConfiguration configuration) {
    final String uid = UUID.randomUUID().toString();
    projectsConfigurationMap.put(uid, configuration);
    return uid;
}
/**
 * Looks up a previously registered Web Crawler Project configuration.
 * @param projectUID Project Unique ID
 * @return The matching configuration, or null if the UID is not in the registry
 */
public static final WebRestQueryConfiguration getProjectConfiguration(String projectUID) {
    WebRestQueryConfiguration configuration = projectsConfigurationMap.get(projectUID);
    return configuration;
}
/** Scans (via the cached Reflections instance) for every ResultCollector implementation. */
private static final Set<Class<? extends ResultCollector>> getCollectorClasses() {
    Set<Class<? extends ResultCollector>> classes = collectorsReflections.getSubTypesOf(ResultCollector.class);
    return classes;
}
/** Scans (via the cached Reflections instance) for every ResultFormatter implementation. */
private static final Set<Class<? extends ResultFormatter>> getFormatterClasses() {
    Set<Class<? extends ResultFormatter>> classes = formattersReflections.getSubTypesOf(ResultFormatter.class);
    return classes;
}
/**
 * Resolves a complete ResultCollector, wired with a ResultFormatter, from
 * the two selectors. When a selector cannot be matched the lookup is retried
 * once with the corresponding default selector.
 * @param collectorSelector Selector used to retrieve a ResultCollector
 * @param formatterSelector Selector used to retrieve a ResultFormatter
 * @return ResultCollector according to the selectors or the retry policies
 * @see ResultCollector
 * @see ResultFormatter
 */
public static final ResultCollector getCollector(String collectorSelector, String formatterSelector) {
    // First attempt: retry flag off so each selector gets one fallback chance.
    return getCollector(collectorSelector, formatterSelector, false);
}
/**
 * Internal selector resolution. On the first pass ({@code retry == false}) a
 * failed formatter lookup retries with the default formatter, and a failed
 * collector lookup retries with the default output type; on the retry pass
 * an unresolved selector yields null.
 */
private static final ResultCollector getCollector(String collectorSelector, String formatterSelector, boolean retry) {
    // Resolve the formatter first.
    ResultFormatter matchedFormatter = null;
    for (ResultFormatter candidate : getFormatters()) {
        if (candidate.getSelector().equalsIgnoreCase(formatterSelector)) {
            matchedFormatter = candidate;
            break;
        }
    }
    if (matchedFormatter == null) {
        return retry ? null : getCollector(collectorSelector, getDefultFormatter(), true);
    }
    // Then resolve the collector and wire the formatter into it.
    for (ResultCollector candidate : getCollectors()) {
        if (candidate.getSelector().equalsIgnoreCase(collectorSelector)) {
            candidate.setFormatter(matchedFormatter);
            return candidate;
        }
    }
    // Note: the retry keeps the originally requested formatter selector.
    return retry ? null : getCollector(getDefultOutputType(), formatterSelector, true);
}
/**
 * Returns the list of available and active collectors, instantiating each
 * discovered ResultCollector implementation through its no-arg constructor.
 * Classes that cannot be instantiated are logged and skipped.
 * @return the list of collectors
 * @see ResultCollector
 */
public static final List<ResultCollector> getCollectors() {
    List<ResultCollector> collectors = new ArrayList<ResultCollector>();
    for (Class<? extends ResultCollector> clazz : getCollectorClasses()) {
        try {
            // getDeclaredConstructor().newInstance() replaces the deprecated
            // Class.newInstance(), which rethrows checked exceptions unchecked.
            ResultCollector collector = clazz.getDeclaredConstructor().newInstance();
            if (collector.isActive())
                collectors.add(collector);
        } catch (ReflectiveOperationException e) {
            // Covers Instantiation/IllegalAccess/NoSuchMethod/InvocationTarget.
            Logger.error("Error instantiating class " + clazz, e, WebRestQueryHelper.class);
        }
    }
    return collectors;
}
/**
 * Returns the list of available and active formatters, instantiating each
 * discovered ResultFormatter implementation through its no-arg constructor.
 * Classes that cannot be instantiated are logged and skipped.
 * @return the list of formatters
 * @see ResultFormatter
 */
public static final List<ResultFormatter> getFormatters() {
    List<ResultFormatter> formatters = new ArrayList<ResultFormatter>();
    for (Class<? extends ResultFormatter> clazz : getFormatterClasses()) {
        try {
            // getDeclaredConstructor().newInstance() replaces the deprecated
            // Class.newInstance(), which rethrows checked exceptions unchecked.
            ResultFormatter formatter = clazz.getDeclaredConstructor().newInstance();
            if (formatter.isActive())
                formatters.add(formatter);
        } catch (ReflectiveOperationException e) {
            // Covers Instantiation/IllegalAccess/NoSuchMethod/InvocationTarget.
            Logger.error("Error instantiating class " + clazz, e, WebRestQueryHelper.class);
        }
    }
    return formatters;
}
}
| |
/*
* Copyright 1999-2011 Alibaba Group.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.jahhan.com.alibaba.dubbo.common.serialize.support.dubbo;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Serializable;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
import java.util.regex.Matcher;
import lombok.extern.slf4j.Slf4j;
import net.jahhan.com.alibaba.dubbo.common.bytecode.ClassGenerator;
import net.jahhan.com.alibaba.dubbo.common.io.UnsafeByteArrayInputStream;
import net.jahhan.com.alibaba.dubbo.common.io.UnsafeByteArrayOutputStream;
import net.jahhan.com.alibaba.dubbo.common.serialize.support.java.CompactedObjectInputStream;
import net.jahhan.com.alibaba.dubbo.common.serialize.support.java.CompactedObjectOutputStream;
import net.jahhan.com.alibaba.dubbo.common.utils.ClassHelper;
import net.jahhan.com.alibaba.dubbo.common.utils.IOUtils;
import net.jahhan.com.alibaba.dubbo.common.utils.ReflectUtils;
import net.jahhan.common.extension.utils.StringUtils;
/**
* Builder.
*
* @author qian.lei
*
* @param <T>
* type.
*/
@SuppressWarnings({ "unchecked", "rawtypes" })
@Slf4j
public abstract class Builder<T> implements GenericDataFlags {
// Counter used to give each generated builder class a unique name ($bc0, $bc1, ...).
private static final AtomicLong BUILDER_CLASS_COUNTER = new AtomicLong(0);
private static final String BUILDER_CLASS_NAME = Builder.class.getName();
// Cache of generated builders keyed by target class, split by whether the
// target implements java.io.Serializable.
private static final Map<Class<?>, Builder<?>> BuilderMap = new ConcurrentHashMap<Class<?>, Builder<?>>();
private static final Map<Class<?>, Builder<?>> nonSerializableBuilderMap = new ConcurrentHashMap<Class<?>, Builder<?>>();
// Optional per-class field-order configuration resource ("<SimpleName>.fc").
private static final String FIELD_CONFIG_SUFFIX = ".fc";
private static final int MAX_FIELD_CONFIG_FILE_SIZE = 16 * 1024;
// Deterministic field-name ordering so writer and reader agree on field order.
private static final Comparator<String> FNC = new Comparator<String>() {
public int compare(String n1, String n2) {
return compareFieldName(n1, n2);
}
};
// Same ordering applied to reflective Field objects.
private static final Comparator<Field> FC = new Comparator<Field>() {
public int compare(Field f1, Field f2) {
return compareFieldName(f1.getName(), f2.getName());
}
};
// Orders constructors by ascending parameter count so the simplest one is
// selected for instantiation during deserialization.
private static final Comparator<Constructor> CC = new Comparator<Constructor>() {
public int compare(Constructor o1, Constructor o2) {
return o1.getParameterTypes().length - o2.getParameterTypes().length;
}
};
// class-descriptor mapper
private static final List<String> mDescList = new ArrayList<String>();
private static final Map<String, Integer> mDescMap = new ConcurrentHashMap<String, Integer>();
public static ClassDescriptorMapper DEFAULT_CLASS_DESCRIPTOR_MAPPER = new ClassDescriptorMapper() {
// Maps an index back to its class descriptor; null when out of range.
public String getDescriptor(int index) {
if (index < 0 || index >= mDescList.size())
return null;
return mDescList.get(index);
}
// Maps a class descriptor to its registered index; -1 when unknown.
public int getDescriptorIndex(String desc) {
Integer ret = mDescMap.get(desc);
return ret == null ? -1 : ret.intValue();
}
};
// Instantiable only by (generated) subclasses.
protected Builder() {
}
// The target class this builder serializes and deserializes.
abstract public Class<T> getType();
/**
 * Serializes {@code obj} onto the given stream using the generic object
 * output format, flushing the internal buffer when done.
 * @param obj object to serialize
 * @param os destination stream
 * @throws IOException on write failure
 */
public void writeTo(T obj, OutputStream os) throws IOException {
    GenericObjectOutput output = new GenericObjectOutput(os);
    writeTo(obj, output);
    output.flushBuffer();
}
// Deserializes an instance of T from a raw byte array.
public T parseFrom(byte[] b) throws IOException {
return parseFrom(new UnsafeByteArrayInputStream(b));
}
// Deserializes an instance of T from an input stream.
public T parseFrom(InputStream is) throws IOException {
return parseFrom(new GenericObjectInput(is));
}
// Core read/write primitives implemented by generated builder subclasses.
abstract public void writeTo(T obj, GenericObjectOutput out) throws IOException;
abstract public T parseFrom(GenericObjectInput in) throws IOException;
/**
 * Returns the builder for class {@code c}, generating and caching one on
 * first use. Object and interfaces fall back to the fully generic builder,
 * Object[] to the generic array builder.
 * @param c target class
 * @param isAllowNonSerializable when false, classes that do not implement
 *        java.io.Serializable are rejected
 * @return builder for {@code c}
 * @throws IllegalStateException if {@code c} is not Serializable and
 *         non-serializable classes are not allowed
 */
public static <T> Builder<T> register(Class<T> c, boolean isAllowNonSerializable) {
if (c == Object.class || c.isInterface())
return (Builder<T>) GenericBuilder;
if (c == Object[].class)
return (Builder<T>) GenericArrayBuilder;
// Serializable cache is consulted before the policy check so previously
// registered serializable builders are always reusable.
Builder<T> b = (Builder<T>) BuilderMap.get(c);
if (null != b)
return b;
boolean isSerializable = Serializable.class.isAssignableFrom(c);
if (!isAllowNonSerializable && !isSerializable) {
throw new IllegalStateException("Serialized class " + c.getName()
+ " must implement java.io.Serializable (dubbo codec setting: isAllowNonSerializable = false)");
}
// Non-serializable cache is checked only after the policy allows it.
b = (Builder<T>) nonSerializableBuilderMap.get(c);
if (null != b)
return b;
b = newBuilder(c);
// Cache in the map matching the class's serializability.
if (isSerializable)
BuilderMap.put(c, b);
else
nonSerializableBuilderMap.put(c, b);
return b;
}
/** Registers/looks up the builder for {@code c}, rejecting non-Serializable classes. */
public static <T> Builder<T> register(Class<T> c) {
    final boolean allowNonSerializable = false;
    return register(c, allowNonSerializable);
}
/**
 * Installs a hand-written builder for class {@code c}, caching it in the
 * map that matches the class's serializability.
 */
public static <T> void register(Class<T> c, Builder<T> b) {
    Map<Class<?>, Builder<?>> cache = Serializable.class.isAssignableFrom(c)
            ? BuilderMap
            : nonSerializableBuilderMap;
    cache.put(c, b);
}
/**
 * Generates a new builder for {@code c}, dispatching to the array or plain
 * object code generator. Primitive types are rejected — they never need a
 * builder.
 */
private static <T> Builder<T> newBuilder(Class<T> c) {
    if (c.isPrimitive())
        throw new RuntimeException("Can not create builder for primitive type: " + c);
    if (log.isInfoEnabled())
        log.info("create Builder for class: " + c);
    Builder<?> created = c.isArray() ? newArrayBuilder(c) : newObjectBuilder(c);
    return (Builder<T>) created;
}
/**
 * Generates a builder for an array class by emitting Java source for
 * writeTo/parseFrom (element-wise loops, with primitive fast paths) and
 * compiling it through ClassGenerator. Arrays of interface types fall back
 * to the fully generic array builder.
 * <p>
 * Fix: the catch-all rethrow now preserves the original exception as the
 * cause (the other generators in this class already do), instead of keeping
 * only the message.
 */
private static Builder<?> newArrayBuilder(Class<?> c) {
Class<?> cc = c.getComponentType();
if (cc.isInterface())
return GenericArrayBuilder;
ClassLoader cl = ClassHelper.getCallerClassLoader(Builder.class);
// Class names rendered as Java source, e.g. "int[][]", "double[]".
String cn = ReflectUtils.getName(c), ccn = ReflectUtils.getName(cc);
String bcn = BUILDER_CLASS_NAME + "$bc" + BUILDER_CLASS_COUNTER.getAndIncrement();
// Split at the first ']' so an array-new expression can be assembled with a
// length spliced in: for "int[][]" s1="int[", s2="][]".
int ix = cn.indexOf(']');
String s1 = cn.substring(0, ix), s2 = cn.substring(ix);
// Generated writeTo(...) source.
StringBuilder cwt = new StringBuilder("public void writeTo(Object obj, ")
.append(GenericObjectOutput.class.getName()).append(" out) throws java.io.IOException{");
// Generated parseFrom(...) source.
StringBuilder cpf = new StringBuilder("public Object parseFrom(").append(GenericObjectInput.class.getName())
.append(" in) throws java.io.IOException{");
cwt.append("if( $1 == null ){ $2.write0(OBJECT_NULL); return; }");
cwt.append(cn).append(" v = (").append(cn).append(
")$1; int len = v.length; $2.write0(OBJECT_VALUES); $2.writeUInt(len); for(int i=0;i<len;i++){ ");
cpf.append(
"byte b = $1.read0(); if( b == OBJECT_NULL ) return null; if( b != OBJECT_VALUES ) throw new java.io.IOException(\"Input format error, expect OBJECT_NULL|OBJECT_VALUES, get \" + b + \".\");");
cpf.append("int len = $1.readUInt(); if( len == 0 ) return new ").append(s1).append('0').append(s2)
.append("; ");
cpf.append(cn).append(" ret = new ").append(s1).append("len").append(s2).append("; for(int i=0;i<len;i++){ ");
Builder<?> builder = null;
if (cc.isPrimitive()) {
// Primitive components are read/written with the matching typed calls;
// char has no dedicated call and rides on short.
if (cc == boolean.class) {
cwt.append("$2.writeBool(v[i]);");
cpf.append("ret[i] = $1.readBool();");
} else if (cc == byte.class) {
cwt.append("$2.writeByte(v[i]);");
cpf.append("ret[i] = $1.readByte();");
} else if (cc == char.class) {
cwt.append("$2.writeShort((short)v[i]);");
cpf.append("ret[i] = (char)$1.readShort();");
} else if (cc == short.class) {
cwt.append("$2.writeShort(v[i]);");
cpf.append("ret[i] = $1.readShort();");
} else if (cc == int.class) {
cwt.append("$2.writeInt(v[i]);");
cpf.append("ret[i] = $1.readInt();");
} else if (cc == long.class) {
cwt.append("$2.writeLong(v[i]);");
cpf.append("ret[i] = $1.readLong();");
} else if (cc == float.class) {
cwt.append("$2.writeFloat(v[i]);");
cpf.append("ret[i] = $1.readFloat();");
} else if (cc == double.class) {
cwt.append("$2.writeDouble(v[i]);");
cpf.append("ret[i] = $1.readDouble();");
}
} else {
// Reference components delegate to the component type's builder, which
// is injected into the generated class's static "builder" field below.
builder = register(cc);
cwt.append("builder.writeTo(v[i], $2);");
cpf.append("ret[i] = (").append(ccn).append(")builder.parseFrom($1);");
}
cwt.append(" } }");
cpf.append(" } return ret; }");
ClassGenerator cg = ClassGenerator.newInstance(cl);
cg.setClassName(bcn);
cg.setSuperClass(Builder.class);
cg.addDefaultConstructor();
if (builder != null)
cg.addField("public static " + BUILDER_CLASS_NAME + " builder;");
cg.addMethod("public Class getType(){ return " + cn + ".class; }");
cg.addMethod(cwt.toString());
cg.addMethod(cpf.toString());
try {
Class<?> wc = cg.toClass();
// set static field.
if (builder != null)
wc.getField("builder").set(null, builder);
return (Builder<?>) wc.newInstance();
} catch (RuntimeException e) {
throw e;
} catch (Throwable e) {
// Preserve the cause for diagnosability (was: message only).
throw new RuntimeException(e.getMessage(), e);
} finally {
cg.release();
}
}
/**
 * Generates a builder for a plain object class: collects the fields to
 * serialize (from an optional ".fc" order file or by reflection), picks the
 * cheapest constructor, then emits Java source for writeObject/readObject/
 * newInstance and compiles it through ClassGenerator. Enums get a dedicated
 * builder; Throwables and classes with transient fields (when Serializable)
 * fall back to Java serialization.
 */
private static Builder<?> newObjectBuilder(final Class<?> c) {
if (c.isEnum())
return newEnumBuilder(c);
if (c.isAnonymousClass())
throw new RuntimeException("Can not instantiation anonymous class: " + c);
if (c.getEnclosingClass() != null && !Modifier.isStatic(c.getModifiers()))
throw new RuntimeException("Can not instantiation inner and non-static class: " + c);
if (Throwable.class.isAssignableFrom(c))
return SerializableBuilder;
ClassLoader cl = ClassHelper.getCallerClassLoader(Builder.class);
// is same package.
// When the target class is loadable (non-system), the generated builder is
// placed in the same package so package-private members are reachable.
boolean isp;
String cn = c.getName(), bcn;
if (c.getClassLoader() == null) // is system class. if(
// cn.startsWith("java.") ||
// cn.startsWith("javax.") ||
// cn.startsWith("sun.") )
{
isp = false;
bcn = BUILDER_CLASS_NAME + "$bc" + BUILDER_CLASS_COUNTER.getAndIncrement();
} else {
isp = true;
bcn = cn + "$bc" + BUILDER_CLASS_COUNTER.getAndIncrement();
}
// is Collection, is Map, is Serializable.
boolean isc = Collection.class.isAssignableFrom(c);
boolean ism = !isc && Map.class.isAssignableFrom(c);
boolean iss = !(isc || ism) && Serializable.class.isAssignableFrom(c);
// deal with fields.
String[] fns = null; // fix-order fields names
// Optional "<SimpleName>.fc" resource pins the serialized field order.
InputStream is = c.getResourceAsStream(c.getSimpleName() + FIELD_CONFIG_SUFFIX); // load
// field-config
// file.
if (is != null) {
try {
int len = is.available();
if (len > 0) {
if (len > MAX_FIELD_CONFIG_FILE_SIZE)
throw new RuntimeException(
"Load [" + c.getName() + "] field-config file error: File-size too larger");
String[] lines = IOUtils.readLines(is);
if (lines != null && lines.length > 0) {
// Each line lists comma-separated field names; names within a
// line are sorted, lines keep their relative order.
List<String> list = new ArrayList<String>();
for (int i = 0; i < lines.length; i++) {
fns = lines[i].split(",");
Arrays.sort(fns, FNC);
for (int j = 0; j < fns.length; j++)
list.add(fns[j]);
}
fns = list.toArray(new String[0]);
}
}
} catch (IOException e) {
throw new RuntimeException("Load [" + c.getName() + "] field-config file error: " + e.getMessage());
} finally {
try {
is.close();
} catch (IOException e) {
}
}
}
Field f, fs[];
if (fns != null) {
// Field-config path: resolve each configured name on the class itself.
fs = new Field[fns.length];
for (int i = 0; i < fns.length; i++) {
String fn = fns[i];
try {
f = c.getDeclaredField(fn);
int mod = f.getModifiers();
if (Modifier.isStatic(mod) || (serializeIgnoreFinalModifier(c) && Modifier.isFinal(mod)))
throw new RuntimeException("Field [" + c.getName() + "." + fn + "] is static/final field.");
if (Modifier.isTransient(mod)) {
if (iss)
return SerializableBuilder;
throw new RuntimeException("Field [" + c.getName() + "." + fn + "] is transient field.");
}
f.setAccessible(true);
fs[i] = f;
} catch (SecurityException e) {
throw new RuntimeException(e.getMessage());
} catch (NoSuchFieldException e) {
throw new RuntimeException("Field [" + c.getName() + "." + fn + "] not found.");
}
}
} else {
// Reflection path: walk the class hierarchy collecting eligible fields,
// then sort for a deterministic order.
Class<?> t = c;
List<Field> fl = new ArrayList<Field>();
do {
fs = t.getDeclaredFields();
for (Field tf : fs) {
int mod = tf.getModifiers();
if (Modifier.isStatic(mod) || (serializeIgnoreFinalModifier(c) && Modifier.isFinal(mod))
|| tf.getName().equals("this$0") // skip static or
// inner-class's
// 'this$0'
// field.
|| !Modifier.isPublic(tf.getType().getModifiers())) // skip
// private
// inner-class
// field
continue;
if (Modifier.isTransient(mod)) {
if (iss)
return SerializableBuilder;
continue;
}
tf.setAccessible(true);
fl.add(tf);
}
t = t.getSuperclass();
} while (t != Object.class);
fs = fl.toArray(new Field[0]);
if (fs.length > 1)
Arrays.sort(fs, FC);
}
// deal with constructors.
// Walk up the hierarchy if the class itself declares none, then pick the
// constructor with the fewest parameters (CC sorts ascending).
Constructor<?>[] cs = c.getDeclaredConstructors();
if (cs.length == 0) {
Class<?> t = c;
do {
t = t.getSuperclass();
if (t == null)
throw new RuntimeException("Can not found Constructor?");
cs = t.getDeclaredConstructors();
} while (cs.length == 0);
}
if (cs.length > 1)
Arrays.sort(cs, CC);
// writeObject code.
StringBuilder cwf = new StringBuilder("protected void writeObject(Object obj, ")
.append(GenericObjectOutput.class.getName()).append(" out) throws java.io.IOException{");
cwf.append(cn).append(" v = (").append(cn).append(")$1; ");
cwf.append("$2.writeInt(fields.length);");
// readObject code.
StringBuilder crf = new StringBuilder("protected void readObject(Object ret, ")
.append(GenericObjectInput.class.getName()).append(" in) throws java.io.IOException{");
crf.append("int fc = $2.readInt();");
crf.append("if( fc != ").append(fs.length).append(" ) throw new IllegalStateException(\"Deserialize Class [")
.append(cn).append("], field count not matched. Expect ").append(fs.length)
.append(" but get \" + fc +\".\");");
crf.append(cn).append(" ret = (").append(cn).append(")$1;");
// newInstance code.
StringBuilder cni = new StringBuilder("protected Object newInstance(")
.append(GenericObjectInput.class.getName()).append(" in){ return ");
Constructor<?> con = cs[0];
int mod = con.getModifiers();
// dn: constructor is directly callable from generated code; otherwise it
// is invoked reflectively via the injected "constructor" static field.
boolean dn = Modifier.isPublic(mod) || (isp && !Modifier.isPrivate(mod));
if (dn) {
cni.append("new ").append(cn).append("(");
} else {
con.setAccessible(true);
cni.append("constructor.newInstance(new Object[]{");
}
// Pass a default value for every constructor parameter.
Class<?>[] pts = con.getParameterTypes();
for (int i = 0; i < pts.length; i++) {
if (i > 0)
cni.append(',');
cni.append(defaultArg(pts[i]));
}
if (!dn)
cni.append("}"); // close object array.
cni.append("); }");
// get bean-style property metadata.
Map<String, PropertyMetadata> pms = propertyMetadatas(c);
List<Builder<?>> builders = new ArrayList<Builder<?>>(fs.length);
String fn, ftn; // field name, field type name.
Class<?> ft; // field type.
boolean da; // direct access.
PropertyMetadata pm;
// For each field, emit write/read statements choosing one of three access
// strategies: direct field access (da), getter/setter pair (pm), or
// reflective Field access via the injected "fields" array.
for (int i = 0; i < fs.length; i++) {
f = fs[i];
fn = f.getName();
ft = f.getType();
ftn = ReflectUtils.getName(ft);
da = isp && (f.getDeclaringClass() == c) && (Modifier.isPrivate(f.getModifiers()) == false);
if (da) {
pm = null;
} else {
pm = pms.get(fn);
// Only use a property pair when its type matches and both accessors exist.
if (pm != null && (pm.type != ft || pm.setter == null || pm.getter == null))
pm = null;
}
// Tolerate payloads with fewer fields: stop reading at the sent count.
crf.append("if( fc == ").append(i).append(" ) return;");
if (ft.isPrimitive()) {
if (ft == boolean.class) {
if (da) {
cwf.append("$2.writeBool(v.").append(fn).append(");");
crf.append("ret.").append(fn).append(" = $2.readBool();");
} else if (pm != null) {
cwf.append("$2.writeBool(v.").append(pm.getter).append("());");
crf.append("ret.").append(pm.setter).append("($2.readBool());");
} else {
cwf.append("$2.writeBool(((Boolean)fields[").append(i).append("].get($1)).booleanValue());");
crf.append("fields[").append(i).append("].set(ret, ($w)$2.readBool());");
}
} else if (ft == byte.class) {
if (da) {
cwf.append("$2.writeByte(v.").append(fn).append(");");
crf.append("ret.").append(fn).append(" = $2.readByte();");
} else if (pm != null) {
cwf.append("$2.writeByte(v.").append(pm.getter).append("());");
crf.append("ret.").append(pm.setter).append("($2.readByte());");
} else {
cwf.append("$2.writeByte(((Byte)fields[").append(i).append("].get($1)).byteValue());");
crf.append("fields[").append(i).append("].set(ret, ($w)$2.readByte());");
}
} else if (ft == char.class) {
// char rides on short: no dedicated read/write call.
if (da) {
cwf.append("$2.writeShort((short)v.").append(fn).append(");");
crf.append("ret.").append(fn).append(" = (char)$2.readShort();");
} else if (pm != null) {
cwf.append("$2.writeShort((short)v.").append(pm.getter).append("());");
crf.append("ret.").append(pm.setter).append("((char)$2.readShort());");
} else {
cwf.append("$2.writeShort((short)((Character)fields[").append(i)
.append("].get($1)).charValue());");
crf.append("fields[").append(i).append("].set(ret, ($w)((char)$2.readShort()));");
}
} else if (ft == short.class) {
if (da) {
cwf.append("$2.writeShort(v.").append(fn).append(");");
crf.append("ret.").append(fn).append(" = $2.readShort();");
} else if (pm != null) {
cwf.append("$2.writeShort(v.").append(pm.getter).append("());");
crf.append("ret.").append(pm.setter).append("($2.readShort());");
} else {
cwf.append("$2.writeShort(((Short)fields[").append(i).append("].get($1)).shortValue());");
crf.append("fields[").append(i).append("].set(ret, ($w)$2.readShort());");
}
} else if (ft == int.class) {
if (da) {
cwf.append("$2.writeInt(v.").append(fn).append(");");
crf.append("ret.").append(fn).append(" = $2.readInt();");
} else if (pm != null) {
cwf.append("$2.writeInt(v.").append(pm.getter).append("());");
crf.append("ret.").append(pm.setter).append("($2.readInt());");
} else {
cwf.append("$2.writeInt(((Integer)fields[").append(i).append("].get($1)).intValue());");
crf.append("fields[").append(i).append("].set(ret, ($w)$2.readInt());");
}
} else if (ft == long.class) {
if (da) {
cwf.append("$2.writeLong(v.").append(fn).append(");");
crf.append("ret.").append(fn).append(" = $2.readLong();");
} else if (pm != null) {
cwf.append("$2.writeLong(v.").append(pm.getter).append("());");
crf.append("ret.").append(pm.setter).append("($2.readLong());");
} else {
cwf.append("$2.writeLong(((Long)fields[").append(i).append("].get($1)).longValue());");
crf.append("fields[").append(i).append("].set(ret, ($w)$2.readLong());");
}
} else if (ft == float.class) {
if (da) {
cwf.append("$2.writeFloat(v.").append(fn).append(");");
crf.append("ret.").append(fn).append(" = $2.readFloat();");
} else if (pm != null) {
cwf.append("$2.writeFloat(v.").append(pm.getter).append("());");
crf.append("ret.").append(pm.setter).append("($2.readFloat());");
} else {
cwf.append("$2.writeFloat(((Float)fields[").append(i).append("].get($1)).floatValue());");
crf.append("fields[").append(i).append("].set(ret, ($w)$2.readFloat());");
}
} else if (ft == double.class) {
if (da) {
cwf.append("$2.writeDouble(v.").append(fn).append(");");
crf.append("ret.").append(fn).append(" = $2.readDouble();");
} else if (pm != null) {
cwf.append("$2.writeDouble(v.").append(pm.getter).append("());");
crf.append("ret.").append(pm.setter).append("($2.readDouble());");
} else {
cwf.append("$2.writeDouble(((Double)fields[").append(i).append("].get($1)).doubleValue());");
crf.append("fields[").append(i).append("].set(ret, ($w)$2.readDouble());");
}
}
} else if (ft == c) {
// Self-typed field: reuse this builder recursively.
if (da) {
cwf.append("this.writeTo(v.").append(fn).append(", $2);");
crf.append("ret.").append(fn).append(" = (").append(ftn).append(")this.parseFrom($2);");
} else if (pm != null) {
cwf.append("this.writeTo(v.").append(pm.getter).append("(), $2);");
crf.append("ret.").append(pm.setter).append("((").append(ftn).append(")this.parseFrom($2));");
} else {
cwf.append("this.writeTo((").append(ftn).append(")fields[").append(i).append("].get($1), $2);");
crf.append("fields[").append(i).append("].set(ret, this.parseFrom($2));");
}
} else {
// Other reference types delegate to that type's builder, injected via
// the generated class's static "builders" array.
int bc = builders.size();
builders.add(register(ft));
if (da) {
cwf.append("builders[").append(bc).append("].writeTo(v.").append(fn).append(", $2);");
crf.append("ret.").append(fn).append(" = (").append(ftn).append(")builders[").append(bc)
.append("].parseFrom($2);");
} else if (pm != null) {
cwf.append("builders[").append(bc).append("].writeTo(v.").append(pm.getter).append("(), $2);");
crf.append("ret.").append(pm.setter).append("((").append(ftn).append(")builders[").append(bc)
.append("].parseFrom($2));");
} else {
cwf.append("builders[").append(bc).append("].writeTo((").append(ftn).append(")fields[").append(i)
.append("].get($1), $2);");
crf.append("fields[").append(i).append("].set(ret, builders[").append(bc)
.append("].parseFrom($2));");
}
}
}
// skip any fields.
// Tolerate payloads with extra trailing fields: skip what we don't know.
crf.append("for(int i=").append(fs.length).append(";i<fc;i++) $2.skipAny();");
// collection or map
// Collections/maps additionally serialize their elements/entries.
if (isc) {
cwf.append(
"$2.writeInt(v.size()); for(java.util.Iterator it=v.iterator();it.hasNext();){ $2.writeObject(it.next()); }");
crf.append("int len = $2.readInt(); for(int i=0;i<len;i++) ret.add($2.readObject());");
} else if (ism) {
cwf.append(
"$2.writeInt(v.size()); for(java.util.Iterator it=v.entrySet().iterator();it.hasNext();){ java.util.Map.Entry entry = (java.util.Map.Entry)it.next(); $2.writeObject(entry.getKey()); $2.writeObject(entry.getValue()); }");
crf.append("int len = $2.readInt(); for(int i=0;i<len;i++) ret.put($2.readObject(), $2.readObject());");
}
cwf.append(" }");
crf.append(" }");
// Compile the generated source and inject the reflective support objects.
ClassGenerator cg = ClassGenerator.newInstance(cl);
cg.setClassName(bcn);
cg.setSuperClass(AbstractObjectBuilder.class);
cg.addDefaultConstructor();
cg.addField("public static java.lang.reflect.Field[] fields;");
cg.addField("public static " + BUILDER_CLASS_NAME + "[] builders;");
if (!dn)
cg.addField("public static java.lang.reflect.Constructor constructor;");
cg.addMethod("public Class getType(){ return " + cn + ".class; }");
cg.addMethod(cwf.toString());
cg.addMethod(crf.toString());
cg.addMethod(cni.toString());
try {
Class<?> wc = cg.toClass();
// set static field
wc.getField("fields").set(null, fs);
wc.getField("builders").set(null, builders.toArray(new Builder<?>[0]));
if (!dn)
wc.getField("constructor").set(null, con);
return (Builder<?>) wc.newInstance();
} catch (RuntimeException e) {
throw e;
} catch (Throwable e) {
throw new RuntimeException(e.getMessage(), e);
} finally {
cg.release();
}
}
/**
 * Generates a builder for an enum class: values are serialized by constant
 * name (writeUTF) and restored with Enum.valueOf; a null enum is written as
 * a null UTF string.
 */
private static Builder<?> newEnumBuilder(Class<?> c) {
ClassLoader cl = ClassHelper.getCallerClassLoader(Builder.class);
String cn = c.getName();
String bcn = BUILDER_CLASS_NAME + "$bc" + BUILDER_CLASS_COUNTER.getAndIncrement();
// Generated writeTo(...) source.
StringBuilder cwt = new StringBuilder("public void writeTo(Object obj, ")
.append(GenericObjectOutput.class.getName()).append(" out) throws java.io.IOException{"); // writeTo
// code.
cwt.append(cn).append(" v = (").append(cn).append(")$1;");
cwt.append("if( $1 == null ){ $2.writeUTF(null); }else{ $2.writeUTF(v.name()); } }");
// Generated parseFrom(...) source.
StringBuilder cpf = new StringBuilder("public Object parseFrom(").append(GenericObjectInput.class.getName())
.append(" in) throws java.io.IOException{"); // parseFrom code.
cpf.append("String name = $1.readUTF(); if( name == null ) return null; return (").append(cn)
.append(")Enum.valueOf(").append(cn).append(".class, name); }");
ClassGenerator cg = ClassGenerator.newInstance(cl);
cg.setClassName(bcn);
cg.setSuperClass(Builder.class);
cg.addDefaultConstructor();
cg.addMethod("public Class getType(){ return " + cn + ".class; }");
cg.addMethod(cwt.toString());
cg.addMethod(cpf.toString());
try {
Class<?> wc = cg.toClass();
return (Builder<?>) wc.newInstance();
} catch (RuntimeException e) {
throw e;
} catch (Throwable e) {
throw new RuntimeException(e.getMessage(), e);
} finally {
cg.release();
}
}
/**
 * Collects bean-style property metadata (type, getter name, setter name)
 * for every public method of {@code c}, keyed by property name. Object's
 * own methods are ignored; when a getter and setter disagree on type, the
 * later-seen accessor is dropped.
 */
private static Map<String, PropertyMetadata> propertyMetadatas(Class<?> c) {
Map<String, Method> mm = new HashMap<String, Method>(); // method map.
Map<String, PropertyMetadata> ret = new HashMap<String, PropertyMetadata>(); // property
// metadata
// map.
// All public method.
for (Method m : c.getMethods()) {
if (m.getDeclaringClass() == Object.class) // Ignore Object's
// method.
continue;
mm.put(ReflectUtils.getDesc(m), m);
}
Matcher matcher;
for (Map.Entry<String, Method> entry : mm.entrySet()) {
String desc = entry.getKey();
Method method = entry.getValue();
// getXxx() / isXxx() / hasXxx() / canXxx() -> getter.
if ((matcher = ReflectUtils.GETTER_METHOD_DESC_PATTERN.matcher(desc)).matches()
|| (matcher = ReflectUtils.IS_HAS_CAN_METHOD_DESC_PATTERN.matcher(desc)).matches()) {
String pn = propertyName(matcher.group(1));
Class<?> pt = method.getReturnType();
PropertyMetadata pm = ret.get(pn);
if (pm == null) {
pm = new PropertyMetadata();
pm.type = pt;
ret.put(pn, pm);
} else {
// Type mismatch with previously recorded accessor: skip.
if (pm.type != pt)
continue;
}
pm.getter = method.getName();
// setXxx(arg) -> setter.
} else if ((matcher = ReflectUtils.SETTER_METHOD_DESC_PATTERN.matcher(desc)).matches()) {
String pn = propertyName(matcher.group(1));
Class<?> pt = method.getParameterTypes()[0];
PropertyMetadata pm = ret.get(pn);
if (pm == null) {
pm = new PropertyMetadata();
pm.type = pt;
ret.put(pn, pm);
} else {
// Type mismatch with previously recorded accessor: skip.
if (pm.type != pt)
continue;
}
pm.setter = method.getName();
}
}
return ret;
}
/**
 * Converts an accessor suffix to a JavaBeans property name: the first letter
 * is decapitalized unless the second character is already upper case (or not
 * a lower-case letter), matching the Introspector.decapitalize convention.
 */
private static String propertyName(String s) {
    boolean decapitalize = s.length() == 1 || Character.isLowerCase(s.charAt(1));
    return decapitalize ? Character.toLowerCase(s.charAt(0)) + s.substring(1) : s;
}
/**
 * Whether the {@code final} modifier on fields of {@code cl} should be
 * ignored during serialization. Currently disabled for every class: the
 * heuristics below were tried and switched off, and are kept only as a
 * record of what was considered.
 *
 * @param cl the class being serialized (was a raw {@code Class}; narrowed to
 *           {@code Class<?>} for type safety — source-compatible for callers)
 * @return always {@code false}
 */
private static boolean serializeIgnoreFinalModifier(Class<?> cl) {
    // Disabled heuristics, retained for reference:
    // if (cl.isAssignableFrom(BigInteger.class)) return false;
    // for performance
    // if (cl.getName().startsWith("java")) return true;
    // if (cl.getName().startsWith("javax")) return true;
    return false;
}
@SuppressWarnings("unused")
/**
 * Returns true when {@code cl} is a primitive type or a one-dimensional
 * primitive array (e.g. {@code int.class}, {@code int[].class}).
 *
 * <p>BUG FIX: the original called {@code cl.getClass().getComponentType()},
 * i.e. the component type of {@code Class.class}, which is always null — so
 * the array branch could never fire. The component type must be taken from
 * {@code cl} itself.
 */
private static boolean isPrimitiveOrPrimitiveArray1(Class<?> cl) {
    if (cl.isPrimitive()) {
        return true;
    }
    Class<?> component = cl.getComponentType(); // null when cl is not an array type
    return component != null && component.isPrimitive();
}
/**
 * Returns the Java source text of a default argument value for the given
 * parameter type, used when generating constructor-invocation code: zero for
 * primitives, {@code null} for reference types, and a one-element literal
 * for {@code byte[]}.
 *
 * @throws UnsupportedOperationException for primitives with no default
 *         representation here (e.g. {@code void.class})
 */
private static String defaultArg(Class<?> cl) {
    if (!cl.isPrimitive()) {
        // byte[] gets a non-empty literal; every other reference type defaults to null.
        return byte[].class == cl ? "new byte[]{0}" : "null";
    }
    if (boolean.class == cl)
        return "false";
    if (byte.class == cl)
        return "(byte)0";
    if (char.class == cl)
        return "(char)0";
    if (short.class == cl)
        return "(short)0";
    if (int.class == cl)
        return "0";
    if (long.class == cl)
        return "0l";
    if (float.class == cl)
        return "(float)0";
    if (double.class == cl)
        return "(double)0";
    // Remaining primitive is void.class, which has no value representation.
    throw new UnsupportedOperationException();
}
/**
 * Lexicographic comparison of two field names: first differing char's code
 * unit difference, then length difference. This is exactly the contract of
 * {@link String#compareTo(String)}, so delegate instead of hand-rolling the
 * character loop.
 */
private static int compareFieldName(String n1, String n2) {
    return n1.compareTo(n2);
}
/**
 * Registers the descriptor of {@code c} in the shared descriptor table,
 * assigning it the next sequential index so well-known types serialize as a
 * compact index rather than a full descriptor string.
 */
private static void addDesc(Class<?> c) {
    String descriptor = ReflectUtils.getDesc(c);
    int index = mDescList.size(); // index of the slot being appended
    mDescList.add(descriptor);
    mDescMap.put(descriptor, index);
}
/** Accessor metadata for one bean property: its type plus getter/setter method names. */
static class PropertyMetadata {
// Property type as fixed by the first accessor encountered; mismatched accessors are skipped.
Class<?> type;
// Method names (e.g. "setFoo" / "getFoo"); null when the property lacks that accessor.
String setter, getter;
}
/**
 * Base builder implementing the shared object framing: a leading tag byte of
 * OBJECT_NULL, OBJECT (inline body) or OBJECT_REF (back-reference index),
 * with reference tracking so shared/cyclic object graphs round-trip.
 * Subclasses supply only the body encoding via writeObject/newInstance/readObject.
 */
public static abstract class AbstractObjectBuilder<T> extends Builder<T> {
abstract public Class<T> getType();
public void writeTo(T obj, GenericObjectOutput out) throws IOException {
if (obj == null) {
out.write0(OBJECT_NULL);
} else {
// Negative ref means this instance has not been written yet.
int ref = out.getRef(obj);
if (ref < 0) {
// Register BEFORE writing the body so self-references resolve.
out.addRef(obj);
out.write0(OBJECT);
writeObject(obj, out);
} else {
// Already written: emit only a back-reference to the earlier copy.
out.write0(OBJECT_REF);
out.writeUInt(ref);
}
}
}
public T parseFrom(GenericObjectInput in) throws IOException {
byte b = in.read0();
switch (b) {
case OBJECT: {
// Create first, register, then fill in — mirrors writeTo so cyclic refs work.
T ret = newInstance(in);
in.addRef(ret);
readObject(ret, in);
return ret;
}
case OBJECT_REF:
return (T) in.getRef(in.readUInt());
case OBJECT_NULL:
return null;
default:
throw new IOException("Input format error, expect OBJECT|OBJECT_REF|OBJECT_NULL, get " + b);
}
}
// Encode the object body (fields/elements) — framing is handled above.
abstract protected void writeObject(T obj, GenericObjectOutput out) throws IOException;
// Allocate an empty instance; must not consume more input than its own header.
abstract protected T newInstance(GenericObjectInput in) throws IOException;
// Populate the instance created by newInstance from the stream.
abstract protected void readObject(T ret, GenericObjectInput in) throws IOException;
}
/** Fallback builder for untyped values: defers entirely to the stream's generic object codec. */
static final Builder<Object> GenericBuilder = new Builder<Object>() {
@Override
public Class<Object> getType() {
return Object.class;
}
@Override
public void writeTo(Object obj, GenericObjectOutput out) throws IOException {
out.writeObject(obj);
}
@Override
public Object parseFrom(GenericObjectInput in) throws IOException {
return in.readObject();
}
};
/**
 * Builder for Object[]: the body is the length followed by each element via
 * the generic object codec; null/ref framing comes from AbstractObjectBuilder.
 */
static final Builder<Object[]> GenericArrayBuilder = new AbstractObjectBuilder<Object[]>() {
    @Override
    public Class<Object[]> getType() {
        return Object[].class;
    }
    @Override
    protected Object[] newInstance(GenericObjectInput in) throws IOException {
        // Length prefix determines the allocation; elements follow in readObject.
        return new Object[in.readUInt()];
    }
    @Override
    protected void readObject(Object[] ret, GenericObjectInput in) throws IOException {
        for (int idx = 0, n = ret.length; idx < n; idx++)
            ret[idx] = in.readObject();
    }
    @Override
    protected void writeObject(Object[] obj, GenericObjectOutput out) throws IOException {
        out.writeUInt(obj.length);
        for (int idx = 0; idx < obj.length; idx++)
            out.writeObject(obj[idx]);
    }
};
/**
 * Last-resort builder: falls back to Java native serialization, embedding the
 * serialized bytes as a length-prefixed OBJECT_STREAM payload.
 * NOTE(review): native deserialization of untrusted input is a known risk —
 * confirm this path only ever sees trusted peers.
 */
static final Builder<Serializable> SerializableBuilder = new Builder<Serializable>() {
@Override
public Class<Serializable> getType() {
return Serializable.class;
}
@SuppressWarnings("resource")
@Override
public void writeTo(Serializable obj, GenericObjectOutput out) throws IOException {
if (obj == null) {
out.write0(OBJECT_NULL);
} else {
out.write0(OBJECT_STREAM);
// Serialize into an in-memory buffer first so the byte length can be prefixed.
UnsafeByteArrayOutputStream bos = new UnsafeByteArrayOutputStream();
CompactedObjectOutputStream oos = new CompactedObjectOutputStream(bos);
oos.writeObject(obj);
oos.flush();
bos.close();
byte[] b = bos.toByteArray();
out.writeUInt(b.length);
out.write0(b, 0, b.length);
}
}
@SuppressWarnings("resource")
@Override
public Serializable parseFrom(GenericObjectInput in) throws IOException {
byte b = in.read0();
if (b == OBJECT_NULL)
return null;
if (b != OBJECT_STREAM)
throw new IOException("Input format error, expect OBJECT_NULL|OBJECT_STREAM, get " + b + ".");
// Read exactly the prefixed number of bytes, then deserialize from them.
UnsafeByteArrayInputStream bis = new UnsafeByteArrayInputStream(in.read0(in.readUInt()));
CompactedObjectInputStream ois = new CompactedObjectInputStream(bis);
try {
return (Serializable) ois.readObject();
} catch (ClassNotFoundException e) {
throw new IOException(StringUtils.toString(e));
}
}
};
static {
// Pre-register descriptors for common JDK types so they serialize as a
// compact table index instead of a full descriptor string. Order matters:
// indexes are assigned sequentially and must match on both peers.
// Primitive arrays.
addDesc(boolean[].class);
addDesc(byte[].class);
addDesc(char[].class);
addDesc(short[].class);
addDesc(int[].class);
addDesc(long[].class);
addDesc(float[].class);
addDesc(double[].class);
// Boxed primitives.
addDesc(Boolean.class);
addDesc(Byte.class);
addDesc(Character.class);
addDesc(Short.class);
addDesc(Integer.class);
addDesc(Long.class);
addDesc(Float.class);
addDesc(Double.class);
// Strings and common containers.
addDesc(String.class);
addDesc(String[].class);
addDesc(ArrayList.class);
addDesc(HashMap.class);
addDesc(HashSet.class);
// Date/time and remaining java.util collections.
addDesc(Date.class);
addDesc(java.sql.Date.class);
addDesc(java.sql.Time.class);
addDesc(java.sql.Timestamp.class);
addDesc(java.util.LinkedList.class);
addDesc(java.util.LinkedHashMap.class);
addDesc(java.util.LinkedHashSet.class);
// byte[]: delegated to the stream's native byte-array codec (handles null itself).
register(byte[].class, new Builder<byte[]>() {
    @Override
    public Class<byte[]> getType() {
        return byte[].class;
    }
    @Override
    public void writeTo(byte[] obj, GenericObjectOutput out) throws IOException {
        out.writeBytes(obj);
    }
    @Override
    public byte[] parseFrom(GenericObjectInput in) throws IOException {
        return in.readBytes();
    }
});
// Boolean: a single tag byte — VARINT_N1 = null, VARINT_0 = false, VARINT_1 = true.
register(Boolean.class, new Builder<Boolean>() {
    @Override
    public Class<Boolean> getType() {
        return Boolean.class;
    }
    @Override
    public void writeTo(Boolean obj, GenericObjectOutput out) throws IOException {
        if (obj == null) {
            out.write0(VARINT_N1);
        } else {
            out.write0(obj.booleanValue() ? VARINT_1 : VARINT_0);
        }
    }
    @Override
    public Boolean parseFrom(GenericObjectInput in) throws IOException {
        byte tag = in.read0();
        if (tag == VARINT_N1)
            return null;
        if (tag == VARINT_0)
            return Boolean.FALSE;
        if (tag == VARINT_1)
            return Boolean.TRUE;
        throw new IOException("Input format error, expect VARINT_N1|VARINT_0|VARINT_1, get " + tag + ".");
    }
});
// Byte: OBJECT_NULL, or OBJECT_VALUE followed by the raw byte.
register(Byte.class, new Builder<Byte>() {
    @Override
    public Class<Byte> getType() {
        return Byte.class;
    }
    @Override
    public void writeTo(Byte obj, GenericObjectOutput out) throws IOException {
        if (obj == null) {
            out.write0(OBJECT_NULL);
            return;
        }
        out.write0(OBJECT_VALUE);
        out.writeByte(obj.byteValue());
    }
    @Override
    public Byte parseFrom(GenericObjectInput in) throws IOException {
        byte tag = in.read0();
        if (tag == OBJECT_NULL)
            return null;
        if (tag != OBJECT_VALUE)
            throw new IOException("Input format error, expect OBJECT_NULL|OBJECT_VALUE, get " + tag + ".");
        return Byte.valueOf(in.readByte());
    }
});
// Character: stored as its 16-bit code unit, sharing the Short wire format.
register(Character.class, new Builder<Character>() {
    @Override
    public Class<Character> getType() {
        return Character.class;
    }
    @Override
    public void writeTo(Character obj, GenericObjectOutput out) throws IOException {
        if (obj == null) {
            out.write0(OBJECT_NULL);
            return;
        }
        out.write0(OBJECT_VALUE);
        out.writeShort((short) obj.charValue());
    }
    @Override
    public Character parseFrom(GenericObjectInput in) throws IOException {
        byte tag = in.read0();
        if (tag == OBJECT_NULL)
            return null;
        if (tag != OBJECT_VALUE)
            throw new IOException("Input format error, expect OBJECT_NULL|OBJECT_VALUE, get " + tag + ".");
        return Character.valueOf((char) in.readShort());
    }
});
// Short: OBJECT_NULL, or OBJECT_VALUE followed by the value.
register(Short.class, new Builder<Short>() {
    @Override
    public Class<Short> getType() {
        return Short.class;
    }
    @Override
    public void writeTo(Short obj, GenericObjectOutput out) throws IOException {
        if (obj == null) {
            out.write0(OBJECT_NULL);
            return;
        }
        out.write0(OBJECT_VALUE);
        out.writeShort(obj.shortValue());
    }
    @Override
    public Short parseFrom(GenericObjectInput in) throws IOException {
        byte tag = in.read0();
        if (tag == OBJECT_NULL)
            return null;
        if (tag != OBJECT_VALUE)
            throw new IOException("Input format error, expect OBJECT_NULL|OBJECT_VALUE, get " + tag + ".");
        return Short.valueOf(in.readShort());
    }
});
// Integer: OBJECT_NULL, or OBJECT_VALUE followed by the value.
register(Integer.class, new Builder<Integer>() {
    @Override
    public Class<Integer> getType() {
        return Integer.class;
    }
    @Override
    public void writeTo(Integer obj, GenericObjectOutput out) throws IOException {
        if (obj == null) {
            out.write0(OBJECT_NULL);
            return;
        }
        out.write0(OBJECT_VALUE);
        out.writeInt(obj.intValue());
    }
    @Override
    public Integer parseFrom(GenericObjectInput in) throws IOException {
        byte tag = in.read0();
        if (tag == OBJECT_NULL)
            return null;
        if (tag != OBJECT_VALUE)
            throw new IOException("Input format error, expect OBJECT_NULL|OBJECT_VALUE, get " + tag + ".");
        return Integer.valueOf(in.readInt());
    }
});
// Long: OBJECT_NULL, or OBJECT_VALUE followed by the value.
register(Long.class, new Builder<Long>() {
    @Override
    public Class<Long> getType() {
        return Long.class;
    }
    @Override
    public void writeTo(Long obj, GenericObjectOutput out) throws IOException {
        if (obj == null) {
            out.write0(OBJECT_NULL);
            return;
        }
        out.write0(OBJECT_VALUE);
        out.writeLong(obj.longValue());
    }
    @Override
    public Long parseFrom(GenericObjectInput in) throws IOException {
        byte tag = in.read0();
        if (tag == OBJECT_NULL)
            return null;
        if (tag != OBJECT_VALUE)
            throw new IOException("Input format error, expect OBJECT_NULL|OBJECT_VALUE, get " + tag + ".");
        return Long.valueOf(in.readLong());
    }
});
// Float: OBJECT_NULL, or OBJECT_VALUE followed by the value.
// FIX: use Float.valueOf / Double.valueOf below instead of the deprecated
// boxed constructors (new Float(...) / new Double(...)).
register(Float.class, new Builder<Float>() {
    @Override
    public Class<Float> getType() {
        return Float.class;
    }
    @Override
    public void writeTo(Float obj, GenericObjectOutput out) throws IOException {
        if (obj == null) {
            out.write0(OBJECT_NULL);
        } else {
            out.write0(OBJECT_VALUE);
            out.writeFloat(obj.floatValue());
        }
    }
    @Override
    public Float parseFrom(GenericObjectInput in) throws IOException {
        byte b = in.read0();
        if (b == OBJECT_NULL)
            return null;
        if (b != OBJECT_VALUE)
            throw new IOException("Input format error, expect OBJECT_NULL|OBJECT_VALUE, get " + b + ".");
        return Float.valueOf(in.readFloat());
    }
});
// Double: OBJECT_NULL, or OBJECT_VALUE followed by the value.
register(Double.class, new Builder<Double>() {
    @Override
    public Class<Double> getType() {
        return Double.class;
    }
    @Override
    public void writeTo(Double obj, GenericObjectOutput out) throws IOException {
        if (obj == null) {
            out.write0(OBJECT_NULL);
        } else {
            out.write0(OBJECT_VALUE);
            out.writeDouble(obj.doubleValue());
        }
    }
    @Override
    public Double parseFrom(GenericObjectInput in) throws IOException {
        byte b = in.read0();
        if (b == OBJECT_NULL)
            return null;
        if (b != OBJECT_VALUE)
            throw new IOException("Input format error, expect OBJECT_NULL|OBJECT_VALUE, get " + b + ".");
        return Double.valueOf(in.readDouble());
    }
});
// String: writeUTF/readUTF already encode null, so no extra tag byte is used.
register(String.class, new Builder<String>() {
    @Override
    public Class<String> getType() {
        return String.class;
    }
    @Override
    public void writeTo(String obj, GenericObjectOutput out) throws IOException {
        out.writeUTF(obj);
    }
    @Override
    public String parseFrom(GenericObjectInput in) throws IOException {
        return in.readUTF();
    }
});
// StringBuilder: round-tripped through its String content.
// NOTE(review): a null UTF payload would make the constructor throw — confirm
// upstream never writes a null StringBuilder through this path.
register(StringBuilder.class, new Builder<StringBuilder>() {
    @Override
    public Class<StringBuilder> getType() {
        return StringBuilder.class;
    }
    @Override
    public void writeTo(StringBuilder obj, GenericObjectOutput out) throws IOException {
        out.writeUTF(obj.toString());
    }
    @Override
    public StringBuilder parseFrom(GenericObjectInput in) throws IOException {
        return new StringBuilder(in.readUTF());
    }
});
// StringBuffer: same scheme as StringBuilder.
register(StringBuffer.class, new Builder<StringBuffer>() {
    @Override
    public Class<StringBuffer> getType() {
        return StringBuffer.class;
    }
    @Override
    public void writeTo(StringBuffer obj, GenericObjectOutput out) throws IOException {
        out.writeUTF(obj.toString());
    }
    @Override
    public StringBuffer parseFrom(GenericObjectInput in) throws IOException {
        return new StringBuffer(in.readUTF());
    }
});
// java.util
// ArrayList: OBJECT_VALUES tag, element count, then each element via the generic codec.
register(ArrayList.class, new Builder<ArrayList>() {
    @Override
    public Class<ArrayList> getType() {
        return ArrayList.class;
    }
    @Override
    public void writeTo(ArrayList obj, GenericObjectOutput out) throws IOException {
        if (obj == null) {
            out.write0(OBJECT_NULL);
            return;
        }
        out.write0(OBJECT_VALUES);
        out.writeUInt(obj.size());
        for (Object element : obj)
            out.writeObject(element);
    }
    @Override
    public ArrayList parseFrom(GenericObjectInput in) throws IOException {
        byte tag = in.read0();
        if (tag == OBJECT_NULL)
            return null;
        if (tag != OBJECT_VALUES)
            throw new IOException("Input format error, expect OBJECT_NULL|OBJECT_VALUES, get " + tag + ".");
        int count = in.readUInt();
        ArrayList list = new ArrayList(count);
        for (int i = 0; i < count; i++)
            list.add(in.readObject());
        return list;
    }
});
// HashMap: OBJECT_MAP tag, entry count, then alternating key/value objects.
register(HashMap.class, new Builder<HashMap>() {
    @Override
    public Class<HashMap> getType() {
        return HashMap.class;
    }
    @Override
    public void writeTo(HashMap obj, GenericObjectOutput out) throws IOException {
        if (obj == null) {
            out.write0(OBJECT_NULL);
            return;
        }
        out.write0(OBJECT_MAP);
        out.writeUInt(obj.size());
        for (Map.Entry entry : (Set<Map.Entry>) obj.entrySet()) {
            out.writeObject(entry.getKey());
            out.writeObject(entry.getValue());
        }
    }
    @Override
    public HashMap parseFrom(GenericObjectInput in) throws IOException {
        byte tag = in.read0();
        if (tag == OBJECT_NULL)
            return null;
        if (tag != OBJECT_MAP)
            throw new IOException("Input format error, expect OBJECT_NULL|OBJECT_MAP, get " + tag + ".");
        int count = in.readUInt();
        HashMap map = new HashMap(count);
        for (int i = 0; i < count; i++)
            map.put(in.readObject(), in.readObject());
        return map;
    }
});
// HashSet: shares the OBJECT_VALUES list framing used by ArrayList.
register(HashSet.class, new Builder<HashSet>() {
    @Override
    public Class<HashSet> getType() {
        return HashSet.class;
    }
    @Override
    public void writeTo(HashSet obj, GenericObjectOutput out) throws IOException {
        if (obj == null) {
            out.write0(OBJECT_NULL);
            return;
        }
        out.write0(OBJECT_VALUES);
        out.writeUInt(obj.size());
        for (Object element : obj)
            out.writeObject(element);
    }
    @Override
    public HashSet parseFrom(GenericObjectInput in) throws IOException {
        byte tag = in.read0();
        if (tag == OBJECT_NULL)
            return null;
        if (tag != OBJECT_VALUES)
            throw new IOException("Input format error, expect OBJECT_NULL|OBJECT_VALUES, get " + tag + ".");
        int count = in.readUInt();
        HashSet set = new HashSet(count);
        for (int i = 0; i < count; i++)
            set.add(in.readObject());
        return set;
    }
});
// java.util.Date: OBJECT_NULL, or OBJECT_VALUE followed by the epoch millis.
register(Date.class, new Builder<Date>() {
    @Override
    public Class<Date> getType() {
        return Date.class;
    }
    @Override
    public void writeTo(Date obj, GenericObjectOutput out) throws IOException {
        if (obj == null) {
            out.write0(OBJECT_NULL);
            return;
        }
        out.write0(OBJECT_VALUE);
        out.writeLong(obj.getTime());
    }
    @Override
    public Date parseFrom(GenericObjectInput in) throws IOException {
        byte tag = in.read0();
        if (tag == OBJECT_NULL)
            return null;
        if (tag != OBJECT_VALUE)
            throw new IOException("Input format error, expect OBJECT_NULL|OBJECT_VALUE, get " + tag + ".");
        return new Date(in.readLong());
    }
});
// java.sql
// java.sql.Date: same epoch-millis encoding as java.util.Date.
register(java.sql.Date.class, new Builder<java.sql.Date>() {
    @Override
    public Class<java.sql.Date> getType() {
        return java.sql.Date.class;
    }
    @Override
    public void writeTo(java.sql.Date obj, GenericObjectOutput out) throws IOException {
        if (obj == null) {
            out.write0(OBJECT_NULL);
            return;
        }
        out.write0(OBJECT_VALUE);
        out.writeLong(obj.getTime());
    }
    @Override
    public java.sql.Date parseFrom(GenericObjectInput in) throws IOException {
        byte tag = in.read0();
        if (tag == OBJECT_NULL)
            return null;
        if (tag != OBJECT_VALUE)
            throw new IOException("Input format error, expect OBJECT_NULL|OBJECT_VALUE, get " + tag + ".");
        return new java.sql.Date(in.readLong());
    }
});
// java.sql.Timestamp: epoch-millis encoding.
// NOTE(review): getTime() drops sub-millisecond nanos — confirm that precision
// loss is acceptable for this wire format.
register(java.sql.Timestamp.class, new Builder<java.sql.Timestamp>() {
    @Override
    public Class<java.sql.Timestamp> getType() {
        return java.sql.Timestamp.class;
    }
    @Override
    public void writeTo(java.sql.Timestamp obj, GenericObjectOutput out) throws IOException {
        if (obj == null) {
            out.write0(OBJECT_NULL);
            return;
        }
        out.write0(OBJECT_VALUE);
        out.writeLong(obj.getTime());
    }
    @Override
    public java.sql.Timestamp parseFrom(GenericObjectInput in) throws IOException {
        byte tag = in.read0();
        if (tag == OBJECT_NULL)
            return null;
        if (tag != OBJECT_VALUE)
            throw new IOException("Input format error, expect OBJECT_NULL|OBJECT_VALUE, get " + tag + ".");
        return new java.sql.Timestamp(in.readLong());
    }
});
// java.sql.Time: epoch-millis encoding.
register(java.sql.Time.class, new Builder<java.sql.Time>() {
    @Override
    public Class<java.sql.Time> getType() {
        return java.sql.Time.class;
    }
    @Override
    public void writeTo(java.sql.Time obj, GenericObjectOutput out) throws IOException {
        if (obj == null) {
            out.write0(OBJECT_NULL);
            return;
        }
        out.write0(OBJECT_VALUE);
        out.writeLong(obj.getTime());
    }
    @Override
    public java.sql.Time parseFrom(GenericObjectInput in) throws IOException {
        byte tag = in.read0();
        if (tag == OBJECT_NULL)
            return null;
        if (tag != OBJECT_VALUE)
            throw new IOException("Input format error, expect OBJECT_NULL|OBJECT_VALUE, get " + tag + ".");
        return new java.sql.Time(in.readLong());
    }
});
}
}
| |
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import edu.princeton.cs.algs4.In;
import edu.princeton.cs.algs4.Digraph;
import edu.princeton.cs.algs4.StdIn;
import edu.princeton.cs.algs4.StdOut;
/**
 * Shortest ancestral path (SAP) queries over a digraph: for two vertices the
 * shortest path v -> ancestor <- w, with per-pair result memoization.
 * Results are cached by unordered vertex pair, so length/ancestor queries on
 * the same pair (in either order) are computed once.
 */
public class SAP {
    /** Unordered pair of vertices, normalized so {@code v <= w}; cache key. */
    private class VertexPair {
        private final int v;
        private final int w;
        public VertexPair(int v, int w) {
            if (v < w) {
                this.v = v;
                this.w = w;
            } else {
                this.v = w;
                this.w = v;
            }
        }
        @Override
        public int hashCode() {
            return 37 * this.v + this.w + 1;
        }
        @Override
        public boolean equals(Object obj) {
            if (obj == null || this.getClass() != obj.getClass()) {
                return false;
            }
            VertexPair that = (VertexPair) obj;
            return this.v == that.v && this.w == that.w;
        }
    }
    /** Query result: common ancestor and total path length (-1 = no path). */
    private class Relation {
        private int ancestor;
        private int distance;
        public Relation(int ancestor, int distance) {
            this.ancestor = ancestor;
            this.distance = distance;
        }
    }
    /** Search-frontier entry: vertex id, which query vertex it came from, and its depth. */
    private class Node {
        private final int id;
        private final int origin;
        private final int distFromOrigin;
        public Node(int id, int origin, int distFromOrigin) {
            this.id = id;
            this.origin = origin;
            this.distFromOrigin = distFromOrigin;
        }
    }
    private Digraph graph;
    private DigraphBFS bfs;
    private HashMap<VertexPair, Relation> relations; // memoized per-pair results
    public SAP(Digraph G) {
        this.graph = G;
        this.bfs = new DigraphBFS(G);
        this.relations = new HashMap<VertexPair, Relation>();
    }
    /**
     * Computes (and caches) the shortest ancestral path for (v, w) via a
     * two-origin breadth-first exploration: every explored vertex reachable
     * from both v and w is a candidate ancestor.
     *
     * @throws IllegalArgumentException if either vertex is outside [0, V-1]
     */
    private Relation computeRelation(int v, int w) {
        if (v < 0 || v > this.graph.V() - 1) {
            throw new IllegalArgumentException("Invalid vertex supplied: " + v);
        }
        if (w < 0 || w > this.graph.V() - 1) {
            throw new IllegalArgumentException("Invalid vertex supplied: " + w);
        }
        VertexPair key = new VertexPair(v, w);
        if (this.relations.containsKey(key)) {
            return this.relations.get(key);
        }
        int ancestor = -1;
        int distance = -1;
        ArrayDeque<Node> queue = new ArrayDeque<Node>();
        HashSet<Integer> closedV = new HashSet<Integer>(); // vertices already expanded for origin v
        HashSet<Integer> closedW = new HashSet<Integer>(); // vertices already expanded for origin w
        queue.add(new Node(v, v, 0));
        queue.add(new Node(w, w, 0));
        while (!queue.isEmpty()) {
            Node current = queue.remove();
            if (this.bfs.hasPath(v, current.id) && this.bfs.hasPath(w, current.id)) {
                // current.id is a common ancestor; total length is the sum of
                // both path lengths (path lists include their endpoints).
                ArrayList<Integer> pathV = this.bfs.path(v, current.id);
                ArrayList<Integer> pathW = this.bfs.path(w, current.id);
                int d = pathV.size() + pathW.size() - 2;
                if (distance == -1 || d < distance) {
                    distance = d;
                    // The reported ancestor is the first vertex the two paths share.
                    HashSet<Integer> pathVSet = new HashSet<Integer>(pathV.size());
                    for (int n : pathV) {
                        pathVSet.add(n);
                    }
                    for (int n : pathW) {
                        if (pathVSet.contains(n)) {
                            ancestor = n;
                            break;
                        }
                    }
                }
            }
            for (int n : this.graph.adj(current.id)) {
                // Expand only while we could still beat the best distance and
                // only if this vertex was not already expanded for its origin.
                if (current.origin == v
                        && (distance == -1 || current.distFromOrigin < distance)
                        && !closedV.contains(current.id)) {
                    queue.add(new Node(n, v, current.distFromOrigin + 1));
                }
                // BUG FIX: this branch previously had no origin check. A v-origin
                // node that was already closed for v fell through here and was
                // expanded as if it came from w, which both enqueued wrongly
                // tagged nodes and later corrupted closedW (possibly pruning
                // legitimate w-side exploration).
                else if (current.origin == w
                        && (distance == -1 || current.distFromOrigin < distance)
                        && !closedW.contains(current.id)) {
                    queue.add(new Node(n, w, current.distFromOrigin + 1));
                }
            }
            if (current.origin == v) {
                closedV.add(current.id);
            } else {
                closedW.add(current.id);
            }
        }
        Relation relation = new Relation(ancestor, distance);
        this.relations.put(key, relation);
        return relation;
    }
    // length of shortest ancestral path between v and w; -1 if no such path
    public int length(int v, int w) {
        return this.computeRelation(v, w).distance;
    }
    // a common ancestor of v and w that participates in a shortest ancestral path; -1 if no such path
    public int ancestor(int v, int w) {
        return this.computeRelation(v, w).ancestor;
    }
    // length of shortest ancestral path between any vertex in v and any vertex in w; -1 if no such path
    public int length(Iterable<Integer> v, Iterable<Integer> w) {
        if (v == null || w == null) {
            throw new IllegalArgumentException("List of vertices cannot be null");
        }
        int shortest = -1;
        for (int i : v) {
            for (int j : w) {
                int d = this.computeRelation(i, j).distance;
                if (shortest == -1 || (d != -1 && d < shortest)) {
                    shortest = d;
                }
            }
        }
        return shortest;
    }
    // a common ancestor that participates in shortest ancestral path; -1 if no such path
    public int ancestor(Iterable<Integer> v, Iterable<Integer> w) {
        if (v == null || w == null) {
            throw new IllegalArgumentException("List of vertices cannot be null");
        }
        int ancestor = -1;
        int shortest = -1;
        for (int i : v) {
            for (int j : w) {
                Relation relation = this.computeRelation(i, j);
                if (shortest == -1 || (relation.distance != -1 && relation.distance < shortest)) {
                    ancestor = relation.ancestor;
                    shortest = relation.distance;
                }
            }
        }
        return ancestor;
    }
    /** Reads a digraph from args[0], then answers (v, w) queries from stdin. */
    public static void main(String[] args) {
        In in = new In(args[0]);
        Digraph G = new Digraph(in);
        SAP sap = new SAP(G);
        while (!StdIn.isEmpty()) {
            int v = StdIn.readInt();
            int w = StdIn.readInt();
            int length = sap.length(v, w);
            int ancestor = sap.ancestor(v, w);
            StdOut.printf("length = %d, ancestor = %d\n", length, ancestor);
        }
    }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: storedserverpaymentchannel.proto
package com.rimbit.rimbit.protocols.channels;
public final class ServerState {
// NOTE(review): generated protobuf code — comments below are review aids only;
// regenerate from storedserverpaymentchannel.proto instead of editing by hand.
private ServerState() {}
// Registry hook emitted by protoc; this proto declares no extensions, so it is a no-op.
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistry registry) {
}
// Read-only accessor contract for the repeated `channels` field (field number 1).
public interface StoredServerPaymentChannelsOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// repeated .paymentchannels.StoredServerPaymentChannel channels = 1;
/**
* <code>repeated .paymentchannels.StoredServerPaymentChannel channels = 1;</code>
*/
java.util.List<com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel>
getChannelsList();
/**
* <code>repeated .paymentchannels.StoredServerPaymentChannel channels = 1;</code>
*/
com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel getChannels(int index);
/**
* <code>repeated .paymentchannels.StoredServerPaymentChannel channels = 1;</code>
*/
int getChannelsCount();
/**
* <code>repeated .paymentchannels.StoredServerPaymentChannel channels = 1;</code>
*/
java.util.List<? extends com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannelOrBuilder>
getChannelsOrBuilderList();
/**
* <code>repeated .paymentchannels.StoredServerPaymentChannel channels = 1;</code>
*/
com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannelOrBuilder getChannelsOrBuilder(
int index);
}
/**
* Protobuf type {@code paymentchannels.StoredServerPaymentChannels}
*
* <pre>
* A set of StoredPaymentChannel's
* </pre>
*/
public static final class StoredServerPaymentChannels extends
com.google.protobuf.GeneratedMessage
implements StoredServerPaymentChannelsOrBuilder {
// Use StoredServerPaymentChannels.newBuilder() to construct.
private StoredServerPaymentChannels(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// Used only for the shared default instance; carries an empty unknown-field set.
private StoredServerPaymentChannels(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
// Shared immutable default instance (all fields empty), assigned in the static initializer.
private static final StoredServerPaymentChannels defaultInstance;
public static StoredServerPaymentChannels getDefaultInstance() {
return defaultInstance;
}
public StoredServerPaymentChannels getDefaultInstanceForType() {
return defaultInstance;
}
// Fields that arrived on the wire but are not defined in this message's schema.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: consumes the stream until tag 0 (end of
// message), collecting repeated `channels` entries and preserving unknown fields.
private StoredServerPaymentChannels(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
// Tag 0 means end of the message.
done = true;
break;
default: {
// Unrecognized tag: keep the bytes in unknownFields (or stop on malformed input).
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
// Tag 10 = field 1 (channels), length-delimited wire type.
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
channels_ = new java.util.ArrayList<com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel>();
mutable_bitField0_ |= 0x00000001;
}
channels_.add(input.readMessage(com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel.PARSER, extensionRegistry));
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Seal the repeated field and unknown fields even when parsing failed partway.
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
channels_ = java.util.Collections.unmodifiableList(channels_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor / reflection plumbing generated by protoc.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.rimbit.rimbit.protocols.channels.ServerState.internal_static_paymentchannels_StoredServerPaymentChannels_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.rimbit.rimbit.protocols.channels.ServerState.internal_static_paymentchannels_StoredServerPaymentChannels_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannels.class, com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannels.Builder.class);
}
// Stateless parser delegating to the wire-format parsing constructor.
public static com.google.protobuf.Parser<StoredServerPaymentChannels> PARSER =
new com.google.protobuf.AbstractParser<StoredServerPaymentChannels>() {
public StoredServerPaymentChannels parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new StoredServerPaymentChannels(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<StoredServerPaymentChannels> getParserForType() {
return PARSER;
}
// repeated .paymentchannels.StoredServerPaymentChannel channels = 1;
public static final int CHANNELS_FIELD_NUMBER = 1;
// Backing list; unmodifiable after parsing (see the parse constructor's finally block).
private java.util.List<com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel> channels_;
/**
* <code>repeated .paymentchannels.StoredServerPaymentChannel channels = 1;</code>
*/
public java.util.List<com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel> getChannelsList() {
return channels_;
}
/**
* <code>repeated .paymentchannels.StoredServerPaymentChannel channels = 1;</code>
*/
public java.util.List<? extends com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannelOrBuilder>
getChannelsOrBuilderList() {
return channels_;
}
/**
* <code>repeated .paymentchannels.StoredServerPaymentChannel channels = 1;</code>
*/
public int getChannelsCount() {
return channels_.size();
}
/**
* <code>repeated .paymentchannels.StoredServerPaymentChannel channels = 1;</code>
*/
public com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel getChannels(int index) {
return channels_.get(index);
}
/**
* <code>repeated .paymentchannels.StoredServerPaymentChannel channels = 1;</code>
*/
public com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannelOrBuilder getChannelsOrBuilder(
int index) {
return channels_.get(index);
}
// Resets all fields to their proto defaults (empty repeated list).
private void initFields() {
channels_ = java.util.Collections.emptyList();
}
// Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
// The message is initialized iff every nested channel message is.
for (int i = 0; i < getChannelsCount(); i++) {
if (!getChannels(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// getSerializedSize() is called for its memoization side effect before writing.
getSerializedSize();
for (int i = 0; i < channels_.size(); i++) {
output.writeMessage(1, channels_.get(i));
}
getUnknownFields().writeTo(output);
}
// Memoized wire size: -1 = not computed yet.
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < channels_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, channels_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
    // Standard generated parsing entry points. All overloads delegate to the
    // static PARSER; the *Delimited* variants read a varint length prefix
    // first, the others consume the entire input.
    public static com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannels parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannels parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannels parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannels parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannels parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannels parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannels parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannels parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannels parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannels parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Builder factory methods: a fresh builder, a builder pre-populated from
    // a prototype message, and the parent-aware variant used internally when
    // this message is a sub-builder of an enclosing builder.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannels prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code paymentchannels.StoredServerPaymentChannels}
     *
     * <pre>
     * A set of StoredPaymentChannel's
     * </pre>
     *
     * Builder for {@code StoredServerPaymentChannels}. The single repeated
     * field {@code channels} is stored either as a plain list
     * ({@code channels_}) or, once nested builders are requested, via a
     * {@code RepeatedFieldBuilder} ({@code channelsBuilder_}) — exactly one
     * of the two is active at any time, which is why every accessor
     * branches on {@code channelsBuilder_ == null}.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannelsOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return com.rimbit.rimbit.protocols.channels.ServerState.internal_static_paymentchannels_StoredServerPaymentChannels_descriptor;
      }
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return com.rimbit.rimbit.protocols.channels.ServerState.internal_static_paymentchannels_StoredServerPaymentChannels_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannels.class, com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannels.Builder.class);
      }
      // Construct using com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannels.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // Eagerly create the nested field builder only in environments that
        // always use field builders (see GeneratedMessage.alwaysUseFieldBuilders).
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getChannelsFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
      public Builder clear() {
        super.clear();
        if (channelsBuilder_ == null) {
          channels_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          channelsBuilder_.clear();
        }
        return this;
      }
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return com.rimbit.rimbit.protocols.channels.ServerState.internal_static_paymentchannels_StoredServerPaymentChannels_descriptor;
      }
      public com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannels getDefaultInstanceForType() {
        return com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannels.getDefaultInstance();
      }
      public com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannels build() {
        com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannels result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      public com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannels buildPartial() {
        com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannels result = new com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannels(this);
        int from_bitField0_ = bitField0_;
        if (channelsBuilder_ == null) {
          // Freeze the list: hand an unmodifiable view to the message and
          // drop the "mutable" bit so further builder edits re-copy it.
          if (((bitField0_ & 0x00000001) == 0x00000001)) {
            channels_ = java.util.Collections.unmodifiableList(channels_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.channels_ = channels_;
        } else {
          result.channels_ = channelsBuilder_.build();
        }
        onBuilt();
        return result;
      }
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannels) {
          return mergeFrom((com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannels)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      public Builder mergeFrom(com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannels other) {
        if (other == com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannels.getDefaultInstance()) return this;
        if (channelsBuilder_ == null) {
          if (!other.channels_.isEmpty()) {
            if (channels_.isEmpty()) {
              // Adopt the other message's (immutable) list directly.
              channels_ = other.channels_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureChannelsIsMutable();
              channels_.addAll(other.channels_);
            }
            onChanged();
          }
        } else {
          if (!other.channels_.isEmpty()) {
            if (channelsBuilder_.isEmpty()) {
              // Discard the empty field builder and fall back to list mode;
              // re-create the builder immediately only when field builders
              // are forced on.
              channelsBuilder_.dispose();
              channelsBuilder_ = null;
              channels_ = other.channels_;
              bitField0_ = (bitField0_ & ~0x00000001);
              channelsBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getChannelsFieldBuilder() : null;
            } else {
              channelsBuilder_.addAllMessages(other.channels_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      public final boolean isInitialized() {
        // Initialized iff every contained channel has its required fields set.
        for (int i = 0; i < getChannelsCount(); i++) {
          if (!getChannels(i).isInitialized()) {
            
            return false;
          }
        }
        return true;
      }
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannels parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Keep whatever was parsed before the failure, then rethrow.
          parsedMessage = (com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannels) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;
      // repeated .paymentchannels.StoredServerPaymentChannel channels = 1;
      private java.util.List<com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel> channels_ =
        java.util.Collections.emptyList();
      private void ensureChannelsIsMutable() {
        // Copy-on-write: bit 0x1 records that channels_ is a private
        // mutable ArrayList rather than a shared/immutable list.
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          channels_ = new java.util.ArrayList<com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel>(channels_);
          bitField0_ |= 0x00000001;
         }
      }
      private com.google.protobuf.RepeatedFieldBuilder<
          com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel, com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel.Builder, com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannelOrBuilder> channelsBuilder_;
      /**
       * <code>repeated .paymentchannels.StoredServerPaymentChannel channels = 1;</code>
       */
      public java.util.List<com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel> getChannelsList() {
        if (channelsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(channels_);
        } else {
          return channelsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .paymentchannels.StoredServerPaymentChannel channels = 1;</code>
       */
      public int getChannelsCount() {
        if (channelsBuilder_ == null) {
          return channels_.size();
        } else {
          return channelsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .paymentchannels.StoredServerPaymentChannel channels = 1;</code>
       */
      public com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel getChannels(int index) {
        if (channelsBuilder_ == null) {
          return channels_.get(index);
        } else {
          return channelsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .paymentchannels.StoredServerPaymentChannel channels = 1;</code>
       */
      public Builder setChannels(
          int index, com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel value) {
        if (channelsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureChannelsIsMutable();
          channels_.set(index, value);
          onChanged();
        } else {
          channelsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .paymentchannels.StoredServerPaymentChannel channels = 1;</code>
       */
      public Builder setChannels(
          int index, com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel.Builder builderForValue) {
        if (channelsBuilder_ == null) {
          ensureChannelsIsMutable();
          channels_.set(index, builderForValue.build());
          onChanged();
        } else {
          channelsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .paymentchannels.StoredServerPaymentChannel channels = 1;</code>
       */
      public Builder addChannels(com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel value) {
        if (channelsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureChannelsIsMutable();
          channels_.add(value);
          onChanged();
        } else {
          channelsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .paymentchannels.StoredServerPaymentChannel channels = 1;</code>
       */
      public Builder addChannels(
          int index, com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel value) {
        if (channelsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureChannelsIsMutable();
          channels_.add(index, value);
          onChanged();
        } else {
          channelsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .paymentchannels.StoredServerPaymentChannel channels = 1;</code>
       */
      public Builder addChannels(
          com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel.Builder builderForValue) {
        if (channelsBuilder_ == null) {
          ensureChannelsIsMutable();
          channels_.add(builderForValue.build());
          onChanged();
        } else {
          channelsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .paymentchannels.StoredServerPaymentChannel channels = 1;</code>
       */
      public Builder addChannels(
          int index, com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel.Builder builderForValue) {
        if (channelsBuilder_ == null) {
          ensureChannelsIsMutable();
          channels_.add(index, builderForValue.build());
          onChanged();
        } else {
          channelsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .paymentchannels.StoredServerPaymentChannel channels = 1;</code>
       */
      public Builder addAllChannels(
          java.lang.Iterable<? extends com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel> values) {
        if (channelsBuilder_ == null) {
          ensureChannelsIsMutable();
          super.addAll(values, channels_);
          onChanged();
        } else {
          channelsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .paymentchannels.StoredServerPaymentChannel channels = 1;</code>
       */
      public Builder clearChannels() {
        if (channelsBuilder_ == null) {
          channels_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          channelsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .paymentchannels.StoredServerPaymentChannel channels = 1;</code>
       */
      public Builder removeChannels(int index) {
        if (channelsBuilder_ == null) {
          ensureChannelsIsMutable();
          channels_.remove(index);
          onChanged();
        } else {
          channelsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .paymentchannels.StoredServerPaymentChannel channels = 1;</code>
       */
      public com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel.Builder getChannelsBuilder(
          int index) {
        return getChannelsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .paymentchannels.StoredServerPaymentChannel channels = 1;</code>
       */
      public com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannelOrBuilder getChannelsOrBuilder(
          int index) {
        if (channelsBuilder_ == null) {
          return channels_.get(index);  } else {
          return channelsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .paymentchannels.StoredServerPaymentChannel channels = 1;</code>
       */
      public java.util.List<? extends com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannelOrBuilder> 
           getChannelsOrBuilderList() {
        if (channelsBuilder_ != null) {
          return channelsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(channels_);
        }
      }
      /**
       * <code>repeated .paymentchannels.StoredServerPaymentChannel channels = 1;</code>
       */
      public com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel.Builder addChannelsBuilder() {
        return getChannelsFieldBuilder().addBuilder(
            com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel.getDefaultInstance());
      }
      /**
       * <code>repeated .paymentchannels.StoredServerPaymentChannel channels = 1;</code>
       */
      public com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel.Builder addChannelsBuilder(
          int index) {
        return getChannelsFieldBuilder().addBuilder(
            index, com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel.getDefaultInstance());
      }
      /**
       * <code>repeated .paymentchannels.StoredServerPaymentChannel channels = 1;</code>
       */
      public java.util.List<com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel.Builder> 
           getChannelsBuilderList() {
        return getChannelsFieldBuilder().getBuilderList();
      }
      private com.google.protobuf.RepeatedFieldBuilder<
          com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel, com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel.Builder, com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannelOrBuilder> 
          getChannelsFieldBuilder() {
        // Lazily switch from list mode to field-builder mode; once created,
        // channelsBuilder_ owns the data and channels_ is nulled out.
        if (channelsBuilder_ == null) {
          channelsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel, com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel.Builder, com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannelOrBuilder>(
                  channels_,
                  ((bitField0_ & 0x00000001) == 0x00000001),
                  getParentForChildren(),
                  isClean());
          channels_ = null;
        }
        return channelsBuilder_;
      }
      // @@protoc_insertion_point(builder_scope:paymentchannels.StoredServerPaymentChannels)
    }
    // Eagerly build the shared default (empty) instance used by
    // getDefaultInstance(); the boolean ctor skips builder setup.
    static {
      defaultInstance = new StoredServerPaymentChannels(true);
      defaultInstance.initFields();
    }
// @@protoc_insertion_point(class_scope:paymentchannels.StoredServerPaymentChannels)
}
  /**
   * Read-only view shared by {@code StoredServerPaymentChannel} and its
   * Builder: presence checks ({@code hasX}) and getters for each field of
   * the {@code paymentchannels.StoredServerPaymentChannel} message.
   */
  public interface StoredServerPaymentChannelOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required uint64 bestValueToMe = 1;
    /**
     * <code>required uint64 bestValueToMe = 1;</code>
     */
    boolean hasBestValueToMe();
    /**
     * <code>required uint64 bestValueToMe = 1;</code>
     */
    long getBestValueToMe();

    // optional bytes bestValueSignature = 2;
    /**
     * <code>optional bytes bestValueSignature = 2;</code>
     */
    boolean hasBestValueSignature();
    /**
     * <code>optional bytes bestValueSignature = 2;</code>
     */
    com.google.protobuf.ByteString getBestValueSignature();

    // required uint64 refundTransactionUnlockTimeSecs = 3;
    /**
     * <code>required uint64 refundTransactionUnlockTimeSecs = 3;</code>
     */
    boolean hasRefundTransactionUnlockTimeSecs();
    /**
     * <code>required uint64 refundTransactionUnlockTimeSecs = 3;</code>
     */
    long getRefundTransactionUnlockTimeSecs();

    // required bytes contractTransaction = 4;
    /**
     * <code>required bytes contractTransaction = 4;</code>
     */
    boolean hasContractTransaction();
    /**
     * <code>required bytes contractTransaction = 4;</code>
     */
    com.google.protobuf.ByteString getContractTransaction();

    // required bytes clientOutput = 5;
    /**
     * <code>required bytes clientOutput = 5;</code>
     */
    boolean hasClientOutput();
    /**
     * <code>required bytes clientOutput = 5;</code>
     */
    com.google.protobuf.ByteString getClientOutput();

    // required bytes myKey = 6;
    /**
     * <code>required bytes myKey = 6;</code>
     */
    boolean hasMyKey();
    /**
     * <code>required bytes myKey = 6;</code>
     */
    com.google.protobuf.ByteString getMyKey();
  }
/**
* Protobuf type {@code paymentchannels.StoredServerPaymentChannel}
*
* <pre>
* A server-side payment channel in serialized form, which can be reloaded later if the server restarts
* </pre>
*/
public static final class StoredServerPaymentChannel extends
com.google.protobuf.GeneratedMessage
implements StoredServerPaymentChannelOrBuilder {
    // Use StoredServerPaymentChannel.newBuilder() to construct.
    private StoredServerPaymentChannel(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only for the shared default instance; carries no unknown fields.
    private StoredServerPaymentChannel(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final StoredServerPaymentChannel defaultInstance;
    public static StoredServerPaymentChannel getDefaultInstance() {
      return defaultInstance;
    }

    public StoredServerPaymentChannel getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields that were on the wire but are not defined in this schema version.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor: reads tag/value pairs until EOF
     * (tag 0) or an unparseable unknown field, setting the matching
     * presence bit in {@code bitField0_} for each recognized field.
     * Unknown fields are preserved in {@code unknownFields}.
     */
    private StoredServerPaymentChannel(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 signals end of input.
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            // Tags below are (field_number << 3) | wire_type:
            // 8 = field 1 varint, 18/34/42/50 = fields 2/4/5/6 length-delimited,
            // 24 = field 3 varint.
            case 8: {
              bitField0_ |= 0x00000001;
              bestValueToMe_ = input.readUInt64();
              break;
            }
            case 18: {
              bitField0_ |= 0x00000002;
              bestValueSignature_ = input.readBytes();
              break;
            }
            case 24: {
              bitField0_ |= 0x00000004;
              refundTransactionUnlockTimeSecs_ = input.readUInt64();
              break;
            }
            case 34: {
              bitField0_ |= 0x00000008;
              contractTransaction_ = input.readBytes();
              break;
            }
            case 42: {
              bitField0_ |= 0x00000010;
              clientOutput_ = input.readBytes();
              break;
            }
            case 50: {
              bitField0_ |= 0x00000020;
              myKey_ = input.readBytes();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Reflection support: descriptor and field accessor table generated for
    // the paymentchannels.StoredServerPaymentChannel message type.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.rimbit.rimbit.protocols.channels.ServerState.internal_static_paymentchannels_StoredServerPaymentChannel_descriptor;
    }
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.rimbit.rimbit.protocols.channels.ServerState.internal_static_paymentchannels_StoredServerPaymentChannel_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel.class, com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel.Builder.class);
    }
    // Shared parser used by all parseFrom overloads; delegates to the
    // wire-format parsing constructor above.
    public static com.google.protobuf.Parser<StoredServerPaymentChannel> PARSER =
        new com.google.protobuf.AbstractParser<StoredServerPaymentChannel>() {
      public StoredServerPaymentChannel parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new StoredServerPaymentChannel(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<StoredServerPaymentChannel> getParserForType() {
      return PARSER;
    }
    // Presence bits: one bit per field, in field-number order (0x1 = field 1,
    // 0x2 = field 2, ... 0x20 = field 6).
    private int bitField0_;
    // required uint64 bestValueToMe = 1;
    public static final int BESTVALUETOME_FIELD_NUMBER = 1;
    private long bestValueToMe_;
    /**
     * <code>required uint64 bestValueToMe = 1;</code>
     */
    public boolean hasBestValueToMe() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required uint64 bestValueToMe = 1;</code>
     */
    public long getBestValueToMe() {
      return bestValueToMe_;
    }

    // optional bytes bestValueSignature = 2;
    public static final int BESTVALUESIGNATURE_FIELD_NUMBER = 2;
    private com.google.protobuf.ByteString bestValueSignature_;
    /**
     * <code>optional bytes bestValueSignature = 2;</code>
     */
    public boolean hasBestValueSignature() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional bytes bestValueSignature = 2;</code>
     */
    public com.google.protobuf.ByteString getBestValueSignature() {
      return bestValueSignature_;
    }

    // required uint64 refundTransactionUnlockTimeSecs = 3;
    public static final int REFUNDTRANSACTIONUNLOCKTIMESECS_FIELD_NUMBER = 3;
    private long refundTransactionUnlockTimeSecs_;
    /**
     * <code>required uint64 refundTransactionUnlockTimeSecs = 3;</code>
     */
    public boolean hasRefundTransactionUnlockTimeSecs() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>required uint64 refundTransactionUnlockTimeSecs = 3;</code>
     */
    public long getRefundTransactionUnlockTimeSecs() {
      return refundTransactionUnlockTimeSecs_;
    }

    // required bytes contractTransaction = 4;
    public static final int CONTRACTTRANSACTION_FIELD_NUMBER = 4;
    private com.google.protobuf.ByteString contractTransaction_;
    /**
     * <code>required bytes contractTransaction = 4;</code>
     */
    public boolean hasContractTransaction() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>required bytes contractTransaction = 4;</code>
     */
    public com.google.protobuf.ByteString getContractTransaction() {
      return contractTransaction_;
    }

    // required bytes clientOutput = 5;
    public static final int CLIENTOUTPUT_FIELD_NUMBER = 5;
    private com.google.protobuf.ByteString clientOutput_;
    /**
     * <code>required bytes clientOutput = 5;</code>
     */
    public boolean hasClientOutput() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    /**
     * <code>required bytes clientOutput = 5;</code>
     */
    public com.google.protobuf.ByteString getClientOutput() {
      return clientOutput_;
    }

    // required bytes myKey = 6;
    public static final int MYKEY_FIELD_NUMBER = 6;
    private com.google.protobuf.ByteString myKey_;
    /**
     * <code>required bytes myKey = 6;</code>
     */
    public boolean hasMyKey() {
      return ((bitField0_ & 0x00000020) == 0x00000020);
    }
    /**
     * <code>required bytes myKey = 6;</code>
     */
    public com.google.protobuf.ByteString getMyKey() {
      return myKey_;
    }
    // Resets every field to its proto default (0 for uint64, empty bytes).
    private void initFields() {
      bestValueToMe_ = 0L;
      bestValueSignature_ = com.google.protobuf.ByteString.EMPTY;
      refundTransactionUnlockTimeSecs_ = 0L;
      contractTransaction_ = com.google.protobuf.ByteString.EMPTY;
      clientOutput_ = com.google.protobuf.ByteString.EMPTY;
      myKey_ = com.google.protobuf.ByteString.EMPTY;
    }
    // Memoized initialization state: -1 unknown, 0 missing required field,
    // 1 fully initialized.
    private byte memoizedIsInitialized = -1;
    /**
     * Returns true iff all required fields (1, 3, 4, 5, 6) are present;
     * field 2 (bestValueSignature) is optional and not checked.
     */
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasBestValueToMe()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasRefundTransactionUnlockTimeSecs()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasContractTransaction()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasClientOutput()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasMyKey()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    /**
     * Serializes every present field to the output stream in field-number
     * order, followed by any unknown fields. Calls getSerializedSize()
     * first so length prefixes computed there are cached for writing.
     */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt64(1, bestValueToMe_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, bestValueSignature_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeUInt64(3, refundTransactionUnlockTimeSecs_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeBytes(4, contractTransaction_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeBytes(5, clientOutput_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        output.writeBytes(6, myKey_);
      }
      getUnknownFields().writeTo(output);
    }
    // Cached serialized size; -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    /**
     * Computes (and memoizes) the serialized byte size: the sum of each
     * present field's encoded size plus any unknown fields.
     */
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(1, bestValueToMe_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, bestValueSignature_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(3, refundTransactionUnlockTimeSecs_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(4, contractTransaction_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(5, clientOutput_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(6, myKey_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    // Java serialization is delegated to GeneratedMessage's serialized-proxy
    // mechanism; the UID is fixed at 0 by the protobuf code generator.
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    // Standard generated parsing entry points. All overloads delegate to the
    // static PARSER; the *Delimited* variants read a varint length prefix
    // first, the others consume the entire input.
    public static com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Builder factory methods: a fresh builder, a builder pre-populated from
    // a prototype message, and the parent-aware variant used internally when
    // this message is a sub-builder of an enclosing builder.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
/**
* Protobuf type {@code paymentchannels.StoredServerPaymentChannel}
*
* <pre>
* A server-side payment channel in serialized form, which can be reloaded later if the server restarts
* </pre>
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannelOrBuilder {
      // Reflection support: same descriptor / accessor table as the message.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return com.rimbit.rimbit.protocols.channels.ServerState.internal_static_paymentchannels_StoredServerPaymentChannel_descriptor;
      }
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return com.rimbit.rimbit.protocols.channels.ServerState.internal_static_paymentchannels_StoredServerPaymentChannel_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel.class, com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel.Builder.class);
      }
      // Construct using com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // No nested message fields, so nothing to eagerly initialize.
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      // Resets every field to its proto default and clears all presence bits.
      public Builder clear() {
        super.clear();
        bestValueToMe_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000001);
        bestValueSignature_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000002);
        refundTransactionUnlockTimeSecs_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000004);
        contractTransaction_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000008);
        clientOutput_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000010);
        myKey_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000020);
        return this;
      }
      // Deep copy via round-trip through buildPartial().
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return com.rimbit.rimbit.protocols.channels.ServerState.internal_static_paymentchannels_StoredServerPaymentChannel_descriptor;
      }

      public com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel getDefaultInstanceForType() {
        return com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel.getDefaultInstance();
      }
      // Builds and verifies all required fields are set; throws
      // UninitializedMessageException otherwise.
      public com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel build() {
        com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      /**
       * Builds the message without checking required fields: copies each
       * field value unconditionally and carries its presence bit across
       * only when set in the builder.
       */
      public com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel buildPartial() {
        com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel result = new com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.bestValueToMe_ = bestValueToMe_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.bestValueSignature_ = bestValueSignature_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.refundTransactionUnlockTimeSecs_ = refundTransactionUnlockTimeSecs_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.contractTransaction_ = contractTransaction_;
        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
          to_bitField0_ |= 0x00000010;
        }
        result.clientOutput_ = clientOutput_;
        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
          to_bitField0_ |= 0x00000020;
        }
        result.myKey_ = myKey_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      // Generic merge entry point: dispatches to the typed overload when the
      // argument is the same message type, else falls back to reflection.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel) {
          return mergeFrom((com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      // Copies every field that is set on 'other' into this builder
      // (fields unset on 'other' are left untouched).
      public Builder mergeFrom(com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel other) {
        if (other == com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel.getDefaultInstance()) return this;
        if (other.hasBestValueToMe()) {
          setBestValueToMe(other.getBestValueToMe());
        }
        if (other.hasBestValueSignature()) {
          setBestValueSignature(other.getBestValueSignature());
        }
        if (other.hasRefundTransactionUnlockTimeSecs()) {
          setRefundTransactionUnlockTimeSecs(other.getRefundTransactionUnlockTimeSecs());
        }
        if (other.hasContractTransaction()) {
          setContractTransaction(other.getContractTransaction());
        }
        if (other.hasClientOutput()) {
          setClientOutput(other.getClientOutput());
        }
        if (other.hasMyKey()) {
          setMyKey(other.getMyKey());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      // True only when every required field has been set. bestValueSignature
      // is the single optional field and is deliberately not checked here.
      public final boolean isInitialized() {
        if (!hasBestValueToMe()) {
          return false;
        }
        if (!hasRefundTransactionUnlockTimeSecs()) {
          return false;
        }
        if (!hasContractTransaction()) {
          return false;
        }
        if (!hasClientOutput()) {
          return false;
        }
        if (!hasMyKey()) {
          return false;
        }
        return true;
      }
      // Parses a message from the stream and merges it into this builder.
      // On a parse failure, any fields successfully parsed before the error
      // are still merged (via the finally block) before rethrowing.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (com.rimbit.rimbit.protocols.channels.ServerState.StoredServerPaymentChannel) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // One bit per field recording whether the field was explicitly set.
      private int bitField0_;
      // required uint64 bestValueToMe = 1;
      private long bestValueToMe_ ;
      /**
       * <code>required uint64 bestValueToMe = 1;</code>
       */
      public boolean hasBestValueToMe() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required uint64 bestValueToMe = 1;</code>
       */
      public long getBestValueToMe() {
        return bestValueToMe_;
      }
      /**
       * <code>required uint64 bestValueToMe = 1;</code>
       */
      public Builder setBestValueToMe(long value) {
        bitField0_ |= 0x00000001;
        bestValueToMe_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required uint64 bestValueToMe = 1;</code>
       */
      public Builder clearBestValueToMe() {
        bitField0_ = (bitField0_ & ~0x00000001);
        bestValueToMe_ = 0L;
        onChanged();
        return this;
      }
      // optional bytes bestValueSignature = 2;
      private com.google.protobuf.ByteString bestValueSignature_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>optional bytes bestValueSignature = 2;</code>
       */
      public boolean hasBestValueSignature() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional bytes bestValueSignature = 2;</code>
       */
      public com.google.protobuf.ByteString getBestValueSignature() {
        return bestValueSignature_;
      }
      /**
       * <code>optional bytes bestValueSignature = 2;</code>
       */
      public Builder setBestValueSignature(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        bestValueSignature_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bytes bestValueSignature = 2;</code>
       */
      public Builder clearBestValueSignature() {
        bitField0_ = (bitField0_ & ~0x00000002);
        // Restore the field to the default instance's value (empty bytes).
        bestValueSignature_ = getDefaultInstance().getBestValueSignature();
        onChanged();
        return this;
      }
      // required uint64 refundTransactionUnlockTimeSecs = 3;
      private long refundTransactionUnlockTimeSecs_ ;
      /**
       * <code>required uint64 refundTransactionUnlockTimeSecs = 3;</code>
       */
      public boolean hasRefundTransactionUnlockTimeSecs() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>required uint64 refundTransactionUnlockTimeSecs = 3;</code>
       */
      public long getRefundTransactionUnlockTimeSecs() {
        return refundTransactionUnlockTimeSecs_;
      }
      /**
       * <code>required uint64 refundTransactionUnlockTimeSecs = 3;</code>
       */
      public Builder setRefundTransactionUnlockTimeSecs(long value) {
        bitField0_ |= 0x00000004;
        refundTransactionUnlockTimeSecs_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required uint64 refundTransactionUnlockTimeSecs = 3;</code>
       */
      public Builder clearRefundTransactionUnlockTimeSecs() {
        bitField0_ = (bitField0_ & ~0x00000004);
        refundTransactionUnlockTimeSecs_ = 0L;
        onChanged();
        return this;
      }
      // required bytes contractTransaction = 4;
      private com.google.protobuf.ByteString contractTransaction_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>required bytes contractTransaction = 4;</code>
       */
      public boolean hasContractTransaction() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>required bytes contractTransaction = 4;</code>
       */
      public com.google.protobuf.ByteString getContractTransaction() {
        return contractTransaction_;
      }
      /**
       * <code>required bytes contractTransaction = 4;</code>
       */
      public Builder setContractTransaction(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000008;
        contractTransaction_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required bytes contractTransaction = 4;</code>
       */
      public Builder clearContractTransaction() {
        bitField0_ = (bitField0_ & ~0x00000008);
        // Restore the field to the default instance's value (empty bytes).
        contractTransaction_ = getDefaultInstance().getContractTransaction();
        onChanged();
        return this;
      }
      // required bytes clientOutput = 5;
      private com.google.protobuf.ByteString clientOutput_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>required bytes clientOutput = 5;</code>
       */
      public boolean hasClientOutput() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      /**
       * <code>required bytes clientOutput = 5;</code>
       */
      public com.google.protobuf.ByteString getClientOutput() {
        return clientOutput_;
      }
      /**
       * <code>required bytes clientOutput = 5;</code>
       */
      public Builder setClientOutput(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000010;
        clientOutput_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required bytes clientOutput = 5;</code>
       */
      public Builder clearClientOutput() {
        bitField0_ = (bitField0_ & ~0x00000010);
        // Restore the field to the default instance's value (empty bytes).
        clientOutput_ = getDefaultInstance().getClientOutput();
        onChanged();
        return this;
      }
      // required bytes myKey = 6;
      private com.google.protobuf.ByteString myKey_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>required bytes myKey = 6;</code>
       */
      public boolean hasMyKey() {
        return ((bitField0_ & 0x00000020) == 0x00000020);
      }
      /**
       * <code>required bytes myKey = 6;</code>
       */
      public com.google.protobuf.ByteString getMyKey() {
        return myKey_;
      }
      /**
       * <code>required bytes myKey = 6;</code>
       */
      public Builder setMyKey(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000020;
        myKey_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required bytes myKey = 6;</code>
       */
      public Builder clearMyKey() {
        bitField0_ = (bitField0_ & ~0x00000020);
        // Restore the field to the default instance's value (empty bytes).
        myKey_ = getDefaultInstance().getMyKey();
        onChanged();
        return this;
      }
// @@protoc_insertion_point(builder_scope:paymentchannels.StoredServerPaymentChannel)
}
    // Eagerly creates the shared default instance for this message type.
    static {
      defaultInstance = new StoredServerPaymentChannel(true);
      defaultInstance.initFields();
    }
// @@protoc_insertion_point(class_scope:paymentchannels.StoredServerPaymentChannel)
}
  // Descriptors and field-accessor tables for the two message types in this
  // file; populated by the static descriptor-assembly block below.
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_paymentchannels_StoredServerPaymentChannels_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_paymentchannels_StoredServerPaymentChannels_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_paymentchannels_StoredServerPaymentChannel_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_paymentchannels_StoredServerPaymentChannel_fieldAccessorTable;
  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  // Assembles the file descriptor from the embedded serialized
  // storedserverpaymentchannel.proto and wires up the per-message
  // descriptors and field-accessor tables. The string literal is protoc
  // output; do not edit it by hand.
  static {
    java.lang.String[] descriptorData = {
      "\n storedserverpaymentchannel.proto\022\017paym" +
      "entchannels\"\\\n\033StoredServerPaymentChanne" +
      "ls\022=\n\010channels\030\001 \003(\0132+.paymentchannels.S" +
      "toredServerPaymentChannel\"\272\001\n\032StoredServ" +
      "erPaymentChannel\022\025\n\rbestValueToMe\030\001 \002(\004\022" +
      "\032\n\022bestValueSignature\030\002 \001(\014\022\'\n\037refundTra" +
      "nsactionUnlockTimeSecs\030\003 \002(\004\022\033\n\023contract" +
      "Transaction\030\004 \002(\014\022\024\n\014clientOutput\030\005 \002(\014\022" +
      "\r\n\005myKey\030\006 \002(\014B3\n$com.rimbit.rimbit.prot" +
      "ocols.channelsB\013ServerState"
    };
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          internal_static_paymentchannels_StoredServerPaymentChannels_descriptor =
            getDescriptor().getMessageTypes().get(0);
          internal_static_paymentchannels_StoredServerPaymentChannels_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_paymentchannels_StoredServerPaymentChannels_descriptor,
              new java.lang.String[] { "Channels", });
          internal_static_paymentchannels_StoredServerPaymentChannel_descriptor =
            getDescriptor().getMessageTypes().get(1);
          internal_static_paymentchannels_StoredServerPaymentChannel_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_paymentchannels_StoredServerPaymentChannel_descriptor,
              new java.lang.String[] { "BestValueToMe", "BestValueSignature", "RefundTransactionUnlockTimeSecs", "ContractTransaction", "ClientOutput", "MyKey", });
          return null;
        }
      };
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
        }, assigner);
  }
// @@protoc_insertion_point(outer_class_scope)
}
| |
/*
* Copyright 2014 Guidewire Software, Inc.
*/
package gw.lang.parser.exceptions;
import gw.lang.parser.IExpression;
import gw.lang.parser.expressions.IProgram;
import gw.lang.reflect.gs.IGosuClass;
import gw.lang.parser.resources.ResourceKey;
import gw.lang.parser.IParseIssue;
import gw.lang.parser.ISymbolTable;
import gw.lang.parser.IParsedElement;
import gw.lang.parser.IParserState;
import gw.lang.parser.IFullParserState;
import gw.lang.parser.IParserPart;
import gw.lang.parser.IScriptPartId;
import gw.lang.reflect.IType;
import gw.config.CommonServices;
/**
 * Base class for issues (syntax errors and warnings) discovered during
 * parsing. Although this type extends {@link Exception} so it can be thrown,
 * a ParseIssue primarily serves to tag parsed elements with diagnostics,
 * which is why {@link #fillInStackTrace()} is overridden to do nothing.
 */
public abstract class ParseIssue extends Exception implements IParseIssue
{
  // Some localized messages append source context after this marker; the UI
  // message is truncated at it. See getUIMessage().
  private static final String SOURCE_DELIMITER = "\nat line ";
  // Total number of source lines rendered in a context string (the error
  // line plus the lines around it).
  private static final int CONTEXT_LINES = 3;

  // Position fields are boxed because any of them may be unknown (null)
  // until resolve()/resetPositions() derives them from the parsed element.
  private Integer _lineNumber;
  private Integer _lineOffset;
  private Integer _tokenColumn;
  private Integer _tokenStart;
  private Integer _tokenEnd;
  private ResourceKey _messageKey;
  private Object[] _messageArgs;
  private ISymbolTable _symbolTable;
  private IParsedElement _parentElement;
  private String _stateSource;
  private IGosuClass _parentClass;

  /**
   * Debugging hook invoked from every constructor; intentionally empty.
   * Set a breakpoint here to inspect issues as they are created.
   */
  private void debug() {
  }

  protected ParseIssue( IParserState parserState, ResourceKey key, Object... msgArgs )
  {
    super( "" );
    _messageKey = key;
    _messageArgs = normalizeMessageArgs(msgArgs);
    initFieldsFromParserState( parserState );
    debug();
  }

  protected ParseIssue( Integer lineNumber, Integer lineOffset, Integer tokenColumn,
                        Integer tokenStart, Integer tokenEnd,
                        ISymbolTable symbolTable, ResourceKey key, Object... msgArgs )
  {
    super( "" );
    _messageKey = key;
    _messageArgs = normalizeMessageArgs(msgArgs);
    _symbolTable = symbolTable;
    _lineNumber = lineNumber;
    // Line offsets are 1-based; clamp to at least 1, defaulting null to 1.
    _lineOffset = lineOffset == null ? 1 : Math.max( 1, lineOffset );
    _tokenColumn = tokenColumn;
    _tokenStart = tokenStart;
    _tokenEnd = tokenEnd;
    debug();
  }

  protected ParseIssue( IParserState state, Throwable t )
  {
    super( t );
    initFieldsFromParserState( state );
    debug();
  }

  /**
   * Don't fill in stack trace since parse issues are not really "exceptional"
   * in terms of the parser's Java implementation; we don't care much about the
   * Java stack trace when these are thrown. Rather parse issues provide a means
   * to tag parsed elements with issues discovered during parsing, such as
   * syntax warnings and errors. Hence, the ParseIssue interface.
   * <p/>
   * Note this method is otherwise very costly from a performance standpoint.
   * <p/>
   * todo: consider extracting the bulk of this class into a separate non-exception class and reference that class here for when we need to use it as a real exception
   */
  public Throwable fillInStackTrace()
  {
    return this;
  }

  // Copies position, symbol table, and source text out of the parser state.
  private void initFieldsFromParserState( IParserState parserState )
  {
    if( parserState != null )
    {
      if( parserState instanceof IFullParserState )
      {
        IFullParserState fullParserState = (IFullParserState)parserState;
        _symbolTable = fullParserState.getSymbolTable();
      }
      _lineNumber = parserState.getLineNumber();
      _lineOffset = Math.max( 1, parserState.getLineOffset() );
      _tokenColumn = parserState.getTokenColumn();
      _tokenStart = parserState.getTokenStart();
      _tokenEnd = parserState.getTokenEnd();
      // Copy the value because the source is a string built from a StringBuffer which will have the whole array allocated.
      String parserSource = parserState.getSource();
      setStateSource( parserSource );
    }
  }

  /**
   * Normalize all non string & non number args to string types to prevent
   * race conditions wrt/ the TypeSystem lock when the message is formatted.
   */
  private Object [] normalizeMessageArgs(Object [] args) {
    Object [] result = args;
    if(args != null && args.length > 0) {
      result = new Object[args.length];
      for(int i = 0; i < args.length; i++) {
        if(args[i] == null || args[i] instanceof CharSequence || args[i] instanceof Number) {
          result[i] = args[i];
        } else {
          // Eagerly stringify so later formatting never touches live types.
          result[i] = args[i].toString();
        }
      }
    }
    return result;
  }

  // Localizes the message for the given key, or returns "" when key is null.
  protected static String formatError( ResourceKey key, Object... msgArgs )
  {
    if( key != null )
    {
      return CommonServices.getGosuLocalizationService().localize( key, msgArgs );
    }
    else
    {
      return "";
    }
  }

  public Integer getLineNumber()
  {
    return _lineNumber;
  }

  public Integer getLineOffset()
  {
    return _lineOffset;
  }

  public void addLineOffset( int offset )
  {
    // NOTE(review): assumes _lineOffset has been resolved (non-null);
    // unboxing throws NPE otherwise -- confirm callers.
    _lineOffset += offset;
  }

  public Integer getTokenColumn()
  {
    return _tokenColumn;
  }

  public Integer getTokenEnd()
  {
    return _tokenEnd;
  }

  public Integer getTokenStart()
  {
    return _tokenStart;
  }

  /**
   * Returns the source lines surrounding the error, prefixed with line
   * numbers, or null when no source is available.
   */
  public String getContextString()
  {
    if( getStateSource() != null )
    {
      return makeContextString( _lineNumber, getStateSource(), getLineReportingOffset() );
    }
    else
    {
      return null;
    }
  }

  /** Same as {@link #getContextString()} but without line-number prefixes. */
  public String getContextStringNoLineNumbers() {
    if( getStateSource() != null )
    {
      return makeContextString( _lineNumber, getStateSource(), getLineReportingOffset(), false );
    }
    else
    {
      return null;
    }
  }

  /**
   * The source text this issue was found in. When the cached copy has been
   * released by resolve(), falls back to the containing Gosu class's source.
   */
  public String getStateSource()
  {
    if( _stateSource == null )
    {
      return _parentClass != null ? _parentClass.getSourceFileHandle().getSource().getSource() : null;
    }
    return _stateSource;
  }

  public void setStateSource( String parserSource )
  {
    _stateSource = parserSource;
  }

  // The localized message, the cause's message, or "" when neither exists.
  private String getMyMessage() {
    if (_messageKey != null) {
      return formatError(_messageKey, _messageArgs);
    } else if (getCause() != null){
      return getCause().getMessage();
    } else {
      return "";
    }
  }

  public String getPlainMessage()
  {
    return getMyMessage();
  }

  /**
   * Message plus "[line:N col:M]" position info and, when source is
   * available, the surrounding source context.
   */
  public String getConsoleMessage()
  {
    StringBuilder retVal = new StringBuilder();
    retVal.append( getMyMessage() );
    if( _lineNumber != null )
    {
      retVal.append( " [line:" ).append( _lineNumber - getLineReportingOffset() ).append( " col:" ).append( _tokenColumn );
      retVal.append( "]" );
      if( getStateSource() != null )
      {
        retVal.append( " in\n" );
        retVal.append( getContextString() );
      }
    }
    return (retVal.toString());
  }

  public static String makeContextString( int lineOfError, String source, int lineReportingOffset )
  {
    return makeContextString(lineOfError, source, lineReportingOffset, true);
  }

  /**
   * Renders CONTEXT_LINES lines of source centered on lineOfError, optionally
   * prefixing each with "line N: " where N is shifted by lineReportingOffset.
   */
  private static String makeContextString( int lineOfError, String source, int lineReportingOffset, boolean showLineNumbers )
  {
    int offset = (CONTEXT_LINES - 1) / 2;
    int minLine = Math.max( lineOfError - offset, 1 );
    int maxLine = lineOfError + offset;
    // Width of the largest displayed line number, for right-aligned padding.
    int padding = maxLine <= 0 ? 1 : ((int)Math.log10( maxLine ) + 1);
    StringBuilder sb = new StringBuilder();
    int pos = 0;
    int currentLine = 1;
    // Skip ahead to the first line of interest.
    while( pos < source.length() && currentLine < minLine )
    {
      char c = source.charAt( pos );
      if( c == '\n' )
      {
        ++currentLine;
      }
      ++pos;
    }
    if( showLineNumbers )
    {
      sb.append( String.format( "line %1$" + padding + "s: ", currentLine - lineReportingOffset ) );
    }
    // Copy lines through maxLine, inserting a numbered prefix per line.
    while( pos < source.length() && currentLine <= maxLine )
    {
      char c = source.charAt( pos );
      if( c == '\n' )
      {
        ++currentLine;
        if( currentLine <= maxLine )
        {
          sb.append( '\n' );
          if( showLineNumbers )
          {
            sb.append( String.format( "line %1$" + padding + "s: ", currentLine - lineReportingOffset ) );
          }
        }
      }
      else
      {
        sb.append( c );
      }
      ++pos;
    }
    return sb.toString();
  }

  /**
   * The message with any appended source context (after SOURCE_DELIMITER)
   * stripped off, suitable for UI display.
   */
  public String getUIMessage()
  {
    String strMessage = getMyMessage();
    if( strMessage != null )
    {
      int iIndex = strMessage.indexOf( SOURCE_DELIMITER );
      if( iIndex >= 0 )
      {
        strMessage = strMessage.substring( 0, iIndex );
      }
    }
    return strMessage;
  }

  public int getLine()
  {
    Integer lineNumber = getLineNumber();
    return lineNumber == null ? -1 : lineNumber;
  }

  public int getColumn()
  {
    // Mirror getLine(): report -1 instead of throwing NPE when the column
    // has not been resolved yet.
    Integer tokenColumn = getTokenColumn();
    return tokenColumn == null ? -1 : tokenColumn;
  }

  /**
   * Warning: Only valid if called from the parser thread. Otherwise we null it out.
   */
  public IParsedElement getSource()
  {
    return _parentElement;
  }

  public void setSource( IParsedElement sourceOfError )
  {
    _parentElement = sourceOfError;
  }

  /**
   * Warning: Only valid if called from the parser thread. Otherwise we null it out.
   */
  public ISymbolTable getSymbolTable()
  {
    return _symbolTable;
  }

  /**
   * True when iPos falls within this issue's token span (inclusive). Returns
   * false, rather than throwing NPE, when the span has not been resolved.
   */
  public boolean appliesToPosition( int iPos )
  {
    Integer tokenStart = getTokenStart();
    Integer tokenEnd = getTokenEnd();
    return tokenStart != null && tokenEnd != null &&
           iPos >= tokenStart && iPos <= tokenEnd;
  }

  public ResourceKey getMessageKey()
  {
    return _messageKey;
  }

  /**
   * Finalizes this issue once parsing completes: remembers the containing
   * Gosu class (so source can be re-derived lazily and the cached copy
   * released), fills in missing positions from the parsed element, and for
   * non-editor parsers releases the element and symbol table to save memory.
   */
  public void resolve( IParserPart parserBase )
  {
    if( parserBase != null )
    {
      IScriptPartId scriptPart = parserBase.getOwner().getScriptPart();
      IType parentType = scriptPart == null ? null : scriptPart.getContainingType();
      if( parentType instanceof IGosuClass )
      {
        _parentClass = (IGosuClass)parentType;
        // Drop the cached copy; getStateSource() recovers it from the class.
        _stateSource = null;
      }
    }
    if( _parentElement != null )
    {
      resetPositions();
      if( parserBase != null && !parserBase.getOwner().isEditorParser() )
      {
        _parentElement = null;
        _symbolTable = null;
      }
    }
  }

  /**
   * Derives position info from the parent element. For expressions (other
   * than whole programs) positions are always overwritten because the
   * element's location is authoritative; otherwise only null values are filled.
   */
  public void resetPositions()
  {
    if( _parentElement == null )
    {
      return;
    }
    boolean bForce = _parentElement instanceof IExpression && !(_parentElement instanceof IProgram);
    if( bForce || _lineNumber == null )
    {
      _lineNumber = _parentElement.getLineNum();
    }
    if( bForce || _lineOffset == null )
    {
      _lineOffset = _parentElement.getLocation().getOffset() - _parentElement.getLocation().getColumn();
    }
    if( bForce || _tokenColumn == null )
    {
      _tokenColumn = _parentElement.getColumn();
    }
    if( bForce || _tokenStart == null )
    {
      _tokenStart = _parentElement.getLocation().getOffset();
    }
    if( bForce || _tokenEnd == null )
    {
      _tokenEnd = _parentElement.getLocation().getExtent() + 1;
    }
  }

  /**
   * Shifts this issue's recorded position, e.g. when the parsed snippet is
   * embedded at an offset within a larger source.
   */
  public void adjustOffset(int offset, int lineNumOffset, int columnOffset) {
    // NOTE(review): assumes positions have been resolved (non-null);
    // unboxing throws NPE otherwise -- confirm callers.
    _tokenStart += offset;
    _tokenEnd += offset;
    _lineNumber += lineNumOffset;
    _tokenColumn += columnOffset;
  }

  public void setMessage( ResourceKey key, Object... args )
  {
    _messageKey = key;
    _messageArgs = normalizeMessageArgs(args);
  }

  public Object[] getMessageArgs()
  {
    return _messageArgs;
  }

  /**
   * Amount subtracted from reported line numbers; subclasses may override.
   * Defaults to 0.
   */
  public int getLineReportingOffset()
  {
    return 0;
  }

  /** The type expected at this location, if known; null by default. */
  public IType getExpectedType()
  {
    return null;
  }
}
| |
/* $This file is distributed under the terms of the license in LICENSE$ */
package edu.cornell.mannlib.vitro.webapp.reasoner;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.jena.ontology.OntModel;
import org.apache.jena.query.Query;
import org.apache.jena.query.QueryExecution;
import org.apache.jena.query.QueryExecutionFactory;
import org.apache.jena.query.QueryFactory;
import org.apache.jena.query.QuerySolution;
import org.apache.jena.query.ResultSet;
import org.apache.jena.query.ResultSetFactory;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.rdf.model.NodeIterator;
import org.apache.jena.rdf.model.Property;
import org.apache.jena.rdf.model.RDFNode;
import org.apache.jena.rdf.model.Resource;
import org.apache.jena.rdf.model.ResourceFactory;
import org.apache.jena.rdf.model.Statement;
import org.apache.jena.rdf.model.StmtIterator;
import org.apache.jena.shared.Lock;
import org.apache.jena.vocabulary.OWL;
import org.apache.jena.vocabulary.RDF;
import org.apache.jena.vocabulary.RDFS;
import edu.cornell.mannlib.vitro.webapp.dao.VitroVocabulary;
import edu.cornell.mannlib.vitro.webapp.modelaccess.ModelNames;
import edu.cornell.mannlib.vitro.webapp.modules.searchIndexer.SearchIndexer;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ChangeSet;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ResultSetConsumer;
public class ABoxRecomputer {
    private static final Log log = LogFactory.getLog(ABoxRecomputer.class);
    // Paused around bulk recomputes; recompute() null-checks it, so may be null.
    private final SearchIndexer searchIndexer;
    private OntModel tboxModel; // asserted and inferred TBox axioms
    private OntModel aboxModel; // asserted ABox statements
    private RDFService rdfService;
    private SimpleReasoner simpleReasoner;
    // Guards reads/writes of the 'recomputing' flag.
    private Object lock1 = new Object();
    private volatile boolean recomputing = false;
    // When set, an in-progress recompute halts early.
    private boolean stopRequested = false;
    // Number of individuals whose inferences are flushed to the store at once.
    private final int BATCH_SIZE = 500;
    // Log progress every this many individuals.
    private final int REPORTING_INTERVAL = 1000;
    /**
     * @param tboxModel - input. This model contains both asserted and inferred TBox axioms
     * @param aboxModel - input. This model contains asserted ABox statements
     * @param rdfService - used to query assertions and write inferred statements
     * @param simpleReasoner - supplies sameAs settings and reasoner plugins
     * @param searchIndexer - paused around bulk recomputes; may be null
     */
    public ABoxRecomputer(OntModel tboxModel,
                          OntModel aboxModel,
                          RDFService rdfService,
                          SimpleReasoner simpleReasoner,
                          SearchIndexer searchIndexer) {
        this.tboxModel = tboxModel;
        this.aboxModel = aboxModel;
        this.rdfService = rdfService;
        this.simpleReasoner = simpleReasoner;
        this.searchIndexer = searchIndexer;
        recomputing = false;
        stopRequested = false;
    }
/**
* Returns true if the recomputer is in the process of recomputing
* all inferences.
*/
public boolean isRecomputing() {
return recomputing;
}
    /**
     * Recompute all individuals
     */
    public void recompute() {
        // Claim the recompute flag; bail out if another recompute is running.
        synchronized (lock1) {
            if (recomputing) {
                return;
            } else {
                recomputing = true;
            }
        }
        try {
            if (searchIndexer != null) {
                searchIndexer.pause();
                // Register now that we want to rebuild the index when we unpause
                // This allows the indexer to optimize behaviour whilst paused
                searchIndexer.rebuildIndex();
            }
            log.info("Recomputing ABox inferences.");
            log.info("Finding individuals in ABox.");
            Queue<String>individualURIs = this.getAllIndividualURIs();
            log.info("Recomputing inferences for " + individualURIs.size() + " individuals");
            // Create a type cache for this execution and pass it to the recompute function
            // Ensures that caches are only valid for the length of one recompute
            recomputeIndividuals(individualURIs, new TypeCaches());
            log.info("Finished recomputing inferences");
        } finally {
            // Always unpause the indexer and release the flag, even on failure.
            if(searchIndexer != null) {
                searchIndexer.unpause();
            }
            synchronized (lock1) {
                recomputing = false;
            }
        }
    }
/**
* Recompute inferences for specified collection of individual URIs,
* or all URIs if parameter is null
*/
public void recompute(Queue<String> individualURIs) {
boolean sizableRecompute = (individualURIs.size() > 20);
try {
if(sizableRecompute && searchIndexer != null) {
searchIndexer.pause();
}
recomputeIndividuals(individualURIs);
} finally {
if (sizableRecompute && searchIndexer != null) {
searchIndexer.unpause();
}
}
}
    /*
     * Recompute the ABox inference graph for the specified collection of
     * individual URIs, using a fresh type cache scoped to this single call.
     */
    private void recomputeIndividuals(Queue<String> individuals) {
        recomputeIndividuals(individuals, new TypeCaches());
    }
    /*
     * Recompute the ABox inference graph for the specified collection of
     * individual URIs. Inferences are accumulated in a scratch model and
     * flushed to the store every BATCH_SIZE individuals; plugin inferences
     * about other individuals are collected separately and written once at
     * the end. A null queue is a no-op.
     */
    protected void recomputeIndividuals(Queue<String> individuals, TypeCaches caches) {
        if (individuals == null) {
            return;
        }
        long start = System.currentTimeMillis();
        int size = individuals.size();
        int numInds = 0;
        // Scratch model holding the current batch's recomputed inferences.
        Model rebuildModel = ModelFactory.createDefaultModel();
        // Plugin inferences affecting other individuals; flushed after the loop.
        Model additionalInferences = ModelFactory.createDefaultModel();
        List<String> individualsInBatch = new ArrayList<String>();
        while (!individuals.isEmpty()) {
            String individualURI = individuals.poll();
            try {
                // Note: recomputeIndividual may append sameAs-related URIs to
                // 'individuals', so the queue can grow while we drain it.
                additionalInferences.add(recomputeIndividual(
                        individualURI, rebuildModel, caches, individuals));
                numInds++;
                individualsInBatch.add(individualURI);
                boolean batchFilled = (numInds % BATCH_SIZE) == 0;
                boolean reportingInterval = (numInds % REPORTING_INTERVAL) == 0;
                if (batchFilled || individuals.isEmpty()) {
                    // Flush this batch to the inference graph and reset.
                    log.debug(rebuildModel.size() + " total inferences");
                    updateInferenceModel(rebuildModel, individualsInBatch);
                    rebuildModel.removeAll();
                    individualsInBatch.clear();
                }
                if (reportingInterval) {
                    log.info("Still recomputing inferences ("
                            + numInds + "/" + size + " individuals)");
                    log.info((System.currentTimeMillis() - start) / numInds + " ms per individual");
                }
                if (stopRequested) {
                    log.info("a stopRequested signal was received during recomputeIndividuals. Halting Processing.");
                    return;
                }
            } catch (Exception e) {
                // One bad individual should not abort the whole recompute.
                log.error("Error recomputing inferences for individual <" + individualURI + ">", e);
            }
        }
        if(additionalInferences.size() > 0) {
            log.debug("Writing additional inferences generated by reasoner plugins.");
            ChangeSet change = rdfService.manufactureChangeSet();
            change.addAddition(makeN3InputStream(additionalInferences),
                    RDFService.ModelSerializationFormat.N3, ModelNames.ABOX_INFERENCES);
            try {
                rdfService.changeSetUpdate(change);
            } catch (RDFServiceException e) {
                log.error("Unable to write additional inferences from reasoner plugins", e);
            }
        }
    }
    // Self-documenting flags for the runPlugins parameter of
    // recomputeIndividual(...).
    private static final boolean RUN_PLUGINS = true;
    private static final boolean SKIP_PLUGINS = !RUN_PLUGINS;
private Model recomputeIndividual(String individualURI,
Model rebuildModel, TypeCaches caches, Collection<String> individualQueue)
throws RDFServiceException {
long start = System.currentTimeMillis();
Model assertions = getAssertions(individualURI);
log.debug((System.currentTimeMillis() - start) + " ms to get assertions.");
long prevRebuildSize = (simpleReasoner.getSameAsEnabled()) ? rebuildModel.size() : 0;
if (simpleReasoner.getSameAsEnabled()) {
Set<String> sameAsInds = getSameAsIndividuals(individualURI);
for (String sameAsInd : sameAsInds) {
// sameAs for plugins is handled by the SimpleReasoner
Model sameAsIndAssertions = getAssertions(sameAsInd);
rebuildModel.add(
rewriteInferences(getAssertions(sameAsInd), individualURI));
Resource indRes = ResourceFactory.createResource(individualURI);
Resource sameAsIndRes = ResourceFactory.createResource(sameAsInd);
if(!assertions.contains(indRes, OWL.sameAs, sameAsIndRes)) {
if(!rebuildModel.contains(indRes, OWL.sameAs, sameAsIndRes)) {
individualQueue.add(sameAsInd);
rebuildModel.add(indRes, OWL.sameAs, sameAsIndRes);
}
}
}
if(rebuildModel.size() - prevRebuildSize > 0) {
individualQueue.addAll(sameAsInds);
}
}
Model additionalInferences = recomputeIndividual(
individualURI, null, assertions, rebuildModel, caches, RUN_PLUGINS);
return additionalInferences;
}
    /**
     * Adds inferences for one individual to the temporary rebuild model:
     * inferred rdf:types, mostSpecificType statements, inferred inverse
     * property statements, and (optionally) plugin-generated inferences.
     * @param individualURI The individual
     * @param aliasURI if non-null, inferences are rewritten to this URI
     *        (used when copying inferences from sameAs aliases)
     * @param assertions the individual's asserted statements
     * @param rebuildModel The rebuild model
     * @param caches per-recompute type caches (may be null)
     * @param runPlugins whether reasoner plugins are run over the assertions
     * @return any additional inferences produced by plugins that affect other
     * individuals
     */
    private Model recomputeIndividual(String individualURI, String aliasURI,
            Model assertions, Model rebuildModel, TypeCaches caches, boolean runPlugins)
                    throws RDFServiceException {
        Model additionalInferences = ModelFactory.createDefaultModel();
        Resource individual = ResourceFactory.createResource(individualURI);
        long start = System.currentTimeMillis();
        // Start from the asserted types plus any types already rebuilt this pass.
        Model types = ModelFactory.createDefaultModel();
        types.add(assertions.listStatements(individual, RDF.type, (RDFNode) null));
        types.add(rebuildModel.listStatements(individual, RDF.type, (RDFNode) null));
        Model inferredTypes = rewriteInferences(getInferredTypes(individual, types, caches), aliasURI);
        rebuildModel.add(inferredTypes);
        log.trace((System.currentTimeMillis() - start) + " to infer " + inferredTypes.size() + " types");
        start = System.currentTimeMillis();
        // Most-specific types are computed over asserted + inferred types.
        types.add(inferredTypes);
        Model mst = getMostSpecificTypes(individual, types, caches);
        rebuildModel.add(rewriteInferences(mst, aliasURI));
        log.trace((System.currentTimeMillis() - start) + " to infer " + mst.size() + " mostSpecificTypes");
        start = System.currentTimeMillis();
        // Inverse-property inferences, minus anything already asserted.
        Model inferredInvs = getInferredInverseStatements(individualURI);
        inferredInvs.remove(assertions);
        rebuildModel.add(rewriteInferences(inferredInvs, aliasURI));
        log.trace((System.currentTimeMillis() - start) + " to infer " + inferredInvs.size() + " inverses");
        List<ReasonerPlugin> pluginList = simpleReasoner.getPluginList();
        if (runPlugins && pluginList.size() > 0) {
            Model tmpModel = ModelFactory.createDefaultModel();
            // Feed each asserted statement through every reasoner plugin.
            StmtIterator sit = assertions.listStatements();
            while (sit.hasNext()) {
                Statement s = sit.nextStatement();
                for (ReasonerPlugin plugin : pluginList) {
                    plugin.addedABoxStatement(s, aboxModel, tmpModel, tboxModel);
                }
            }
            // Statements about this individual go into the rebuild model;
            // statements about other individuals are returned to the caller.
            StmtIterator tmpIt = tmpModel.listStatements();
            while(tmpIt.hasNext()) {
                Statement tmpStmt = tmpIt.nextStatement();
                if(individual.equals(tmpStmt.getSubject())) {
                    rebuildModel.add(tmpStmt);
                } else {
                    additionalInferences.add(tmpStmt);
                }
            }
        }
        return additionalInferences;
    }
    /**
     * Retrieves every statement with the given individual as subject from all
     * graphs except the ABox inference graph, i.e. only asserted content.
     */
    private Model getAssertions(String individualURI) throws RDFServiceException {
        String queryStr = "CONSTRUCT { \n" +
                "   <" + individualURI + "> ?p ?value \n" +
                "} WHERE { \n" +
                "    GRAPH ?g { \n" +
                "        <" + individualURI + "> ?p ?value \n" +
                "    } \n" +
                "    FILTER (?g != <" + ModelNames.ABOX_INFERENCES + ">)\n" +
                "} \n";
        Model model = ModelFactory.createDefaultModel();
        rdfService.sparqlConstructQuery(queryStr, model);
        return model;
    }
private Model getInferredTypes(Resource individual, Model assertedTypes, TypeCaches caches) {
if (caches == null) {
return getInferredTypes(individual, assertedTypes);
}
TypeList key = new TypeList(assertedTypes, RDF.type);
Model inferredTypes = caches.getInferredTypesToModel(key, individual);
if (inferredTypes == null) {
inferredTypes = getInferredTypes(individual, assertedTypes);
caches.cacheInferredTypes(key, inferredTypes);
}
return inferredTypes;
}
/**
 * Computes rdf:type statements derivable from the individual's asserted types
 * via rdfs:subClassOf and owl:equivalentClass in the TBox, excluding types
 * that are already asserted. The query runs against the union of the asserted
 * types and the TBox, under a TBox read lock.
 *
 * Fixes: removed a dead {@code new TypeList(...)} allocation whose result was
 * never used, and the QueryExecution is now closed to release its resources.
 */
private Model getInferredTypes(Resource individual, Model assertedTypes) {
    String queryStr = "CONSTRUCT { \n" +
            " <" + individual.getURI() + "> a ?type \n" +
            "} WHERE { \n" +
            " <" + individual.getURI() + "> a ?assertedType .\n" +
            " { ?assertedType <" + RDFS.subClassOf.getURI() + "> ?type } \n" +
            " UNION \n" +
            " { ?assertedType <" + OWL.equivalentClass.getURI() + "> ?type } \n" +
            " FILTER (isURI(?type)) \n" +
            " FILTER NOT EXISTS { \n" +
            " <" + individual.getURI() + "> a ?type \n" +
            " } \n" +
            "} \n";
    Model union = ModelFactory.createUnion(assertedTypes, tboxModel);
    tboxModel.enterCriticalSection(Lock.READ);
    try {
        Query q = QueryFactory.create(queryStr);
        QueryExecution qe = QueryExecutionFactory.create(q, union);
        try {
            return qe.execConstruct();
        } finally {
            qe.close(); // release query-execution resources
        }
    } finally {
        tboxModel.leaveCriticalSection();
    }
}
private Model getMostSpecificTypes(Resource individual, Model assertedTypes, TypeCaches caches) {
if (caches == null) {
return getMostSpecificTypes(individual, assertedTypes);
}
TypeList key = new TypeList(assertedTypes, RDF.type);
Model mostSpecificTypes = caches.getMostSpecificTypesToModel(key, individual);
if (mostSpecificTypes == null) {
mostSpecificTypes = getMostSpecificTypes(individual, assertedTypes);
caches.cacheMostSpecificTypes(key, mostSpecificTypes);
}
return mostSpecificTypes;
}
/**
 * Computes mostSpecificType statements for the individual: a type qualifies
 * when no other (non-equivalent) asserted type is its proper subclass, and it
 * is not already recorded as a most-specific type. Runs against the union of
 * the asserted types and the TBox, under a TBox read lock.
 *
 * Fixes: the QueryExecution is now closed to release its resources.
 */
private Model getMostSpecificTypes(Resource individual, Model assertedTypes) {
    String queryStr = "CONSTRUCT { \n" +
            " <" + individual.getURI() + "> <" + VitroVocabulary.MOST_SPECIFIC_TYPE + "> ?type \n" +
            "} WHERE { \n" +
            " <" + individual.getURI() + "> a ?type .\n" +
            " FILTER (isURI(?type)) \n" +
            " FILTER NOT EXISTS { \n" +
            " <" + individual.getURI() + "> a ?type2 . \n" +
            " ?type2 <" + RDFS.subClassOf.getURI() + "> ?type. \n" +
            " FILTER (?type != ?type2) \n" +
            " FILTER NOT EXISTS { ?type <" + OWL.equivalentClass + "> ?type2 } \n" +
            " } \n" +
            " FILTER NOT EXISTS { \n" +
            " <" + individual.getURI() + "> <" + VitroVocabulary.MOST_SPECIFIC_TYPE + "> ?type \n" +
            " } \n" +
            "} \n";
    Model union = ModelFactory.createUnion(assertedTypes, tboxModel);
    tboxModel.enterCriticalSection(Lock.READ);
    try {
        Query q = QueryFactory.create(queryStr);
        QueryExecution qe = QueryExecutionFactory.create(q, union);
        try {
            return qe.execConstruct();
        } finally {
            qe.close(); // release query-execution resources
        }
    } finally {
        tboxModel.leaveCriticalSection();
    }
}
/**
 * Constructs statements {@code <individualURI> ?inv ?value} for every
 * asserted statement {@code ?value ?prop <individualURI>} whose property
 * is declared (in either direction) as owl:inverseOf ?inv. Statements in
 * the ABox inferences graph are excluded from the input.
 *
 * @throws RDFServiceException if the construct query fails
 */
private Model getInferredInverseStatements(String individualURI) throws RDFServiceException {
    String queryStr = "CONSTRUCT { \n" +
            " <" + individualURI + "> ?inv ?value \n" +
            "} WHERE { \n" +
            " GRAPH ?gr { \n" +
            " ?value ?prop <" + individualURI + "> \n" +
            " } \n" +
            // only URI resources can serve as subjects of the inverse statements
            " FILTER (isURI(?value)) \n" +
            " FILTER (?gr != <" + ModelNames.ABOX_INFERENCES + ">) \n" +
            " { ?prop <" + OWL.inverseOf.getURI() + "> ?inv } \n" +
            " UNION \n" +
            " { ?inv <" + OWL.inverseOf.getURI() + "> ?prop } \n" +
            "} \n";
    Model model = ModelFactory.createDefaultModel();
    rdfService.sparqlConstructQuery(queryStr, model);
    return model;
}
/**
 * Copies each inference statement into a new model, substituting the alias
 * as the subject. When no alias is given, the input model is returned as-is.
 */
private Model rewriteInferences(Model inferences, String aliasURI) {
    if (aliasURI == null) {
        return inferences; // nothing to rewrite
    }
    Resource aliasSubject = ResourceFactory.createResource(aliasURI);
    Model rewritten = ModelFactory.createDefaultModel();
    for (StmtIterator it = inferences.listStatements(); it.hasNext(); ) {
        Statement original = it.nextStatement();
        rewritten.add(aliasSubject, original.getPredicate(), original.getObject());
    }
    return rewritten;
}
/*
* Get the URIs for all individuals in the system
*/
/*
 * Get the URIs for all individuals in the system: collect every owl:Class
 * URI from the TBox (under a read lock), then query for the members of each
 * class and accumulate them into a deduplicating queue.
 */
protected Queue<String> getAllIndividualURIs() {
    List<String> classUris = new ArrayList<String>();
    tboxModel.enterCriticalSection(Lock.READ);
    try {
        StmtIterator classIt = tboxModel.listStatements(
                (Resource) null, RDF.type, OWL.Class);
        while (classIt.hasNext()) {
            Resource subject = classIt.nextStatement().getSubject();
            // Skip blank nodes and degenerate URIs.
            if (subject.isURIResource()
                    && subject.getURI() != null
                    && !subject.getURI().isEmpty()) {
                classUris.add(subject.getURI());
            }
        }
    } finally {
        tboxModel.leaveCriticalSection();
    }
    Queue<String> individualURIs = new IndividualURIQueue<String>();
    for (String classURI : classUris) {
        getIndividualURIs("SELECT ?s WHERE { ?s a <" + classURI + "> } ", individualURIs);
    }
    return individualURIs;
}
/**
 * Runs the given SELECT query in pages of {@code batchSize} using
 * LIMIT/OFFSET, adding each bound, non-blank ?s URI to the queue. Paging
 * stops when a page yields fewer than batchSize solutions.
 *
 * @param queryString a SELECT query binding ?s; LIMIT/OFFSET are appended
 * @param individuals destination queue for the discovered URIs
 */
protected void getIndividualURIs(String queryString, final Queue<String> individuals) {
    final int batchSize = 50000;
    int offset = 0;
    // AtomicBoolean so the anonymous consumer below can signal completion.
    final AtomicBoolean done = new AtomicBoolean(false);
    while (!done.get()) {
        String queryStr = queryString + " LIMIT " + batchSize + " OFFSET " + offset;
        if(log.isDebugEnabled()) {
            log.debug(queryStr);
        }
        try {
            rdfService.sparqlSelectQuery(queryStr, new ResultSetConsumer() {
                private int count = 0; // solutions seen in this page
                @Override
                protected void processQuerySolution(QuerySolution qs) {
                    count++;
                    Resource resource = qs.getResource("s");
                    // Blank nodes have no URI; skip them.
                    if ((resource != null) && !resource.isAnon()) {
                        individuals.add(resource.getURI());
                    }
                }
                @Override
                protected void endProcessing() {
                    super.endProcessing();
                    // A short page means the final batch has been consumed.
                    if (count < batchSize) {
                        done.set(true);
                    }
                }
            });
        } catch (RDFServiceException e) {
            throw new RuntimeException(e);
        }
        if(log.isDebugEnabled()) {
            log.debug(individuals.size() + " in set");
        }
        offset += batchSize;
    }
}
/**
 * Adds to {@code addTo} all statements about {@code individualUri} that are
 * currently stored in the ABox inferences graph.
 *
 * @throws RDFServiceException if the construct query fails
 */
protected void addInferenceStatementsFor(String individualUri, Model addTo) throws RDFServiceException {
    StringBuilder builder = new StringBuilder();
    builder.append("CONSTRUCT\n")
        .append("{\n").append(" <").append(individualUri).append("> ?p ?o .\n")
        .append("}\n")
        .append("WHERE\n")
        .append("{\n")
        .append(" GRAPH <").append(ModelNames.ABOX_INFERENCES).append(">\n")
        .append(" {\n").append(" <").append(individualUri).append("> ?p ?o .\n")
        .append(" }\n")
        .append("}\n");
    rdfService.sparqlConstructQuery(builder.toString(), addTo);
}
/*
* reconcile a set of inferences into the application inference model
*/
/*
 * Reconcile a set of inferences into the application inference model:
 * compute the difference between what is currently stored and what was just
 * rebuilt, then apply the retractions and additions as one change set.
 */
protected void updateInferenceModel(Model rebuildModel,
        Collection<String> individuals) throws RDFServiceException {
    // Snapshot the inference statements currently stored for these individuals.
    Model existing = ModelFactory.createDefaultModel();
    for (String individualURI : individuals) {
        addInferenceStatementsFor(individualURI, existing);
    }
    Model retractions = existing.difference(rebuildModel);
    Model additions = rebuildModel.difference(existing);
    if (additions.size() == 0 && retractions.size() == 0) {
        return; // already in sync; avoid an empty change set
    }
    long start = System.currentTimeMillis();
    ChangeSet change = rdfService.manufactureChangeSet();
    if (retractions.size() > 0) {
        change.addRemoval(makeN3InputStream(retractions),
                RDFService.ModelSerializationFormat.N3, ModelNames.ABOX_INFERENCES);
    }
    if (additions.size() > 0) {
        change.addAddition(makeN3InputStream(additions),
                RDFService.ModelSerializationFormat.N3, ModelNames.ABOX_INFERENCES);
    }
    rdfService.changeSetUpdate(change);
    log.debug((System.currentTimeMillis() - start) +
            " ms to retract " + retractions.size() +
            " statements and add " + additions.size() + " statements");
}
/** Serializes the model as N3 into a freshly allocated in-memory stream. */
private InputStream makeN3InputStream(Model m) {
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    m.write(buffer, "N3");
    return new ByteArrayInputStream(buffer.toByteArray());
}
/**
 * Returns the owl:sameAs closure of the given individual, excluding the
 * individual itself. The URI is seeded into the set before recursing so the
 * traversal will not revisit it, then removed before returning.
 */
public Set<String> getSameAsIndividuals(String individualURI) {
    Set<String> closure = new HashSet<String>();
    closure.add(individualURI);      // seed so recursion won't loop back
    getSameAsIndividuals(individualURI, closure);
    closure.remove(individualURI);   // the individual itself is not reported
    return closure;
}
/**
 * Recursively collects the owl:sameAs closure of the individual into
 * {@code sameAsInds}. Both directions of sameAs are followed; only URIs not
 * already in the set trigger further recursion, which guarantees termination.
 * Query failures are logged, not propagated.
 */
private void getSameAsIndividuals(String individualUri, final Set<String> sameAsInds) {
    try {
        // URIs discovered in THIS invocation; recursed into after the query completes.
        final List<String> addedURIs = new ArrayList<String>();
        StringBuilder builder = new StringBuilder();
        builder.append("SELECT\n")
            .append(" ?object\n")
            .append("WHERE {\n")
            .append(" GRAPH ?g { \n")
            // NOTE(review): OWL.sameAs is appended without .getURI(); this relies on
            // the Resource's toString() yielding the bare URI — confirm intended.
            .append(" {\n").append(" <").append(individualUri).append("> <").append(OWL.sameAs).append("> ?object .\n")
            .append(" } UNION {\n").append(" ?object <").append(OWL.sameAs).append("> <").append(individualUri).append("> .\n")
            .append(" }\n")
            .append(" } \n")
            .append(" FILTER (?g != <" + ModelNames.ABOX_INFERENCES + ">)\n")
            .append("}\n");
        rdfService.sparqlSelectQuery(builder.toString(), new ResultSetConsumer() {
            @Override
            protected void processQuerySolution(QuerySolution qs) {
                Resource object = qs.getResource("object");
                if (object != null && !sameAsInds.contains(object.getURI())) {
                    sameAsInds.add(object.getURI());
                    addedURIs.add(object.getURI());
                }
            }
        });
        for (String indUri : addedURIs) {
            getSameAsIndividuals(indUri, sameAsInds);
        }
    } catch (RDFServiceException e) {
        log.error(e,e);
    }
}
/**
 * Requests that the recomputation stop. Called when the application shuts
 * down; shutdown is cooperative — the flag is presumably polled by the
 * recompute loop elsewhere in this class (TODO confirm), so the stop is not
 * immediate.
 */
public void setStopRequested() {
    this.stopRequested = true;
}
/**
* Caches for types -> inferred types, and types -> most specific type
*/
/**
 * Memoization caches keyed by a set of asserted types: one map for the full
 * set of inferred types, one for the most-specific types. Individuals with
 * identical asserted-type sets therefore compute each result only once.
 */
private static class TypeCaches {
    private final Map<TypeList, TypeList> inferredTypeCache = new HashMap<TypeList, TypeList>();
    private final Map<TypeList, TypeList> mostSpecificTypeCache = new HashMap<TypeList, TypeList>();

    void cacheInferredTypes(TypeList key, Model model) {
        inferredTypeCache.put(key, new TypeList(model, RDF.type));
    }

    /** Returns a model of the cached inferred types for the individual, or null on a miss. */
    Model getInferredTypesToModel(TypeList key, Resource individual) {
        TypeList hit = inferredTypeCache.get(key);
        return (hit == null) ? null : hit.constructModel(individual, RDF.type);
    }

    void cacheMostSpecificTypes(TypeList key, Model model) {
        mostSpecificTypeCache.put(key,
                new TypeList(model, model.createProperty(VitroVocabulary.MOST_SPECIFIC_TYPE)));
    }

    /** Returns a model of the cached most-specific types for the individual, or null on a miss. */
    Model getMostSpecificTypesToModel(TypeList key, Resource individual) {
        TypeList hit = mostSpecificTypeCache.get(key);
        return (hit == null) ? null : hit.constructModel(individual, VitroVocabulary.MOST_SPECIFIC_TYPE);
    }
}
/**
* Bundle of type URIs
*/
/**
 * An order-insensitive bundle of type URIs, usable as a cache key.
 * Duplicates are never stored, so equality can be decided by size plus
 * containment. The hash code is cached and invalidated by {@link #addUri}.
 *
 * Fix: {@code hashCode()} previously sorted the live {@code typeUris} list in
 * place — a mutating side effect that changed the iteration order observed by
 * {@code constructModel}. It now sorts a defensive copy.
 */
private static class TypeList {
    private List<String> typeUris = new ArrayList<String>();
    private Integer hashCode = null; // lazily computed; null means stale/unset

    /**
     * Extract type uris - either RDF type or most specific type - from a Model
     */
    TypeList(Model model, Property property) {
        NodeIterator iterator = model.listObjectsOfProperty(property);
        while (iterator.hasNext()) {
            RDFNode node = iterator.next();
            String uri = node.asResource().getURI();
            if (!typeUris.contains(uri)) {
                typeUris.add(uri);
            }
        }
    }

    /** Builds a model asserting (individual, property, uri) for each stored URI. */
    Model constructModel(Resource individual, Property property) {
        Model model = ModelFactory.createDefaultModel();
        for (String uri : typeUris) {
            model.add(individual, property, model.createResource(uri));
        }
        return model;
    }

    /** Same as above, but the property is given by its URI string. */
    Model constructModel(Resource individual, String property) {
        Model model = ModelFactory.createDefaultModel();
        for (String uri : typeUris) {
            model.add(individual, model.createProperty(property), model.createResource(uri));
        }
        return model;
    }

    public void addUri(String uri) {
        if (!typeUris.contains(uri)) {
            typeUris.add(uri);
            hashCode = null; // cached hash is stale once the contents change
        }
    }

    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof TypeList)) {
            return false;
        }
        TypeList otherKey = (TypeList) obj;
        // Lists are duplicate-free, so equal size + containment implies set equality.
        if (typeUris.size() != otherKey.typeUris.size()) {
            return false;
        }
        return typeUris.containsAll(otherKey.typeUris);
    }

    @Override
    public int hashCode() {
        if (hashCode == null) {
            // Sort a copy: computing a hash must not mutate observable state.
            List<String> sorted = new ArrayList<String>(typeUris);
            Collections.sort(sorted);
            StringBuilder builder = new StringBuilder();
            for (String key : sorted) {
                builder.append('<').append(key).append('>');
            }
            hashCode = builder.toString().hashCode();
        }
        return hashCode;
    }
}
}
| |
/*
* Copyright (C) 2015 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.squareup.okhttp.internal.allocations;
import com.squareup.okhttp.ConnectionPool;
import org.junit.Test;
import static junit.framework.TestCase.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.fail;
public final class ConnectionTest {
  ConnectionPool connectionPool = new ConnectionPool(1, 1000L);

  @Test public void reserveCreateCompleteRelease() throws Exception {
    Connection conn = new Connection(connectionPool);
    conn.setAllocationLimit(1);
    Connection.StreamAllocation allocation = conn.reserve("a");
    Connection.Stream stream = allocation.newStream("a1");
    assertNotNull(stream);
    allocation.streamComplete(stream);
    assertEquals(1, conn.size()); // The allocation survives stream completion.
    conn.release(allocation);
    assertEquals(0, conn.size());
  }

  @Test public void reserveCreateReleaseComplete() throws Exception {
    Connection conn = new Connection(connectionPool);
    conn.setAllocationLimit(1);
    Connection.StreamAllocation allocation = conn.reserve("a");
    Connection.Stream stream = allocation.newStream("a1");
    assertNotNull(stream);
    conn.release(allocation);
    assertEquals(1, conn.size()); // The allocation survives until the stream completes.
    allocation.streamComplete(stream);
    assertEquals(0, conn.size());
  }

  @Test public void reuseAllocation() throws Exception {
    Connection conn = new Connection(connectionPool);
    conn.setAllocationLimit(1);
    Connection.StreamAllocation allocation = conn.reserve("a");
    Connection.Stream first = allocation.newStream("a1");
    assertNotNull(first);
    allocation.streamComplete(first);
    assertEquals(1, conn.size());
    // The same allocation can carry another stream after the first completes.
    Connection.Stream second = allocation.newStream("a2");
    assertNotNull(second);
    allocation.streamComplete(second);
    assertEquals(1, conn.size());
    conn.release(allocation);
    assertEquals(0, conn.size());
  }

  @Test public void cannotReuseAllocationAfterRelease() throws Exception {
    Connection conn = new Connection(connectionPool);
    conn.setAllocationLimit(1);
    Connection.StreamAllocation allocation = conn.reserve("a");
    Connection.Stream stream = allocation.newStream("a1");
    allocation.streamComplete(stream);
    conn.release(allocation);
    try {
      allocation.newStream("a2");
      fail();
    } catch (IllegalStateException expected) {
    }
  }

  @Test public void createReturnsNullAfterNoNewStreams() throws Exception {
    Connection conn = new Connection(connectionPool);
    conn.setAllocationLimit(1);
    Connection.StreamAllocation allocation = conn.reserve("a");
    Connection.Stream stream = allocation.newStream("a1");
    assertNotNull(stream);
    allocation.streamComplete(stream);
    assertEquals(1, conn.size());
    conn.noNewStreams();
    assertNull(allocation.newStream("a2"));
    assertEquals(1, conn.size());
    conn.release(allocation);
    assertEquals(0, conn.size());
  }

  @Test public void reserveReturnsNullAfterNoNewStreams() throws Exception {
    Connection conn = new Connection(connectionPool);
    conn.setAllocationLimit(1);
    Connection.StreamAllocation allocation = conn.reserve("a");
    Connection.Stream stream = allocation.newStream("a1");
    conn.noNewStreams();
    assertNull(conn.reserve("b"));
    // Even after streams are released, the limit still holds.
    allocation.streamComplete(stream);
    assertNull(conn.reserve("c"));
  }

  @Test public void closeScheduledAfterNoNewStreams() throws Exception {
    Connection conn = new Connection(connectionPool);
    conn.setAllocationLimit(1);
    Connection.StreamAllocation allocation = conn.reserve("c");
    Connection.Stream stream = allocation.newStream("a1");
    conn.noNewStreams();
    assertEquals(Long.MAX_VALUE, conn.idleAt);
    allocation.streamComplete(stream);
    assertEquals(Long.MAX_VALUE, conn.idleAt);
    conn.release(allocation);
    // Fully idle now, so an idle timestamp has been recorded.
    assertNotEquals(Long.MAX_VALUE, conn.idleAt);
  }

  @Test public void multipleAllocations() throws Exception {
    Connection conn = new Connection(connectionPool);
    conn.setAllocationLimit(2);
    Connection.StreamAllocation first = conn.reserve("a");
    Connection.StreamAllocation second = conn.reserve("b");
    Connection.Stream firstStream = first.newStream("a1");
    Connection.Stream secondStream = second.newStream("b1");
    assertEquals(2, conn.size());
    conn.release(first);
    assertEquals(2, conn.size());
    first.streamComplete(firstStream);
    assertEquals(1, conn.size());
    second.streamComplete(secondStream);
    assertEquals(1, conn.size());
    conn.release(second);
    assertEquals(0, conn.size());
  }

  @Test public void lowerAndRaiseAllocationLimit() throws Exception {
    Connection conn = new Connection(connectionPool);
    conn.setAllocationLimit(2);
    assertNotNull(conn.reserve("a"));
    assertNotNull(conn.reserve("a"));
    assertNull(conn.reserve("c"));
    // Lowering the limit does not evict existing allocations.
    conn.setAllocationLimit(0);
    assertEquals(2, conn.size());
    assertNull(conn.reserve("d"));
    conn.setAllocationLimit(3);
    assertNotNull(conn.reserve("e"));
  }

  @Test public void leakedAllocation() throws Exception {
    Connection conn = new Connection(connectionPool);
    conn.setAllocationLimit(1);
    reserveAndLeakAllocation(conn);
    awaitGarbageCollection();
    conn.pruneLeakedAllocations();
    assertEquals(0, conn.size());
    assertNull(conn.reserve("b")); // Can't allocate once a leak has been detected.
  }

  /** Use a helper method so there's no hidden reference remaining on the stack. */
  private void reserveAndLeakAllocation(Connection connection) {
    connection.reserve("a");
  }

  /**
   * See FinalizationTester for discussion on how to best trigger GC in tests.
   * https://android.googlesource.com/platform/libcore/+/master/support/src/test/java/libcore/
   * java/lang/ref/FinalizationTester.java
   */
  private void awaitGarbageCollection() throws InterruptedException {
    Runtime.getRuntime().gc();
    Thread.sleep(100);
    System.runFinalization();
  }
}
| |
/*
* Copyright (c) 2008-2010, Matthias Mann
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following
* conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Matthias Mann nor
* the names of its contributors may be used to endorse or promote products derived from this software without specific prior
* written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
* SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.badlogic.gdx.graphics.g2d;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.StringTokenizer;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.files.FileHandle;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.Texture;
import com.badlogic.gdx.graphics.Texture.TextureFilter;
import com.badlogic.gdx.graphics.g2d.GlyphLayout.GlyphRun;
import com.badlogic.gdx.graphics.g2d.TextureAtlas.AtlasRegion;
import com.badlogic.gdx.utils.Array;
import com.badlogic.gdx.utils.Disposable;
import com.badlogic.gdx.utils.FloatArray;
import com.badlogic.gdx.utils.GdxRuntimeException;
import com.badlogic.gdx.utils.StreamUtils;
/** Renders bitmap fonts. The font consists of 2 files: an image file or {@link TextureRegion} containing the glyphs and a file in
* the AngleCode BMFont text format that describes where each glyph is on the image. Currently only a single image of glyphs is
* supported.
* <p>
* Text is drawn using a {@link Batch}. Text can be cached in a {@link BitmapFontCache} for faster rendering of static text, which
* saves needing to compute the location of each glyph each frame.
* <p>
* The texture for a BitmapFont loaded from a file is managed. {@link #dispose()} must be called to free the texture when no
* longer needed. A BitmapFont loaded using a {@link TextureRegion} is managed if the region's texture is managed. Disposing the
* BitmapFont disposes the region's texture, which may not be desirable if the texture is still being used elsewhere.
* <p>
* The code was originally based on Matthias Mann's TWL BitmapFont class. Thanks for sharing, Matthias! :)
* @author Nathan Sweet
* @author Matthias Mann */
public class BitmapFont implements Disposable {
// Glyphs are stored in fixed-size pages keyed by the character's high bits
// (see Glyph.getKerning): PAGE_SIZE entries per page, PAGES pages covering
// the full 16-bit char range.
static private final int LOG2_PAGE_SIZE = 9;
static private final int PAGE_SIZE = 1 << LOG2_PAGE_SIZE;
static private final int PAGES = 0x10000 / PAGE_SIZE;
final BitmapFontData data;           // glyph metrics and layout information
Array<TextureRegion> regions;        // one texture region per glyph page
private final BitmapFontCache cache; // cache backing the draw() convenience methods
private boolean flipped;             // mirrors data.flipped (y-down rendering)
boolean integer;                     // round drawing positions to integer values
private boolean ownsTexture;         // if true, dispose() disposes the page textures
/** Creates a BitmapFont using the default 15pt Arial font included in the libgdx JAR file. This is convenient to easily display
 * text without bothering with generating a bitmap font yourself. */
public BitmapFont () {
    this(Gdx.files.classpath("com/badlogic/gdx/utils/arial-15.fnt"),
        Gdx.files.classpath("com/badlogic/gdx/utils/arial-15.png"), false, true);
}
/** Creates a BitmapFont using the default 15pt Arial font included in the libgdx JAR file. This is convenient to easily display
 * text without bothering with generating a bitmap font yourself.
 * @param flip If true, the glyphs will be flipped for use with a perspective where 0,0 is the upper left corner. */
public BitmapFont (boolean flip) {
    this(Gdx.files.classpath("com/badlogic/gdx/utils/arial-15.fnt"),
        Gdx.files.classpath("com/badlogic/gdx/utils/arial-15.png"), flip, true);
}
/** Creates a BitmapFont with the glyphs relative to the specified region. If the region is null, the glyph textures are loaded
 * from the image file given in the font file. The {@link #dispose()} method will not dispose the region's texture in this
 * case!
 * <p>
 * The font data is not flipped.
 * @param fontFile the font definition file
 * @param region The texture region containing the glyphs. The glyphs must be relative to the lower left corner (ie, the region
 *           should not be flipped). If the region is null the glyph images are loaded from the image path in the font file. */
public BitmapFont (FileHandle fontFile, TextureRegion region) {
    this(fontFile, region, false);
}

/** Creates a BitmapFont with the glyphs relative to the specified region. If the region is null, the glyph textures are loaded
 * from the image file given in the font file. The {@link #dispose()} method will not dispose the region's texture in this
 * case!
 * @param region The texture region containing the glyphs. The glyphs must be relative to the lower left corner (ie, the region
 *           should not be flipped). If the region is null the glyph images are loaded from the image path in the font file.
 * @param flip If true, the glyphs will be flipped for use with a perspective where 0,0 is the upper left corner. */
public BitmapFont (FileHandle fontFile, TextureRegion region, boolean flip) {
    this(new BitmapFontData(fontFile, flip), region, true);
}

/** Creates a BitmapFont from a BMFont file. The image file name is read from the BMFont file and the image is loaded from the
 * same directory. The font data is not flipped. */
public BitmapFont (FileHandle fontFile) {
    this(fontFile, false);
}

/** Creates a BitmapFont from a BMFont file. The image file name is read from the BMFont file and the image is loaded from the
 * same directory.
 * @param flip If true, the glyphs will be flipped for use with a perspective where 0,0 is the upper left corner. */
public BitmapFont (FileHandle fontFile, boolean flip) {
    this(new BitmapFontData(fontFile, flip), (TextureRegion)null, true);
}

/** Creates a BitmapFont from a BMFont file, using the specified image for glyphs. Any image specified in the BMFont file is
 * ignored.
 * @param flip If true, the glyphs will be flipped for use with a perspective where 0,0 is the upper left corner. */
public BitmapFont (FileHandle fontFile, FileHandle imageFile, boolean flip) {
    this(fontFile, imageFile, flip, true);
}

/** Creates a BitmapFont from a BMFont file, using the specified image for glyphs. Any image specified in the BMFont file is
 * ignored.
 * @param flip If true, the glyphs will be flipped for use with a perspective where 0,0 is the upper left corner.
 * @param integer If true, rendering positions will be at integer values to avoid filtering artifacts. */
public BitmapFont (FileHandle fontFile, FileHandle imageFile, boolean flip, boolean integer) {
    // Second Texture argument presumably disables mipmaps — TODO confirm against Texture's ctor.
    this(new BitmapFontData(fontFile, flip), new TextureRegion(new Texture(imageFile, false)), integer);
    ownsTexture = true; // this constructor created the texture, so dispose() must free it
}

/** Constructs a new BitmapFont from the given {@link BitmapFontData} and {@link TextureRegion}. If the TextureRegion is null,
 * the image path(s) will be read from the BitmapFontData. The dispose() method will not dispose the texture of the region(s)
 * if the region is != null.
 * <p>
 * Passing a single TextureRegion assumes that your font only needs a single texture page. If you need to support multiple
 * pages, either let the Font read the images themselves (by specifying null as the TextureRegion), or by specifying each page
 * manually with the TextureRegion[] constructor.
 * @param integer If true, rendering positions will be at integer values to avoid filtering artifacts. */
public BitmapFont (BitmapFontData data, TextureRegion region, boolean integer) {
    this(data, region != null ? Array.with(region) : null, integer);
}
/** Constructs a new BitmapFont from the given {@link BitmapFontData} and array of {@link TextureRegion}. If the TextureRegion
 * is null or empty, the image path(s) will be read from the BitmapFontData. The dispose() method will not dispose the texture
 * of the region(s) if the regions array is != null and not empty.
 * @param integer If true, rendering positions will be at integer values to avoid filtering artifacts. */
public BitmapFont (BitmapFontData data, Array<TextureRegion> pageRegions, boolean integer) {
    if (pageRegions == null || pageRegions.size == 0) {
        // No regions supplied: load one texture per page image listed in the font data.
        int n = data.imagePaths.length;
        regions = new Array<TextureRegion>(n); // parameterized (was raw type) to avoid an unchecked warning
        for (int i = 0; i < n; i++) {
            FileHandle file;
            if (data.fontFile == null)
                file = Gdx.files.internal(data.imagePaths[i]); // font data created without a file
            else
                file = Gdx.files.getFileHandle(data.imagePaths[i], data.fontFile.type());
            regions.add(new TextureRegion(new Texture(file, false)));
        }
        ownsTexture = true; // we created the textures, so dispose() must free them
    } else {
        regions = pageRegions;
        ownsTexture = false; // caller owns the supplied regions' textures
    }
    cache = newFontCache();
    this.flipped = data.flipped;
    this.data = data;
    this.integer = integer;
    load(data);
}
/** Assigns each non-null glyph its texture region, selected by the glyph's page index. */
protected void load (BitmapFontData data) {
    for (Glyph[] page : data.glyphs) {
        if (page != null) {
            for (Glyph glyph : page) {
                if (glyph != null) {
                    TextureRegion pageRegion = regions.get(glyph.page);
                    if (pageRegion == null) {
                        // TODO: support null regions by parsing scaleW / scaleH ?
                        throw new IllegalArgumentException("BitmapFont texture region array cannot contain null elements.");
                    }
                    data.setGlyphRegion(glyph, pageRegion);
                }
            }
        }
    }
}
/** Draws text at the specified position.
 * <p>
 * Note: each draw() call clears the internal cache first, so the cache only ever
 * holds the most recently drawn text.
 * @see BitmapFontCache#addText(CharSequence, float, float) */
public GlyphLayout draw (Batch batch, CharSequence str, float x, float y) {
    cache.clear();
    GlyphLayout layout = cache.addText(str, x, y);
    cache.draw(batch);
    return layout;
}

/** Draws text at the specified position.
 * @see BitmapFontCache#addText(CharSequence, float, float, int, int, float, int, boolean) */
public GlyphLayout draw (Batch batch, CharSequence str, float x, float y, float targetWidth, int halign, boolean wrap) {
    cache.clear();
    GlyphLayout layout = cache.addText(str, x, y, targetWidth, halign, wrap);
    cache.draw(batch);
    return layout;
}

/** Draws text at the specified position.
 * @see BitmapFontCache#addText(CharSequence, float, float, int, int, float, int, boolean) */
public GlyphLayout draw (Batch batch, CharSequence str, float x, float y, int start, int end, float targetWidth, int halign,
    boolean wrap) {
    cache.clear();
    GlyphLayout layout = cache.addText(str, x, y, start, end, targetWidth, halign, wrap);
    cache.draw(batch);
    return layout;
}

/** Draws text at the specified position using a pre-computed layout.
 * @see BitmapFontCache#addText(CharSequence, float, float, int, int, float, int, boolean) */
public void draw (Batch batch, GlyphLayout layout, float x, float y) {
    cache.clear();
    cache.addText(layout, x, y);
    cache.draw(batch);
}
/** Returns the color of text drawn with this font. */
public Color getColor () {
    // Delegates to the cache; the cache's color is the single source of truth.
    return cache.getColor();
}

/** A convenience method for setting the font color. The color can also be set by modifying {@link #getColor()}. */
public void setColor (Color color) {
    cache.getColor().set(color);
}

/** A convenience method for setting the font color. The color can also be set by modifying {@link #getColor()}. */
public void setColor (float r, float g, float b, float a) {
    cache.getColor().set(r, g, b, a);
}

/** Returns the horizontal scale of the underlying font data. */
public float getScaleX () {
    return data.scaleX;
}

/** Returns the vertical scale of the underlying font data. */
public float getScaleY () {
    return data.scaleY;
}

/** Returns the first texture region. This is included for backwards compatibility, and for convenience since most fonts only
 * use one texture page. For multi-page fonts, use {@link #getRegions()}.
 * @return the first texture region */
public TextureRegion getRegion () {
    return regions.first();
}

/** Returns the array of TextureRegions that represents each texture page of glyphs.
 * @return the array of texture regions; modifying it may produce undesirable results */
public Array<TextureRegion> getRegions () {
    return regions;
}

/** Returns the texture page at the given index.
 * @return the texture page at the given index */
public TextureRegion getRegion (int index) {
    return regions.get(index);
}

/** Returns the line height, which is the distance from one line of text to the next. */
public float getLineHeight () {
    return data.lineHeight;
}

/** Returns the width of the space character. */
public float getSpaceWidth () {
    return data.spaceWidth;
}

/** Returns the x-height, which is the distance from the top of most lowercase characters to the baseline. */
public float getXHeight () {
    return data.xHeight;
}

/** Returns the cap height, which is the distance from the top of most uppercase characters to the baseline. Since the drawing
 * position is the cap height of the first line, the cap height can be used to get the location of the baseline. */
public float getCapHeight () {
    return data.capHeight;
}

/** Returns the ascent, which is the distance from the cap height to the top of the tallest glyph. */
public float getAscent () {
    return data.ascent;
}

/** Returns the descent, which is the distance from the bottom of the glyph that extends the lowest to the baseline. This number
 * is negative. */
public float getDescent () {
    return data.descent;
}

/** Returns true if this BitmapFont has been flipped for use with a y-down coordinate system. */
public boolean isFlipped () {
    return flipped;
}
/** Disposes the texture used by this BitmapFont's region IF this BitmapFont created the texture.
 * <p>
 * NOTE(review): each region's texture is disposed individually; if multiple regions
 * share one texture it would be disposed more than once — confirm pages never share. */
public void dispose () {
    if (ownsTexture) {
        for (int i = 0; i < regions.size; i++)
            regions.get(i).getTexture().dispose();
    }
}
/** Makes the specified glyphs fixed width. This can be useful to make the numbers in a font fixed width. Eg, when horizontally
 * centering a score or loading percentage text, it will not jump around as different numbers are shown. */
public void setFixedWidthGlyphs (CharSequence glyphs) {
    BitmapFontData fontData = this.data;
    int length = glyphs.length();
    // First pass: find the widest advance among the requested glyphs.
    int targetAdvance = 0;
    for (int i = 0; i < length; i++) {
        Glyph glyph = fontData.getGlyph(glyphs.charAt(i));
        if (glyph != null && glyph.xadvance > targetAdvance) targetAdvance = glyph.xadvance;
    }
    // Second pass: give every glyph that advance, centering it in the new width.
    for (int i = 0; i < length; i++) {
        Glyph glyph = fontData.getGlyph(glyphs.charAt(i));
        if (glyph == null) continue;
        glyph.xoffset += (targetAdvance - glyph.xadvance) / 2;
        glyph.xadvance = targetAdvance;
        glyph.kerning = null; // kerning would defeat the fixed width
    }
}
/** Specifies whether to use integer positions. Default is to use them so filtering doesn't kick in as badly. */
public void setUseIntegerPositions (boolean integer) {
    this.integer = integer;
    // Keep the internal cache in sync so draw() honors the new setting.
    cache.setUseIntegerPositions(integer);
}

/** Checks whether this font uses integer positions for drawing. */
public boolean usesIntegerPositions () {
    return integer;
}

/** For expert usage -- returns the BitmapFontCache used by this font, for rendering to a sprite batch. This can be used, for
 * example, to manipulate glyph colors within a specific index.
 * @return the bitmap font cache used by this font */
public BitmapFontCache getCache () {
    return cache;
}

/** Gets the underlying {@link BitmapFontData} for this BitmapFont. */
public BitmapFontData getData () {
    return data;
}

/** @return whether the texture is owned by the font, font disposes the texture itself if true */
public boolean ownsTexture () {
    return ownsTexture;
}

/** Sets whether the font owns the texture. In case it does, the font will also dispose of the texture when {@link #dispose()}
 * is called. Use with care!
 * @param ownsTexture whether the font owns the texture */
public void setOwnsTexture (boolean ownsTexture) {
    this.ownsTexture = ownsTexture;
}

/** Creates a new BitmapFontCache for this font. Using this method allows the font to provide the BitmapFontCache implementation
 * to customize rendering.
 * <p>
 * Note this method is called by the BitmapFont constructors. If a subclass overrides this method, it will be called before the
 * subclass constructors. */
public BitmapFontCache newFontCache () {
    return new BitmapFontCache(this, integer);
}

public String toString () {
    // Prefer the font file's base name; fall back to Object.toString when no file is known.
    if (data.fontFile != null) return data.fontFile.nameWithoutExtension();
    return super.toString();
}
/** Represents a single character in a font page: its source rectangle in the texture page, texture coordinates, layout
 * offsets, advance, and optional per-pair kerning stored in a sparse paged table. */
public static class Glyph {
	public int id;
	public int srcX;
	public int srcY;
	public int width, height;
	public float u, v, u2, v2;
	public int xoffset, yoffset;
	public int xadvance;
	public byte[][] kerning;

	/** The index to the texture page that holds this glyph. */
	public int page = 0;

	/** Returns the kerning adjustment to apply when this glyph is followed by {@code ch}, or 0 when none is stored. */
	public int getKerning (char ch) {
		byte[][] table = kerning;
		if (table == null) return 0;
		byte[] kerningPage = table[ch >>> LOG2_PAGE_SIZE];
		return kerningPage == null ? 0 : kerningPage[ch & PAGE_SIZE - 1];
	}

	/** Stores the kerning {@code value} for the pair (this glyph, {@code ch}), allocating table pages lazily. */
	public void setKerning (int ch, int value) {
		if (kerning == null) kerning = new byte[PAGES][];
		int pageIndex = ch >>> LOG2_PAGE_SIZE;
		byte[] kerningPage = kerning[pageIndex];
		if (kerningPage == null) {
			kerningPage = new byte[PAGE_SIZE];
			kerning[pageIndex] = kerningPage;
		}
		kerningPage[ch & PAGE_SIZE - 1] = (byte)value;
	}

	public String toString () {
		return String.valueOf((char)id);
	}
}
/** Returns the index of the first occurrence of {@code ch} in {@code text} at or after {@code start}.
 * Unlike {@link String#indexOf(int)}, returns {@code text.length()} (not -1) when the character is absent,
 * so the result can be used directly as an exclusive end index. */
static int indexOf (CharSequence text, char ch, int start) {
	final int length = text.length();
	for (int i = start; i < length; i++) {
		if (ch == text.charAt(i)) return i;
	}
	return length;
}
/** Backing data for a {@link BitmapFont}: glyph metrics, kerning, texture page paths, and the font-wide vertical metrics.
 * Populated either by {@link #load(FileHandle, boolean)} from a BMFont text format .fnt file or programmatically. */
static public class BitmapFontData {
	/** An array of the image paths, for multiple texture pages. */
	public String[] imagePaths;
	/** The .fnt file this data was loaded from, or null if the data was populated programmatically. */
	public FileHandle fontFile;
	/** True when the data was loaded for a y-down coordinate system. */
	public boolean flipped;
	/** Padding values parsed from the BMFont "info" line; see the NOTE in {@link #load(FileHandle, boolean)}. */
	public float padTop, padRight, padBottom, padLeft;
	/** The distance from one line of text to the next. */
	public float lineHeight;
	/** The distance from the top of most uppercase characters to the baseline. Since the drawing position is the cap height of
	 * the first line, the cap height can be used to get the location of the baseline. */
	public float capHeight = 1;
	/** The distance from the cap height to the top of the tallest glyph. */
	public float ascent;
	/** The distance from the bottom of the glyph that extends the lowest to the baseline. This number is negative. */
	public float descent;
	/** Signed distance to move the draw position when advancing a line; negated when flipped. */
	public float down;
	/** Scale currently applied to all metrics; see {@link #setScale(float, float)}. */
	public float scaleX = 1, scaleY = 1;
	/** Whether color markup tags are honored when laying out text. */
	public boolean markupEnabled;
	/** Sparse glyph table, paged by character value so the full char range needn't be allocated up front. */
	public final Glyph[][] glyphs = new Glyph[PAGES][];
	/** The width of the space character. */
	public float spaceWidth;
	/** The x-height, which is the distance from the top of most lowercase characters to the baseline. */
	public float xHeight = 1;
	/** Additional characters besides whitespace where text is wrapped. Eg, a hyphen (-). */
	public char[] breakChars;
	/** Candidate characters for measuring the x-height; the first one present in the font is used. */
	public char[] xChars = {'x', 'e', 'a', 'o', 'n', 's', 'r', 'c', 'u', 'm', 'v', 'w', 'z'};
	/** Candidate characters for measuring the cap height; the first one present in the font is used. */
	public char[] capChars = {'M', 'N', 'B', 'D', 'C', 'E', 'F', 'K', 'A', 'G', 'H', 'I', 'J', 'L', 'O', 'P', 'Q', 'R', 'S',
		'T', 'U', 'V', 'W', 'X', 'Y', 'Z'};

	/** Creates an empty BitmapFontData for configuration before calling {@link #load(FileHandle, boolean)}, to subclass, or to
	 * populate yourself, e.g. using stb-truetype or FreeType. */
	public BitmapFontData () {
	}

	/** Loads the font data from the given BMFont .fnt file.
	 * @param flip true when the font is intended for a y-down coordinate system. */
	public BitmapFontData (FileHandle fontFile, boolean flip) {
		this.fontFile = fontFile;
		this.flipped = flip;
		load(fontFile, flip);
	}

	/** Parses a BMFont text format .fnt file: the "info" and "common" headers, the "page" lines, then all "char" and
	 * "kerning" lines. Derives spaceWidth, xHeight, capHeight, ascent, descent, and down from the parsed glyphs.
	 * @throws IllegalStateException if this data was already loaded.
	 * @throws GdxRuntimeException if the file is missing required fields or cannot be parsed. */
	public void load (FileHandle fontFile, boolean flip) {
		if (imagePaths != null) throw new IllegalStateException("Already loaded.");
		// NOTE(review): the reader uses the platform default charset -- confirm target .fnt files are ASCII/locale-safe.
		BufferedReader reader = new BufferedReader(new InputStreamReader(fontFile.read()), 512);
		try {
			String line = reader.readLine(); // info
			if (line == null) throw new GdxRuntimeException("File is empty.");
			line = line.substring(line.indexOf("padding=") + 8);
			String[] padding = line.substring(0, line.indexOf(' ')).split(",", 4);
			if (padding.length != 4) throw new GdxRuntimeException("Invalid padding.");
			// NOTE(review): BMFont documents padding order as up,right,down,left; the assignment below reads it as
			// top,left,bottom,right -- confirm against the files this loader targets before changing.
			padTop = Integer.parseInt(padding[0]);
			padLeft = Integer.parseInt(padding[1]);
			padBottom = Integer.parseInt(padding[2]);
			padRight = Integer.parseInt(padding[3]);
			float padY = padTop + padBottom;
			line = reader.readLine();
			if (line == null) throw new GdxRuntimeException("Missing common header.");
			String[] common = line.split(" ", 7); // At most we want the 6th element; i.e. "page=N"
			// At least lineHeight and base are required.
			if (common.length < 3) throw new GdxRuntimeException("Invalid common header.");
			if (!common[1].startsWith("lineHeight=")) throw new GdxRuntimeException("Missing: lineHeight");
			lineHeight = Integer.parseInt(common[1].substring(11));
			if (!common[2].startsWith("base=")) throw new GdxRuntimeException("Missing: base");
			float baseLine = Integer.parseInt(common[2].substring(5));
			int pageCount = 1;
			if (common.length >= 6 && common[5] != null && common[5].startsWith("pages=")) {
				try {
					pageCount = Math.max(1, Integer.parseInt(common[5].substring(6)));
				} catch (NumberFormatException ignored) { // Use one page.
				}
			}
			imagePaths = new String[pageCount];
			// Read each page definition.
			for (int p = 0; p < pageCount; p++) {
				// Read each "page" info line.
				line = reader.readLine();
				if (line == null) throw new GdxRuntimeException("Missing additional page definitions.");
				String[] pageLine = line.split(" ", 4);
				if (!pageLine[2].startsWith("file=")) throw new GdxRuntimeException("Missing: file");
				// Expect ID to mean "index".
				if (pageLine[1].startsWith("id=")) {
					try {
						int pageID = Integer.parseInt(pageLine[1].substring(3));
						if (pageID != p)
							throw new GdxRuntimeException("Page IDs must be indices starting at 0: " + pageLine[1].substring(3));
					} catch (NumberFormatException ex) {
						throw new GdxRuntimeException("Invalid page id: " + pageLine[1].substring(3), ex);
					}
				}
				String fileName = null;
				// The file name may or may not be quoted.
				if (pageLine[2].endsWith("\"")) {
					fileName = pageLine[2].substring(6, pageLine[2].length() - 1);
				} else {
					fileName = pageLine[2].substring(5, pageLine[2].length());
				}
				imagePaths[p] = fontFile.parent().child(fileName).path().replaceAll("\\\\", "/");
			}
			descent = 0;
			// Parse "char" lines until EOF or the kernings block begins.
			while (true) {
				line = reader.readLine();
				if (line == null) break; // EOF
				if (line.startsWith("kernings ")) break; // Starting kernings block.
				if (!line.startsWith("char ")) continue;
				Glyph glyph = new Glyph();
				StringTokenizer tokens = new StringTokenizer(line, " =");
				tokens.nextToken();
				tokens.nextToken();
				int ch = Integer.parseInt(tokens.nextToken());
				// Characters beyond the BMP cannot be stored in the char-indexed table; skip them.
				if (ch <= Character.MAX_VALUE)
					setGlyph(ch, glyph);
				else
					continue;
				glyph.id = ch;
				tokens.nextToken();
				glyph.srcX = Integer.parseInt(tokens.nextToken());
				tokens.nextToken();
				glyph.srcY = Integer.parseInt(tokens.nextToken());
				tokens.nextToken();
				glyph.width = Integer.parseInt(tokens.nextToken());
				tokens.nextToken();
				glyph.height = Integer.parseInt(tokens.nextToken());
				tokens.nextToken();
				glyph.xoffset = Integer.parseInt(tokens.nextToken());
				tokens.nextToken();
				if (flip)
					glyph.yoffset = Integer.parseInt(tokens.nextToken());
				else
					glyph.yoffset = -(glyph.height + Integer.parseInt(tokens.nextToken()));
				tokens.nextToken();
				glyph.xadvance = Integer.parseInt(tokens.nextToken());
				// Check for page safely, it could be omitted or invalid.
				if (tokens.hasMoreTokens()) tokens.nextToken();
				if (tokens.hasMoreTokens()) {
					try {
						glyph.page = Integer.parseInt(tokens.nextToken());
					} catch (NumberFormatException ignored) {
					}
				}
				if (glyph.width > 0 && glyph.height > 0) descent = Math.min(baseLine + glyph.yoffset, descent);
			}
			descent += padBottom;
			// Parse "kerning" lines.
			while (true) {
				line = reader.readLine();
				if (line == null) break;
				if (!line.startsWith("kerning ")) break;
				StringTokenizer tokens = new StringTokenizer(line, " =");
				tokens.nextToken();
				tokens.nextToken();
				int first = Integer.parseInt(tokens.nextToken());
				tokens.nextToken();
				int second = Integer.parseInt(tokens.nextToken());
				if (first < 0 || first > Character.MAX_VALUE || second < 0 || second > Character.MAX_VALUE) continue;
				Glyph glyph = getGlyph((char)first);
				tokens.nextToken();
				int amount = Integer.parseInt(tokens.nextToken());
				if (glyph != null) { // Kernings may exist for glyph pairs not contained in the font.
					glyph.setKerning(second, amount);
				}
			}
			// Synthesize a space glyph when the font lacks one, borrowing the advance of 'l' (or any glyph).
			Glyph spaceGlyph = getGlyph(' ');
			if (spaceGlyph == null) {
				spaceGlyph = new Glyph();
				spaceGlyph.id = (int)' ';
				Glyph xadvanceGlyph = getGlyph('l');
				if (xadvanceGlyph == null) xadvanceGlyph = getFirstGlyph();
				spaceGlyph.xadvance = xadvanceGlyph.xadvance;
				setGlyph(' ', spaceGlyph);
			}
			if (spaceGlyph.width == 0) spaceGlyph.width = (int)(spaceGlyph.xadvance + padRight);
			spaceWidth = spaceGlyph.width;
			// Measure x-height from the first available candidate character.
			Glyph xGlyph = null;
			for (int i = 0; i < xChars.length; i++) {
				xGlyph = getGlyph(xChars[i]);
				if (xGlyph != null) break;
			}
			if (xGlyph == null) xGlyph = getFirstGlyph();
			xHeight = xGlyph.height - padY;
			// Measure cap height from the first available candidate, falling back to the tallest glyph.
			Glyph capGlyph = null;
			for (int i = 0; i < capChars.length; i++) {
				capGlyph = getGlyph(capChars[i]);
				if (capGlyph != null) break;
			}
			if (capGlyph == null) {
				for (Glyph[] page : this.glyphs) {
					if (page == null) continue;
					for (Glyph glyph : page) {
						if (glyph == null || glyph.height == 0 || glyph.width == 0) continue;
						capHeight = Math.max(capHeight, glyph.height);
					}
				}
			} else
				capHeight = capGlyph.height;
			capHeight -= padY;
			ascent = baseLine - capHeight;
			down = -lineHeight;
			if (flip) {
				ascent = -ascent;
				down = -down;
			}
		} catch (Exception ex) {
			throw new GdxRuntimeException("Error loading font file: " + fontFile, ex);
		} finally {
			StreamUtils.closeQuietly(reader);
		}
	}

	/** Computes the glyph's texture coordinates (u/v) from its source rectangle within the given region, adjusting
	 * size/offsets when the region is an {@link AtlasRegion} with whitespace stripped during packing. */
	public void setGlyphRegion (Glyph glyph, TextureRegion region) {
		Texture texture = region.getTexture();
		float invTexWidth = 1.0f / texture.getWidth();
		float invTexHeight = 1.0f / texture.getHeight();
		float offsetX = 0, offsetY = 0;
		float u = region.u;
		float v = region.v;
		float regionWidth = region.getRegionWidth();
		float regionHeight = region.getRegionHeight();
		if (region instanceof AtlasRegion) {
			// Compensate for whitespace stripped from left and top edges.
			AtlasRegion atlasRegion = (AtlasRegion)region;
			offsetX = atlasRegion.offsetX;
			offsetY = atlasRegion.originalHeight - atlasRegion.packedHeight - atlasRegion.offsetY;
		}
		float x = glyph.srcX;
		float x2 = glyph.srcX + glyph.width;
		float y = glyph.srcY;
		float y2 = glyph.srcY + glyph.height;
		// Shift glyph for left and top edge stripped whitespace. Clip glyph for right and bottom edge stripped whitespace.
		if (offsetX > 0) {
			x -= offsetX;
			if (x < 0) {
				glyph.width += x;
				glyph.xoffset -= x;
				x = 0;
			}
			x2 -= offsetX;
			if (x2 > regionWidth) {
				glyph.width -= x2 - regionWidth;
				x2 = regionWidth;
			}
		}
		if (offsetY > 0) {
			y -= offsetY;
			if (y < 0) {
				glyph.height += y;
				y = 0;
			}
			y2 -= offsetY;
			if (y2 > regionHeight) {
				float amount = y2 - regionHeight;
				glyph.height -= amount;
				glyph.yoffset += amount;
				y2 = regionHeight;
			}
		}
		glyph.u = u + x * invTexWidth;
		glyph.u2 = u + x2 * invTexWidth;
		// For a y-up coordinate system, v and v2 are swapped so the glyph is not drawn upside down.
		if (flipped) {
			glyph.v = v + y * invTexHeight;
			glyph.v2 = v + y2 * invTexHeight;
		} else {
			glyph.v2 = v + y * invTexHeight;
			glyph.v = v + y2 * invTexHeight;
		}
	}

	/** Sets the line height, which is the distance from one line of text to the next. */
	public void setLineHeight (float height) {
		lineHeight = height * scaleY;
		down = flipped ? lineHeight : -lineHeight;
	}

	/** Stores the glyph for the given character value, allocating its table page on demand. */
	public void setGlyph (int ch, Glyph glyph) {
		Glyph[] page = glyphs[ch / PAGE_SIZE];
		if (page == null) glyphs[ch / PAGE_SIZE] = page = new Glyph[PAGE_SIZE];
		page[ch & PAGE_SIZE - 1] = glyph;
	}

	/** Returns the first glyph in the table with a nonzero size.
	 * @throws GdxRuntimeException if the font contains no such glyph. */
	public Glyph getFirstGlyph () {
		for (Glyph[] page : this.glyphs) {
			if (page == null) continue;
			for (Glyph glyph : page) {
				if (glyph == null || glyph.height == 0 || glyph.width == 0) continue;
				return glyph;
			}
		}
		throw new GdxRuntimeException("No glyphs found.");
	}

	/** Returns true if the font has a glyph for the given character. */
	public boolean hasGlyph (char ch) {
		return getGlyph(ch) != null;
	}

	/** Returns the glyph for the specified character, or null if no such glyph exists. Note that
	 * {@link #getGlyphs(GlyphRun, CharSequence, int, int)} should be be used to shape a string of characters into a list of
	 * glyphs. */
	public Glyph getGlyph (char ch) {
		Glyph[] page = glyphs[ch / PAGE_SIZE];
		if (page != null) return page[ch & PAGE_SIZE - 1];
		return null;
	}

	/** Using the specified string, populates the glyphs and positions of the specified glyph run.
	 * @param str Characters to convert to glyphs. Will not contain newline or color tags. May contain "[[" for an escaped left
	 *           square bracket. */
	public void getGlyphs (GlyphRun run, CharSequence str, int start, int end) {
		boolean markupEnabled = this.markupEnabled;
		float scaleX = this.scaleX;
		Array<Glyph> glyphs = run.glyphs;
		FloatArray xAdvances = run.xAdvances;
		Glyph lastGlyph = null;
		while (start < end) {
			char ch = str.charAt(start++);
			Glyph glyph = getGlyph(ch);
			if (glyph == null) continue;
			glyphs.add(glyph);
			if (lastGlyph == null)
				xAdvances.add(-glyph.xoffset * scaleX - padLeft); // First glyph.
			else
				xAdvances.add((lastGlyph.xadvance + lastGlyph.getKerning(ch)) * scaleX);
			lastGlyph = glyph;
			// "[[" is an escaped left square bracket, skip second character.
			if (markupEnabled && ch == '[' && start < end && str.charAt(start) == '[') start++;
		}
		// The final advance covers the visible extent of the last glyph rather than its nominal advance.
		if (lastGlyph != null) xAdvances.add((lastGlyph.xoffset + lastGlyph.width) * scaleX - padRight);
	}

	/** Returns the first valid glyph index to use to wrap to the next line, starting at the specified start index and
	 * (typically) moving toward the beginning of the glyphs array. */
	public int getWrapIndex (Array<Glyph> glyphs, int start) {
		char ch = (char)glyphs.get(start).id;
		if (isWhitespace(ch)) return start;
		for (int i = start - 1; i >= 1; i--) {
			ch = (char)glyphs.get(i).id;
			if (isWhitespace(ch)) return i;
			// Break characters stay on the current line; wrap begins just after them.
			if (isBreakChar(ch)) return i + 1;
		}
		return 0;
	}

	/** Returns true if the character is one of the configured {@link #breakChars}. */
	public boolean isBreakChar (char c) {
		if (breakChars == null) return false;
		for (char br : breakChars)
			if (c == br) return true;
		return false;
	}

	/** Returns true for the whitespace characters recognized by layout: newline, carriage return, tab, and space. */
	public boolean isWhitespace (char c) {
		switch (c) {
		case '\n':
		case '\r':
		case '\t':
		case ' ':
			return true;
		default:
			return false;
		}
	}

	/** Returns the image path for the texture page at the given index (the "id" in the BMFont file). */
	public String getImagePath (int index) {
		return imagePaths[index];
	}

	/** Returns the image paths for all texture pages. */
	public String[] getImagePaths () {
		return imagePaths;
	}

	/** Returns the .fnt file this data was loaded from, or null. */
	public FileHandle getFontFile () {
		return fontFile;
	}

	/** Scales the font by the specified amounts on both axes
	 * <p>
	 * Note that smoother scaling can be achieved if the texture backing the BitmapFont is using {@link TextureFilter#Linear}.
	 * The default is Nearest, so use a BitmapFont constructor that takes a {@link TextureRegion}.
	 * @throws IllegalArgumentException if scaleX or scaleY is zero. */
	public void setScale (float scaleX, float scaleY) {
		if (scaleX == 0) throw new IllegalArgumentException("scaleX cannot be 0.");
		if (scaleY == 0) throw new IllegalArgumentException("scaleY cannot be 0.");
		// Metrics are stored pre-scaled, so apply only the ratio between the new and current scale.
		float x = scaleX / this.scaleX;
		float y = scaleY / this.scaleY;
		lineHeight *= y;
		spaceWidth *= x;
		xHeight *= y;
		capHeight *= y;
		ascent *= y;
		descent *= y;
		down *= y;
		padTop *= y;
		padLeft *= y;
		padBottom *= y;
		padRight *= y;
		this.scaleX = scaleX;
		this.scaleY = scaleY;
	}

	/** Scales the font by the specified amount in both directions.
	 * @see #setScale(float, float)
	 * @throws IllegalArgumentException if scaleX or scaleY is zero. */
	public void setScale (float scaleXY) {
		setScale(scaleXY, scaleXY);
	}

	/** Sets the font's scale relative to the current scale.
	 * @see #setScale(float, float)
	 * @throws IllegalArgumentException if the resulting scale is zero. */
	public void scale (float amount) {
		setScale(scaleX + amount, scaleY + amount);
	}
}
}
| |
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package g7.bluesky.launcher3;
import android.annotation.TargetApi;
import android.app.SearchManager;
import android.content.ComponentName;
import android.content.ContentResolver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.res.Configuration;
import android.content.res.Resources;
import android.database.ContentObserver;
import android.graphics.Point;
import android.os.Build;
import android.os.Handler;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.Display;
import android.view.WindowManager;
import g7.bluesky.launcher3.compat.LauncherAppsCompat;
import g7.bluesky.launcher3.compat.PackageInstallerCompat;
import g7.bluesky.launcher3.compat.PackageInstallerCompat.PackageInstallInfo;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
/** Process-wide singleton holding launcher state shared across activities: the model, icon cache,
 * widget preview database, app filter, and device-profile grid. The application context must be set
 * via {@link #setApplicationContext(Context)} before the first {@link #getInstance()} call. */
public class LauncherAppState implements DeviceProfile.DeviceProfileCallbacks {
	private static final String TAG = "LauncherAppState";
	private static final boolean DEBUG = false;

	private final AppFilter mAppFilter;
	private final BuildInfo mBuildInfo;
	private final LauncherModel mModel;
	private final IconCache mIconCache;
	private final boolean mIsScreenLarge;
	private final float mScreenDensity;
	private final int mLongPressTimeout = 300;
	private WidgetPreviewLoader.CacheDb mWidgetPreviewCacheDb;
	private boolean mWallpaperChangedSinceLastCheck;
	private static WeakReference<LauncherProvider> sLauncherProvider;
	private static Context sContext;
	private static LauncherAppState INSTANCE;
	private DynamicGrid mDynamicGrid;

	/** Returns the singleton, lazily creating it on first use.
	 * NOTE(review): not synchronized -- presumably only called from the main thread; confirm. */
	public static LauncherAppState getInstance() {
		if (INSTANCE == null) {
			INSTANCE = new LauncherAppState();
		}
		return INSTANCE;
	}

	/** Returns the singleton if it already exists, or null without creating it. */
	public static LauncherAppState getInstanceNoCreate() {
		return INSTANCE;
	}

	/** Returns the application context captured by {@link #setApplicationContext(Context)}. */
	public Context getContext() {
		return sContext;
	}

	/** Stores the application context used by the singleton; warns (but proceeds) if set twice. */
	public static void setApplicationContext(Context context) {
		if (sContext != null) {
			Log.w(Launcher.TAG, "setApplicationContext called twice! old=" + sContext + " new=" + context);
		}
		sContext = context.getApplicationContext();
	}

	/** Builds all shared state and registers broadcast receivers and the favorites content observer.
	 * Private: construction only happens through {@link #getInstance()}. */
	private LauncherAppState() {
		if (sContext == null) {
			throw new IllegalStateException("LauncherAppState inited before app context set");
		}
		Log.v(Launcher.TAG, "LauncherAppState inited");
		if (sContext.getResources().getBoolean(R.bool.debug_memory_enabled)) {
			MemoryTracker.startTrackingMe(sContext, "L");
		}
		// set sIsScreenXLarge and mScreenDensity *before* creating icon cache
		mIsScreenLarge = isScreenLarge(sContext.getResources());
		mScreenDensity = sContext.getResources().getDisplayMetrics().density;
		recreateWidgetPreviewDb();
		mIconCache = new IconCache(sContext);
		mAppFilter = AppFilter.loadByName(sContext.getString(R.string.app_filter_class));
		mBuildInfo = BuildInfo.loadByName(sContext.getString(R.string.build_info_class));
		mModel = new LauncherModel(this, mIconCache, mAppFilter);
		final LauncherAppsCompat launcherApps = LauncherAppsCompat.getInstance(sContext);
		launcherApps.addOnAppsChangedCallback(mModel);
		// Register intent receivers
		IntentFilter filter = new IntentFilter();
		filter.addAction(Intent.ACTION_LOCALE_CHANGED);
		filter.addAction(Intent.ACTION_CONFIGURATION_CHANGED);
		sContext.registerReceiver(mModel, filter);
		filter = new IntentFilter();
		filter.addAction(SearchManager.INTENT_GLOBAL_SEARCH_ACTIVITY_CHANGED);
		sContext.registerReceiver(mModel, filter);
		filter = new IntentFilter();
		filter.addAction(SearchManager.INTENT_ACTION_SEARCHABLES_CHANGED);
		sContext.registerReceiver(mModel, filter);
		// Register for changes to the favorites
		ContentResolver resolver = sContext.getContentResolver();
		resolver.registerContentObserver(LauncherSettings.Favorites.CONTENT_URI, true,
				mFavoritesObserver);
	}

	/** Closes any existing widget preview cache database and opens a fresh one. */
	public void recreateWidgetPreviewDb() {
		if (mWidgetPreviewCacheDb != null) {
			mWidgetPreviewCacheDb.close();
		}
		mWidgetPreviewCacheDb = new WidgetPreviewLoader.CacheDb(sContext);
	}

	/**
	 * Call from Application.onTerminate(), which is not guaranteed to ever be called.
	 */
	public void onTerminate() {
		sContext.unregisterReceiver(mModel);
		final LauncherAppsCompat launcherApps = LauncherAppsCompat.getInstance(sContext);
		launcherApps.removeOnAppsChangedCallback(mModel);
		PackageInstallerCompat.getInstance(sContext).onStop();
		ContentResolver resolver = sContext.getContentResolver();
		resolver.unregisterContentObserver(mFavoritesObserver);
	}

	/**
	 * Receives notifications whenever the user favorites have changed.
	 */
	private final ContentObserver mFavoritesObserver = new ContentObserver(new Handler()) {
		@Override
		public void onChange(boolean selfChange) {
			// If the database has ever changed, then we really need to force a reload of the
			// workspace on the next load
			mModel.resetLoadedState(false, true);
			mModel.startLoaderFromBackground();
		}
	};

	/** Binds the model to the given launcher activity and returns it. */
	LauncherModel setLauncher(Launcher launcher) {
		mModel.initialize(launcher);
		return mModel;
	}

	public IconCache getIconCache() {
		return mIconCache;
	}

	LauncherModel getModel() {
		return mModel;
	}

	/** Returns true unless the configured app filter explicitly hides this component. */
	boolean shouldShowAppOrWidgetProvider(ComponentName componentName) {
		return mAppFilter == null || mAppFilter.shouldShowApp(componentName);
	}

	WidgetPreviewLoader.CacheDb getWidgetPreviewCacheDb() {
		return mWidgetPreviewCacheDb;
	}

	// Held weakly so the provider's lifecycle is not extended by this singleton.
	static void setLauncherProvider(LauncherProvider provider) {
		sLauncherProvider = new WeakReference<LauncherProvider>(provider);
	}

	static LauncherProvider getLauncherProvider() {
		return sLauncherProvider.get();
	}

	public static String getSharedPreferencesKey() {
		return LauncherFiles.SHARED_PREFERENCES_KEY;
	}

	/** (Re)creates the dynamic grid for the given context and registers this instance for
	 * available-size callbacks; returns the resulting device profile. */
	@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR1)
	DeviceProfile initDynamicGrid(Context context) {
		mDynamicGrid = createDynamicGrid(context, mDynamicGrid);
		mDynamicGrid.getDeviceProfile().addCallback(this);
		return mDynamicGrid.getDeviceProfile();
	}

	/** Creates the dynamic grid from the current display metrics, or updates the given grid's
	 * device profile in place when one already exists. */
	@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR1)
	static DynamicGrid createDynamicGrid(Context context, DynamicGrid dynamicGrid) {
		// Determine the dynamic grid properties
		WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
		Display display = wm.getDefaultDisplay();
		Point realSize = new Point();
		display.getRealSize(realSize);
		DisplayMetrics dm = new DisplayMetrics();
		display.getMetrics(dm);
		if (dynamicGrid == null) {
			Point smallestSize = new Point();
			Point largestSize = new Point();
			display.getCurrentSizeRange(smallestSize, largestSize);
			dynamicGrid = new DynamicGrid(context,
					context.getResources(),
					Math.min(smallestSize.x, smallestSize.y),
					Math.min(largestSize.x, largestSize.y),
					realSize.x, realSize.y,
					dm.widthPixels, dm.heightPixels);
		}
		// Update the icon size
		DeviceProfile grid = dynamicGrid.getDeviceProfile();
		grid.updateFromConfiguration(context, context.getResources(),
				realSize.x, realSize.y,
				dm.widthPixels, dm.heightPixels);
		return dynamicGrid;
	}

	public DynamicGrid getDynamicGrid() {
		return mDynamicGrid;
	}

	public boolean isScreenLarge() {
		return mIsScreenLarge;
	}

	// Need a version that doesn't require an instance of LauncherAppState for the wallpaper picker
	public static boolean isScreenLarge(Resources res) {
		return res.getBoolean(R.bool.is_large_tablet);
	}

	public static boolean isScreenLandscape(Context context) {
		return context.getResources().getConfiguration().orientation ==
				Configuration.ORIENTATION_LANDSCAPE;
	}

	public float getScreenDensity() {
		return mScreenDensity;
	}

	public int getLongPressTimeout() {
		return mLongPressTimeout;
	}

	public void onWallpaperChanged() {
		mWallpaperChangedSinceLastCheck = true;
	}

	/** Returns whether the wallpaper changed since the previous call, clearing the flag as a side effect. */
	public boolean hasWallpaperChangedSinceLastCheck() {
		boolean result = mWallpaperChangedSinceLastCheck;
		mWallpaperChangedSinceLastCheck = false;
		return result;
	}

	@Override
	public void onAvailableSizeChanged(DeviceProfile grid) {
		Utilities.setIconSize(grid.iconSizePx);
	}

	public static boolean isDisableAllApps() {
		// Returns false on non-dogfood builds.
		return getInstance().mBuildInfo.isDogfoodBuild() &&
				Utilities.isPropertyEnabled(Launcher.DISABLE_ALL_APPS_PROPERTY);
	}

	public static boolean isDogfoodBuild() {
		return getInstance().mBuildInfo.isDogfoodBuild();
	}

	public void setPackageState(ArrayList<PackageInstallInfo> installInfo) {
		mModel.setPackageState(installInfo);
	}

	/**
	 * Updates the icons and label of all icons for the provided package name.
	 */
	public void updatePackageBadge(String packageName) {
		mModel.updatePackageBadge(packageName);
	}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.openwire.tool;
import java.io.File;
import java.io.FileWriter;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.taskdefs.FixCRLF;
import org.codehaus.jam.JClass;
import org.codehaus.jam.JProperty;
import org.codehaus.jam.JamClassIterator;
/**
*
*/
/** Base class for OpenWire code generators that emit one generated source file per marshallable
 * class discovered via JAM. Classes that are maintained by hand are skipped so they are never
 * overwritten; failures on individual classes are reported and do not abort the whole run. */
public abstract class MultiSourceGenerator extends OpenWireGenerator {

    /** Simple class names whose source is hand-written and must never be regenerated. */
    protected Set<String> manuallyMaintainedClasses = new HashSet<String>();
    /** Directory the generated files are written into. */
    protected File destDir;
    /** File currently being generated; updated per class by {@link #processClass(JClass)}. */
    protected File destFile;
    /** Class currently being processed. */
    protected JClass jclass;
    /** Superclass of the class currently being processed. */
    protected JClass superclass;
    protected String simpleName;
    protected String className;
    protected String baseClass;
    protected StringBuffer buffer;

    public MultiSourceGenerator() {
        initialiseManuallyMaintainedClasses();
    }

    /** Generates a source file for every valid marshallable class.
     * @return always null
     * @throws IllegalArgumentException if no destination directory was configured */
    public Object run() {
        if (destDir == null) {
            throw new IllegalArgumentException("No destDir defined!");
        }
        System.out.println(getClass().getName() + " generating files in: " + destDir);
        destDir.mkdirs();
        buffer = new StringBuffer();
        JamClassIterator iter = getClasses();
        while (iter.hasNext()) {
            try {
                jclass = iter.nextClass();
                if (isValidClass(jclass)) {
                    processClass(jclass);
                }
            } catch (Exception e) {
                // Report and continue: one bad class should not abort the whole generation run.
                System.err.println("Unable to process: " + jclass);
                e.printStackTrace();
            }
        }
        return null;
    }

    /**
     * Returns all the valid properties available on the current class.
     */
    public List<JProperty> getProperties() {
        List<JProperty> answer = new ArrayList<JProperty>();
        for (JProperty property : jclass.getDeclaredProperties()) {
            if (isValidProperty(property)) {
                answer.add(property);
            }
        }
        return answer;
    }

    /** A class is generated only when it carries the openwire:marshaller annotation and is not in
     * the manually-maintained set. */
    protected boolean isValidClass(JClass jclass) {
        if (jclass.getAnnotation("openwire:marshaller") == null) {
            return false;
        }
        return !manuallyMaintainedClasses.contains(jclass.getSimpleName());
    }

    /** Generates the output file for a single class and normalizes its line endings. */
    protected void processClass(JClass jclass) {
        simpleName = jclass.getSimpleName();
        superclass = jclass.getSuperclass();
        System.out.println(getClass().getName() + " processing class: " + simpleName);
        className = getClassName(jclass);
        destFile = new File(destDir, className + filePostFix);
        baseClass = getBaseClassName(jclass);
        // try-with-resources guarantees the writer is closed (and flushed) even when
        // generation fails part way through.
        try (PrintWriter out = new PrintWriter(new FileWriter(destFile))) {
            generateFile(out);
        } catch (Exception e) {
            // Include the destination file so the failing class is identifiable from the trace.
            throw new RuntimeException("Unable to generate " + destFile, e);
        }
        // Use the FixCRLF Ant Task to make sure the file has consistent newlines
        // so that SVN does not complain on checkin.
        Project project = new Project();
        project.init();
        FixCRLF fixCRLF = new FixCRLF();
        fixCRLF.setProject(project);
        fixCRLF.setSrcdir(destFile.getParentFile());
        fixCRLF.setIncludes(destFile.getName());
        fixCRLF.execute();
    }

    /** Writes the generated source for the current class to the given writer. */
    protected abstract void generateFile(PrintWriter out) throws Exception;

    /** Returns the simple name of the base class the generated class should extend; defaults to
     * "BaseDataStructure" when the superclass is absent or is Object. */
    protected String getBaseClassName(JClass jclass) {
        String answer = "BaseDataStructure";
        if (superclass != null) {
            String name = superclass.getSimpleName();
            if (name != null && !name.equals("Object")) {
                answer = name;
            }
        }
        return answer;
    }

    protected String getClassName(JClass jclass) {
        return jclass.getSimpleName();
    }

    public boolean isAbstractClass() {
        return jclass != null && jclass.isAbstract();
    }

    /** Returns "abstract " when the current class is abstract, for splicing into generated declarations. */
    public String getAbstractClassText() {
        return isAbstractClass() ? "abstract " : "";
    }

    public boolean isMarshallerAware() {
        return isMarshallAware(jclass);
    }

    /** Seeds {@link #manuallyMaintainedClasses} with the hand-written OpenWire classes. */
    protected void initialiseManuallyMaintainedClasses() {
        String[] names = {
            "ActiveMQDestination", "ActiveMQTempDestination", "ActiveMQQueue", "ActiveMQTopic", "ActiveMQTempQueue", "ActiveMQTempTopic", "BaseCommand", "ActiveMQMessage", "ActiveMQTextMessage",
            "ActiveMQMapMessage", "ActiveMQBytesMessage", "ActiveMQStreamMessage", "ActiveMQBlobMessage", "DataStructureSupport", "WireFormatInfo", "ActiveMQObjectMessage"
        };
        for (String name : names) {
            manuallyMaintainedClasses.add(name);
        }
    }

    public String getBaseClass() {
        return baseClass;
    }

    public void setBaseClass(String baseClass) {
        this.baseClass = baseClass;
    }

    public String getClassName() {
        return className;
    }

    public void setClassName(String className) {
        this.className = className;
    }

    public File getDestDir() {
        return destDir;
    }

    public void setDestDir(File destDir) {
        this.destDir = destDir;
    }

    public File getDestFile() {
        return destFile;
    }

    public void setDestFile(File destFile) {
        this.destFile = destFile;
    }

    public JClass getJclass() {
        return jclass;
    }

    public void setJclass(JClass jclass) {
        this.jclass = jclass;
    }

    public Set<String> getManuallyMaintainedClasses() {
        return manuallyMaintainedClasses;
    }

    public void setManuallyMaintainedClasses(Set<String> manuallyMaintainedClasses) {
        this.manuallyMaintainedClasses = manuallyMaintainedClasses;
    }

    public String getSimpleName() {
        return simpleName;
    }

    public void setSimpleName(String simpleName) {
        this.simpleName = simpleName;
    }

    public JClass getSuperclass() {
        return superclass;
    }

    public void setSuperclass(JClass superclass) {
        this.superclass = superclass;
    }
}
| |
// Copyright 2020 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package build.buildfarm.common.redis;
import build.buildfarm.common.ExecutionProperties;
import build.buildfarm.common.MapUtils;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSetMultimap;
import com.google.common.collect.SetMultimap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * @class ProvisionedRedisQueue
 * @brief A queue that is designed to hold particularly provisioned elements.
 * @details A provisioned redis queue is an implementation of a queue data structure which
 *     internally uses a redis cluster to distribute the data across shards. It's important to know
 *     that the lifetime of the queue persists before and after the queue data structure is created
 *     (since it exists in redis). Therefore, two redis queues with the same name would in fact be
 *     the same underlying redis queue. This redis queue comes with a list of required provisions.
 *     If the queue element does not meet the required provisions, it should not be stored in the
 *     queue. Provision queues are intended to represent particular operations that should only be
 *     processed by particular workers. An example use case for this would be to have two dedicated
 *     provision queues for CPU and GPU operations. CPU/GPU requirements would be determined through
 *     the remote api's command platform properties. We designate provision queues to have a set of
 *     "required provisions" (which match the platform properties). This allows the scheduler to
 *     distribute operations by their properties and allows workers to dequeue from particular
 *     queues.
 */
public class ProvisionedRedisQueue {
  /**
   * @field WILDCARD_VALUE
   * @brief Wildcard value.
   * @details Symbol for identifying wildcard in both key/value of provisions.
   */
  public static final String WILDCARD_VALUE = "*";

  /**
   * @field isFullyWildcard
   * @brief If the queue will deem any set of properties eligible.
   * @details If any of the provision keys has a wildcard, we consider anything for the queue to be
   *     eligible.
   */
  private final boolean isFullyWildcard;

  /**
   * @field allowUserUnmatched
   * @brief Can the user provide extra platform properties that are not a part of the queue and
   *     still be matched with it?
   * @details If true, the user can provide a superset of platform properties and still be matched
   *     with the queue.
   */
  private final boolean allowUserUnmatched;

  /**
   * @field provisions
   * @brief Provisions enforced by the queue.
   * @details The provisions are filtered by wildcard.
   */
  private final FilteredProvisions provisions;

  /**
   * @field queue
   * @brief The queue itself.
   * @details A balanced redis queue designed to hold particularly provisioned elements.
   */
  private final BalancedRedisQueue queue;

  /**
   * @brief Constructor.
   * @details Construct the provision queue. Extra user-provided properties are not allowed.
   * @param name The global name of the queue.
   * @param hashtags Hashtags to distribute queue data.
   * @param filterProvisions The filtered provisions of the queue.
   * @note Overloaded.
   */
  public ProvisionedRedisQueue(
      String name, List<String> hashtags, SetMultimap<String, String> filterProvisions) {
    // Delegate to the full constructor so the wildcard-filtering logic lives in one place.
    this(name, hashtags, filterProvisions, /* allowUserUnmatched= */ false);
  }

  /**
   * @brief Constructor.
   * @details Construct the provision queue.
   * @param name The global name of the queue.
   * @param hashtags Hashtags to distribute queue data.
   * @param filterProvisions The filtered provisions of the queue.
   * @param allowUserUnmatched Whether the user can provide extra platform properties and still
   *     match the queue.
   * @note Overloaded.
   */
  public ProvisionedRedisQueue(
      String name,
      List<String> hashtags,
      SetMultimap<String, String> filterProvisions,
      boolean allowUserUnmatched) {
    this.queue = new BalancedRedisQueue(name, hashtags);
    isFullyWildcard = filterProvisions.containsKey(WILDCARD_VALUE);
    provisions = filterProvisionsByWildcard(filterProvisions, isFullyWildcard);
    this.allowUserUnmatched = allowUserUnmatched;
  }

  /**
   * @brief Checks required properties.
   * @details Checks whether the properties given fulfill all of the required provisions of the
   *     queue.
   * @param properties Properties to check that requirements are met.
   * @return Whether the queue is eligible based on the properties given.
   * @note Suggested return identifier: isEligible.
   */
  public boolean isEligible(SetMultimap<String, String> properties) {
    // check if a property is specifically requesting to match with the queue
    // any attempt to specifically match will not evaluate other properties
    Set<String> selected = properties.get(ExecutionProperties.CHOOSE_QUEUE);
    if (!selected.isEmpty()) {
      return selected.contains(queue.getName());
    }

    // fully wildcarded queues are always eligible
    if (isFullyWildcard) {
      return true;
    }

    // all required non-wildcard provisions must be matched
    Set<Map.Entry<String, String>> requirements = new HashSet<>(provisions.required);
    for (Map.Entry<String, String> property : properties.entries()) {
      // for each of the properties specified, we must match requirements
      if (!provisions.wildcard.contains(property.getKey())
          && !requirements.remove(property)
          && !allowUserUnmatched) {
        return false;
      }
    }
    return requirements.isEmpty();
  }

  /**
   * @brief Explain eligibility.
   * @details Returns an explanation as to why the properties provided are eligible / ineligible to
   *     be placed on the queue.
   * @param properties Properties to get an eligibility explanation of.
   * @return An explanation on the eligibility of the provided properties.
   * @note Suggested return identifier: explanation.
   */
  public String explainEligibility(SetMultimap<String, String> properties) {
    EligibilityResult result = getEligibilityResult(properties);
    return toString(result);
  }

  /**
   * @brief Get queue.
   * @details Obtain the internal queue.
   * @return The internal queue.
   * @note Suggested return identifier: queue.
   */
  public BalancedRedisQueue queue() {
    return queue;
  }

  /**
   * @brief Filter the provisions into separate sets by checking for the existence of wildcards.
   * @details This will organize the incoming provisions into separate sets.
   * @param filterProvisions The filtered provisions of the queue.
   * @param isFullyWildcard If the queue will deem any set of properties eligible.
   * @return Provisions filtered by wildcard.
   * @note Suggested return identifier: filteredProvisions.
   */
  private static FilteredProvisions filterProvisionsByWildcard(
      SetMultimap<String, String> filterProvisions, boolean isFullyWildcard) {
    FilteredProvisions provisions = new FilteredProvisions();
    // a key is "wildcard" when any of its values is the wildcard symbol
    provisions.wildcard =
        isFullyWildcard
            ? ImmutableSet.of()
            : filterProvisions.asMap().entrySet().stream()
                .filter(e -> e.getValue().contains(ProvisionedRedisQueue.WILDCARD_VALUE))
                .map(Map.Entry::getKey)
                .collect(ImmutableSet.toImmutableSet());
    // everything that is not wildcarded is a hard requirement
    provisions.required =
        isFullyWildcard
            ? ImmutableSet.of()
            : filterProvisions.entries().stream()
                .filter(e -> !provisions.wildcard.contains(e.getKey()))
                .collect(ImmutableSet.toImmutableSet());
    return provisions;
  }

  /**
   * @brief Get eligibility result.
   * @details Perform eligibility check with detailed information on evaluation.
   * @param properties Properties to get an eligibility explanation of.
   * @return Detailed results on the evaluation of an eligibility check.
   * @note Suggested return identifier: eligibilityResult.
   */
  private EligibilityResult getEligibilityResult(SetMultimap<String, String> properties) {
    EligibilityResult result = new EligibilityResult();
    result.queueName = queue.getName();
    result.isEligible = isEligible(properties);
    result.isFullyWildcard = isFullyWildcard;
    result.isSpecificallyChosen = false;
    result.allowsUnmatched = allowUserUnmatched;

    // check if a property is specifically requesting to match with the queue
    // any attempt to specifically match will not evaluate other properties
    Set<String> selected = properties.get(ExecutionProperties.CHOOSE_QUEUE);
    if (!selected.isEmpty()) {
      result.isSpecificallyChosen = selected.contains(queue.getName());
    }

    // gather matched, unmatched, and still required properties
    ImmutableSetMultimap.Builder<String, String> matched = ImmutableSetMultimap.builder();
    ImmutableSetMultimap.Builder<String, String> unmatched = ImmutableSetMultimap.builder();
    ImmutableSetMultimap.Builder<String, String> stillRequired = ImmutableSetMultimap.builder();
    Set<Map.Entry<String, String>> requirements = new HashSet<>(provisions.required);
    for (Map.Entry<String, String> property : properties.entries()) {
      if (!provisions.wildcard.contains(property.getKey()) && !requirements.remove(property)) {
        unmatched.put(property);
      } else {
        matched.put(property);
      }
    }
    stillRequired.putAll(requirements);

    result.matched = matched.build();
    result.unmatched = unmatched.build();
    result.stillRequired = stillRequired.build();
    return result;
  }

  /**
   * @brief Convert eligibility result to printable string.
   * @details Used for visibility / debugging.
   * @param result Detailed results on the evaluation of an eligibility check.
   * @return An explanation on the eligibility of the provided properties.
   * @note Suggested return identifier: explanation.
   */
  private static String toString(EligibilityResult result) {
    // build incrementally instead of repeated String concatenation
    StringBuilder explanation = new StringBuilder();
    if (result.isEligible) {
      explanation.append("The properties are eligible for the ");
    } else {
      explanation.append("The properties are not eligible for the ");
    }
    explanation.append(result.queueName).append(" queue.\n");
    if (result.isSpecificallyChosen) {
      explanation.append("The queue was specifically chosen.\n");
      return explanation.toString();
    }
    if (result.isFullyWildcard) {
      explanation.append("The queue is fully wildcard.\n");
      return explanation.toString();
    }
    explanation.append("matched: ").append(MapUtils.toString(result.matched.asMap())).append("\n");
    explanation
        .append("unmatched: ")
        .append(MapUtils.toString(result.unmatched.asMap()))
        .append("\n");
    explanation
        .append("still required: ")
        .append(MapUtils.toString(result.stillRequired.asMap()))
        .append("\n");
    return explanation.toString();
  }
}
| |
/**
*/
package CIM.IEC61970.Informative.InfERPSupport.impl;
import CIM.IEC61968.Common.impl.DocumentImpl;
import CIM.IEC61968.Work.Work;
import CIM.IEC61968.Work.WorkPackage;
import CIM.IEC61970.Informative.InfERPSupport.ErpProjectAccounting;
import CIM.IEC61970.Informative.InfERPSupport.ErpTimeEntry;
import CIM.IEC61970.Informative.InfERPSupport.InfERPSupportPackage;
import CIM.IEC61970.Informative.InfWork.InfWorkPackage;
import CIM.IEC61970.Informative.InfWork.Project;
import CIM.IEC61970.Informative.InfWork.WorkCostDetail;
import java.util.Collection;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.util.EObjectWithInverseResolvingEList;
import org.eclipse.emf.ecore.util.InternalEList;
/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Erp Project Accounting</b></em>'.
 *
 * Maintenance note (review): this class is EMF-generated (see the {@code @generated}
 * tags); manual edits outside the user-doc regions are lost on regeneration.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * </p>
 * <ul>
 *   <li>{@link CIM.IEC61970.Informative.InfERPSupport.impl.ErpProjectAccountingImpl#getProjects <em>Projects</em>}</li>
 *   <li>{@link CIM.IEC61970.Informative.InfERPSupport.impl.ErpProjectAccountingImpl#getWorkCostDetails <em>Work Cost Details</em>}</li>
 *   <li>{@link CIM.IEC61970.Informative.InfERPSupport.impl.ErpProjectAccountingImpl#getErpTimeEntries <em>Erp Time Entries</em>}</li>
 *   <li>{@link CIM.IEC61970.Informative.InfERPSupport.impl.ErpProjectAccountingImpl#getWorks <em>Works</em>}</li>
 * </ul>
 *
 * @generated
 */
public class ErpProjectAccountingImpl extends DocumentImpl implements ErpProjectAccounting {
    /**
     * The cached value of the '{@link #getProjects() <em>Projects</em>}' reference list.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getProjects()
     * @generated
     * @ordered
     */
    protected EList<Project> projects;

    /**
     * The cached value of the '{@link #getWorkCostDetails() <em>Work Cost Details</em>}' reference list.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getWorkCostDetails()
     * @generated
     * @ordered
     */
    protected EList<WorkCostDetail> workCostDetails;

    /**
     * The cached value of the '{@link #getErpTimeEntries() <em>Erp Time Entries</em>}' reference list.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getErpTimeEntries()
     * @generated
     * @ordered
     */
    protected EList<ErpTimeEntry> erpTimeEntries;

    /**
     * The cached value of the '{@link #getWorks() <em>Works</em>}' reference list.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getWorks()
     * @generated
     * @ordered
     */
    protected EList<Work> works;

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected ErpProjectAccountingImpl() {
        super();
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        return InfERPSupportPackage.Literals.ERP_PROJECT_ACCOUNTING;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EList<Project> getProjects() {
        // lazily created; list maintains the inverse Project -> ErpProjectAccounting reference
        if (projects == null) {
            projects = new EObjectWithInverseResolvingEList<Project>(Project.class, this, InfERPSupportPackage.ERP_PROJECT_ACCOUNTING__PROJECTS, InfWorkPackage.PROJECT__ERP_PROJECT_ACCOUNTING);
        }
        return projects;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EList<WorkCostDetail> getWorkCostDetails() {
        // lazily created; list maintains the inverse WorkCostDetail -> ErpProjectAccounting reference
        if (workCostDetails == null) {
            workCostDetails = new EObjectWithInverseResolvingEList<WorkCostDetail>(WorkCostDetail.class, this, InfERPSupportPackage.ERP_PROJECT_ACCOUNTING__WORK_COST_DETAILS, InfWorkPackage.WORK_COST_DETAIL__ERP_PROJECT_ACCOUNTING);
        }
        return workCostDetails;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EList<ErpTimeEntry> getErpTimeEntries() {
        // lazily created; list maintains the inverse ErpTimeEntry -> ErpProjectAccounting reference
        if (erpTimeEntries == null) {
            erpTimeEntries = new EObjectWithInverseResolvingEList<ErpTimeEntry>(ErpTimeEntry.class, this, InfERPSupportPackage.ERP_PROJECT_ACCOUNTING__ERP_TIME_ENTRIES, InfERPSupportPackage.ERP_TIME_ENTRY__ERP_PROJECT_ACCOUNTING);
        }
        return erpTimeEntries;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EList<Work> getWorks() {
        // lazily created; list maintains the inverse Work -> ErpProjectAccounting reference
        if (works == null) {
            works = new EObjectWithInverseResolvingEList<Work>(Work.class, this, InfERPSupportPackage.ERP_PROJECT_ACCOUNTING__WORKS, WorkPackage.WORK__ERP_PROJECT_ACCOUNTING);
        }
        return works;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public NotificationChain eInverseAdd(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
        // wire the inverse side of each bidirectional reference by feature ID
        switch (featureID) {
            case InfERPSupportPackage.ERP_PROJECT_ACCOUNTING__PROJECTS:
                return ((InternalEList<InternalEObject>)(InternalEList<?>)getProjects()).basicAdd(otherEnd, msgs);
            case InfERPSupportPackage.ERP_PROJECT_ACCOUNTING__WORK_COST_DETAILS:
                return ((InternalEList<InternalEObject>)(InternalEList<?>)getWorkCostDetails()).basicAdd(otherEnd, msgs);
            case InfERPSupportPackage.ERP_PROJECT_ACCOUNTING__ERP_TIME_ENTRIES:
                return ((InternalEList<InternalEObject>)(InternalEList<?>)getErpTimeEntries()).basicAdd(otherEnd, msgs);
            case InfERPSupportPackage.ERP_PROJECT_ACCOUNTING__WORKS:
                return ((InternalEList<InternalEObject>)(InternalEList<?>)getWorks()).basicAdd(otherEnd, msgs);
        }
        return super.eInverseAdd(otherEnd, featureID, msgs);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
        // unwire the inverse side of each bidirectional reference by feature ID
        switch (featureID) {
            case InfERPSupportPackage.ERP_PROJECT_ACCOUNTING__PROJECTS:
                return ((InternalEList<?>)getProjects()).basicRemove(otherEnd, msgs);
            case InfERPSupportPackage.ERP_PROJECT_ACCOUNTING__WORK_COST_DETAILS:
                return ((InternalEList<?>)getWorkCostDetails()).basicRemove(otherEnd, msgs);
            case InfERPSupportPackage.ERP_PROJECT_ACCOUNTING__ERP_TIME_ENTRIES:
                return ((InternalEList<?>)getErpTimeEntries()).basicRemove(otherEnd, msgs);
            case InfERPSupportPackage.ERP_PROJECT_ACCOUNTING__WORKS:
                return ((InternalEList<?>)getWorks()).basicRemove(otherEnd, msgs);
        }
        return super.eInverseRemove(otherEnd, featureID, msgs);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        // reflective accessor used by the EMF framework
        switch (featureID) {
            case InfERPSupportPackage.ERP_PROJECT_ACCOUNTING__PROJECTS:
                return getProjects();
            case InfERPSupportPackage.ERP_PROJECT_ACCOUNTING__WORK_COST_DETAILS:
                return getWorkCostDetails();
            case InfERPSupportPackage.ERP_PROJECT_ACCOUNTING__ERP_TIME_ENTRIES:
                return getErpTimeEntries();
            case InfERPSupportPackage.ERP_PROJECT_ACCOUNTING__WORKS:
                return getWorks();
        }
        return super.eGet(featureID, resolve, coreType);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public void eSet(int featureID, Object newValue) {
        // reflective mutator: replaces the whole list content for the given feature
        switch (featureID) {
            case InfERPSupportPackage.ERP_PROJECT_ACCOUNTING__PROJECTS:
                getProjects().clear();
                getProjects().addAll((Collection<? extends Project>)newValue);
                return;
            case InfERPSupportPackage.ERP_PROJECT_ACCOUNTING__WORK_COST_DETAILS:
                getWorkCostDetails().clear();
                getWorkCostDetails().addAll((Collection<? extends WorkCostDetail>)newValue);
                return;
            case InfERPSupportPackage.ERP_PROJECT_ACCOUNTING__ERP_TIME_ENTRIES:
                getErpTimeEntries().clear();
                getErpTimeEntries().addAll((Collection<? extends ErpTimeEntry>)newValue);
                return;
            case InfERPSupportPackage.ERP_PROJECT_ACCOUNTING__WORKS:
                getWorks().clear();
                getWorks().addAll((Collection<? extends Work>)newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        // reflective unset: clears the list for the given feature
        switch (featureID) {
            case InfERPSupportPackage.ERP_PROJECT_ACCOUNTING__PROJECTS:
                getProjects().clear();
                return;
            case InfERPSupportPackage.ERP_PROJECT_ACCOUNTING__WORK_COST_DETAILS:
                getWorkCostDetails().clear();
                return;
            case InfERPSupportPackage.ERP_PROJECT_ACCOUNTING__ERP_TIME_ENTRIES:
                getErpTimeEntries().clear();
                return;
            case InfERPSupportPackage.ERP_PROJECT_ACCOUNTING__WORKS:
                getWorks().clear();
                return;
        }
        super.eUnset(featureID);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        // a feature is "set" when its lazily-created list exists and is non-empty
        switch (featureID) {
            case InfERPSupportPackage.ERP_PROJECT_ACCOUNTING__PROJECTS:
                return projects != null && !projects.isEmpty();
            case InfERPSupportPackage.ERP_PROJECT_ACCOUNTING__WORK_COST_DETAILS:
                return workCostDetails != null && !workCostDetails.isEmpty();
            case InfERPSupportPackage.ERP_PROJECT_ACCOUNTING__ERP_TIME_ENTRIES:
                return erpTimeEntries != null && !erpTimeEntries.isEmpty();
            case InfERPSupportPackage.ERP_PROJECT_ACCOUNTING__WORKS:
                return works != null && !works.isEmpty();
        }
        return super.eIsSet(featureID);
    }

} //ErpProjectAccountingImpl
| |
// ----------------------------------------------------------------------------
// Copyright 2007-2013, GeoTelematic Solutions, Inc.
// All rights reserved
// ----------------------------------------------------------------------------
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// ----------------------------------------------------------------------------
// Change History:
// 2009/08/07 Martin D. Flynn
// -Initial release
// ----------------------------------------------------------------------------
package org.opengts.util;
import java.io.*;
import java.lang.management.*;
import java.net.*;
import java.nio.charset.StandardCharsets;
import java.util.*;
import javax.management.*;
import javax.management.remote.*;
import javax.xml.parsers.*;
import org.w3c.dom.*;
import org.xml.sax.*;
/**
*** ServiceRequest tools.
*** Assembles a GTS XML service request, optionally with authorization and a
*** caller-supplied body, and submits it over HTTP or JMX, returning the
*** response as a parsed XML Document.
**/
public class ServiceRequest
{

    // ------------------------------------------------------------------------

    protected static final String SRTAG_Request        = "GTSRequest";
    protected static final String SRTAG_Response       = "GTSResponse";
    protected static final String SRTAG_Message        = "Message";
    protected static final String SRTAG_Authorization  = "Authorization";

    protected static final String SRATTR_command       = "command";
    protected static final String SRATTR_result        = "result";
    protected static final String SRATTR_code          = "code";
    protected static final String SRATTR_account       = "account";
    protected static final String SRATTR_user          = "user";
    protected static final String SRATTR_password      = "password";

    protected static final String METHOD_handleRequest = "handleRequest";

    // ------------------------------------------------------------------------
    // ------------------------------------------------------------------------

    /**
    *** Request Body handler class
    **/
    public interface RequestBody
    {
        /**
        *** Appends this handler's request-body XML to the specified StringBuffer
        *** @param sb      The StringBuffer to append to
        *** @param indent  The number of prefixing spaces to include
        *** @return The StringBuffer
        **/
        public StringBuffer appendRequestBody(StringBuffer sb, int indent);
    }

    // ------------------------------------------------------------------------
    // ------------------------------------------------------------------------

    /**
    *** Authorization class: holds account/user/password credentials and renders
    *** them as an "Authorization" XML element.
    **/
    public static class Authorization
    {
        private ServiceRequest servReq   = null;
        private String         accountID = null;
        private String         userID    = null;
        private String         password  = null;

        /**
        *** Default constructor (all credential fields remain null)
        **/
        public Authorization() {
            super();
        }

        /**
        *** Copy constructor (the ServiceRequest back-reference is intentionally not copied)
        *** @param other  The Authorization to copy (null results in blank credentials)
        **/
        public Authorization(Authorization other) {
            super();
            if (other != null) {
                this.accountID = other.accountID;
                this.userID    = other.userID;
                this.password  = other.password;
            }
        }

        /**
        *** Constructor
        *** @param aid  The account ID (trimmed)
        *** @param uid  The user ID (trimmed)
        *** @param pwd  The password (stored as-is)
        **/
        public Authorization(String aid, String uid, String pwd) {
            this.servReq   = null;
            this.accountID = StringTools.trim(aid);
            this.userID    = StringTools.trim(uid);
            this.password  = pwd;
        }

        /**
        *** Sets the owning ServiceRequest (used to resolve tag/attribute names)
        *** @param servReq  The owning ServiceRequest
        **/
        public void setServiceRequest(ServiceRequest servReq) {
            this.servReq = servReq;
        }

        /**
        *** Appends this Authorization as an XML element to the specified StringBuffer
        *** @param sb      The StringBuffer (a new one is created if null)
        *** @param indent  The number of prefixing spaces to include
        *** @return The StringBuffer
        **/
        public StringBuffer toXML(StringBuffer sb, int indent) {
            if (sb == null) { sb = new StringBuffer(); }
            sb.append(StringTools.replicateString(" ", indent));
            sb.append("<"+this.getTagAuthorization());
            if (!StringTools.isBlank(this.accountID)) {
                sb.append(" "+this.getAttrAccount()+"=\""+this.accountID+"\"");
            }
            if (!StringTools.isBlank(this.userID)) {
                sb.append(" "+this.getAttrUser()+"=\""+this.userID+"\"");
            }
            if (!StringTools.isBlank(this.password)) {
                // NOTE(review): the password is emitted in clear text in the request XML
                sb.append(" "+this.getAttrPassword()+"=\""+this.password+"\"");
            }
            sb.append("/>\n");
            return sb;
        }

        /**
        *** Returns an XML string representation of this Authorization
        **/
        @Override
        public String toString() {
            return this.toXML(null,3).toString();
        }

        /**
        *** Gets the "Authorization" tag name (delegates to the owning ServiceRequest if set)
        **/
        public String getTagAuthorization() {
            return (this.servReq != null)? this.servReq.getTagAuthorization() : SRTAG_Authorization;
        }

        /**
        *** Gets the "account" attribute name (delegates to the owning ServiceRequest if set)
        **/
        public String getAttrAccount() {
            return (this.servReq != null)? this.servReq.getAttrAccount() : SRATTR_account;
        }

        /**
        *** Gets the "user" attribute name (delegates to the owning ServiceRequest if set)
        **/
        public String getAttrUser() {
            return (this.servReq != null)? this.servReq.getAttrUser() : SRATTR_user;
        }

        /**
        *** Gets the "password" attribute name (delegates to the owning ServiceRequest if set)
        **/
        public String getAttrPassword() {
            return (this.servReq != null)? this.servReq.getAttrPassword() : SRATTR_password;
        }

    }

    // ------------------------------------------------------------------------
    // ------------------------------------------------------------------------

    private JMXServiceURL jmxURL        = null;   // non-null iff this is a JMX request
    private String        jmxObjectName = "";
    private URL           serviceURL    = null;   // non-null iff this is an HTTP request
    private Authorization auth          = null;
    private String        command       = null;
    private RequestBody   requestBody   = null;

    /**
    *** Constructor
    **/
    public ServiceRequest()
    {
        super();
    }

    /**
    *** Clone Constructor
    **/
    public ServiceRequest(ServiceRequest other)
    {
        super();
        if (other != null) {
            this.jmxURL        = other.jmxURL;
            this.jmxObjectName = other.jmxObjectName;
            this.serviceURL    = other.serviceURL;
            this.command       = other.command;
            this.requestBody   = other.requestBody;
            if (other.auth != null) {
                this.setAuthorization(new Authorization(other.auth));
            }
        }
    }

    /**
    *** Constructor
    *** @param url  The Service URL ("http..." or "service:jmx:...")
    *** @throws MalformedURLException if the URL is neither HTTP nor JMX
    **/
    public ServiceRequest(String url)
        throws MalformedURLException
    {
        this.setURL(url);
    }

    // ----------

    /**
    *** Constructor
    *** @param serviceURL  The Service URL
    **/
    public ServiceRequest(URL serviceURL)
    {
        this.setURL(serviceURL);
    }

    // ----------

    /**
    *** Constructor
    *** @param jmxURL      The JMX Service URL
    *** @param jmxObjName  The JMX service object name
    **/
    public ServiceRequest(JMXServiceURL jmxURL, String jmxObjName)
    {
        this.setURL(jmxURL, jmxObjName);
    }

    // ------------------------------------------------------------------------

    /**
    *** Sets the service URL.  An "http..." prefix selects HTTP transport, a
    *** "service:jmx:" prefix selects JMX transport.
    *** @param url  The Service URL
    *** @return This ServiceRequest
    *** @throws MalformedURLException if the URL is neither HTTP nor JMX
    **/
    public ServiceRequest setURL(String url)
        throws MalformedURLException
    {
        this.jmxURL     = null;
        this.serviceURL = null;
        if (StringTools.startsWithIgnoreCase(url, "http")) {
            this.serviceURL = new URL(url);
        } else
        if (StringTools.startsWithIgnoreCase(url, "service:jmx:")) {
            this.jmxURL = new JMXServiceURL(url);
        } else {
            throw new MalformedURLException("Invalid URL: " + url);
        }
        return this;
    }

    // ----------

    /**
    *** Sets the service URL (HTTP transport)
    *** @param url  The Service URL
    *** @return This ServiceRequest
    **/
    public ServiceRequest setURL(URL url)
    {
        this.jmxURL     = null;
        this.serviceURL = url;
        return this;
    }

    /**
    *** Gets the service URL
    *** @return The Service URL (null if this is a JMX request)
    **/
    public URL getURL()
    {
        return this.serviceURL;
    }

    // ----------

    /**
    *** Sets the service URL (JMX transport)
    *** @param url         The JMX Service URL
    *** @param jmxObjName  The JMX service object name
    *** @return This ServiceRequest
    **/
    public ServiceRequest setURL(JMXServiceURL url, String jmxObjName)
    {
        this.jmxURL = url;
        this.setJMXObjectName(jmxObjName);
        this.serviceURL = null;
        return this;
    }

    /**
    *** Gets the JMX service URL
    *** @return The JMX Service URL (null if this is an HTTP request)
    **/
    public JMXServiceURL getJMXServiceURL()
    {
        return this.jmxURL;
    }

    /**
    *** Returns true if this is a JMX request
    *** @return True if this is a JMX request
    **/
    public boolean isJMX()
    {
        return (this.jmxURL != null);
    }

    // ------------------------------------------------------------------------

    /**
    *** Sets the JMX service object name
    *** @param objName  The JMX service object name (null is stored as "")
    **/
    public void setJMXObjectName(String objName)
    {
        this.jmxObjectName = (objName != null)? objName : "";
    }

    /**
    *** Gets the JMX service object name
    *** @return The JMX service object name
    **/
    public String getJMXObjectName()
    {
        return this.jmxObjectName;
    }

    // ------------------------------------------------------------------------

    /**
    *** Sets the Service Authorization
    *** @param acctID  The Authorization account ID
    *** @param userID  The Authorization user ID
    *** @param passwd  The Authorization password
    *** @return This ServiceRequest
    **/
    public ServiceRequest setAuthorization(String acctID, String userID, String passwd)
    {
        this.setAuthorization(new Authorization(acctID, userID, passwd));
        return this;
    }

    /**
    *** Sets the Service Authorization
    *** @param auth  The Authorization wrapper (may be null to clear)
    *** @return This ServiceRequest
    **/
    public ServiceRequest setAuthorization(Authorization auth)
    {
        this.auth = auth;
        if (this.auth != null) {
            this.auth.setServiceRequest(this);
        }
        return this;
    }

    // ------------------------------------------------------------------------

    /**
    *** Sets the command ID included in the request header
    *** @param cmd  The command ID (trimmed)
    *** @return This ServiceRequest
    **/
    public ServiceRequest setCommand(String cmd)
    {
        this.command = StringTools.trim(cmd);
        return this;
    }

    /**
    *** Gets the command ID included in the request header
    *** @return The command ID (never null)
    **/
    public String getCommand()
    {
        return (this.command != null)? this.command : "";
    }

    // ------------------------------------------------------------------------

    /**
    *** Sets the request body handler
    *** @param rb  The RequestBody handler
    *** @return This ServiceRequest
    **/
    public ServiceRequest setRequestBody(RequestBody rb)
    {
        this.requestBody = rb;
        return this;
    }

    /**
    *** Appends the request body to the specified StringBuffer
    *** @param sb      The StringBuffer
    *** @param indent  The prefixing spaces to include
    *** @return The StringBuffer
    **/
    public StringBuffer appendRequestBody(StringBuffer sb, int indent)
    {
        if (this.requestBody != null) {
            this.requestBody.appendRequestBody(sb, indent);
        }
        return sb;
    }

    // ------------------------------------------------------------------------

    /**
    *** Assembles and returns an XML request string
    *** @param command  The command ID to include in the header (defaults to the
    ***                 instance command if blank)
    *** @param rb       The RequestBody handler (defaults to the instance handler if null)
    *** @return The XML request
    **/
    public String toXML(String command, RequestBody rb)
    {
        StringBuffer sb = new StringBuffer();
        int indent = 3;
        /* header */
        sb.append("<?xml version='1.0' encoding='UTF-8' standalone='no' ?>\n");
        sb.append("<"+this.getTagRequest());
        String cmd = StringTools.blankDefault(command,this.getCommand());
        if (!StringTools.isBlank(cmd)) {
            sb.append(" "+this.getAttrCommand()+"=\""+cmd+"\"");
        }
        sb.append(">\n");
        /* request authorization */
        if (this.auth != null) {
            this.auth.toXML(sb,indent);
        }
        /* request body */
        if (rb != null) {
            rb.appendRequestBody(sb,indent);
        } else {
            this.appendRequestBody(sb,indent);
        }
        /* footer */
        sb.append("</"+this.getTagRequest()+">\n");
        return sb.toString();
    }

    /**
    *** Returns a XML string representation of this instance
    *** @return An XML string representation
    **/
    @Override
    public String toString()
    {
        return this.toXML(this.getCommand(), null);
    }

    // ------------------------------------------------------------------------

    /**
    *** Sends the request and returns the results as an XML Document
    *** @param command  The command ID to include in the header
    *** @param rb       The RequestBody handler
    *** @return The response XML Document
    *** @throws IOException if the transport fails or the response is not valid XML
    **/
    public Document sendRequest(String command, RequestBody rb)
        throws IOException
    {
        int timeoutMS = -1; // no explicit timeout
        return this._sendRequest(this.toXML(command, rb), timeoutMS);
    }

    /**
    *** Sends the request read from the specified file and returns the results
    *** as an XML Document
    *** @param reqFile  The file containing the XML request
    *** @return The response XML Document, or null if the file is missing/empty
    ***         or does not contain XML
    *** @throws IOException if the transport fails or the response is not valid XML
    **/
    public Document sendRequest(File reqFile)
        throws IOException
    {
        int timeoutMS = -1; // no explicit timeout
        byte rq[] = FileTools.readFile(reqFile);
        if (!ListTools.isEmpty(rq)) {
            String rqStr = StringTools.toStringValue(rq).trim();
            if (rqStr.startsWith("<")) {
                return this._sendRequest(rqStr, timeoutMS);
            } else {
                Print.logError("Invalid Request XML: \n" + rqStr);
                return null;
            }
        } else {
            Print.logError("File not found (or is empty): " + reqFile);
            return null;
        }
    }

    /**
    *** Sends the request over the configured transport (JMX or HTTP) and
    *** returns the results as an XML Document
    *** @param reqXMLStr  The XML request string
    *** @param timeoutMS  The HTTP timeout in milliseconds (ignored for JMX)
    *** @return The response XML Document
    *** @throws IOException if the transport fails or the response is not valid XML
    **/
    protected Document _sendRequest(String reqXMLStr, int timeoutMS)
        throws IOException
    {
        /* request */
        if (RTConfig.isDebugMode()) {
            if (this.isJMX()) {
                Print.logInfo("JMX url: " + this.getJMXServiceURL());
                Print.logInfo("   name: " + this.getJMXObjectName());
            } else {
                Print.logInfo("HTTP url: " + this.getURL());
            }
            Print.logInfo("Request:\n" + reqXMLStr);
        }

        /* service type */
        byte xmlResp[] = this.isJMX()?
            this._sendRequest_JMX(reqXMLStr) :
            this._sendRequest_HTTP(reqXMLStr, timeoutMS);
        if (RTConfig.isDebugMode()) {
            Print.logInfo("Response:\n" + StringTools.toStringValue(xmlResp).trim());
        }

        /* get XML Document */
        Document xmlDoc = XMLTools.getDocument(xmlResp);
        if (xmlDoc == null) {
            Print.logError("Response:\n" + StringTools.toStringValue(xmlResp).trim());
            throw new IOException("Response XML Document error");
        }
        return xmlDoc;
    }

    /**
    *** Sends the request via JMX and returns the raw response bytes
    *** @param reqXMLStr  The XML request string
    *** @return The response bytes (UTF-8 encoded)
    *** @throws IOException if the JMX connection or invocation fails
    **/
    protected byte[] _sendRequest_JMX(String reqXMLStr)
        throws IOException
    {
        /* connect to remote MBeanServer */
        JMXConnector jmxc = JMXConnectorFactory.connect(this.getJMXServiceURL(), null);

        /* invoke request handler */
        byte xmlResp[] = null;
        try {
            MBeanServerConnection mbsc = jmxc.getMBeanServerConnection();
            /* service name */
            String objName = this.getJMXObjectName(); // "org.opengts.extra.war.service:type=ServiceProxy";
            ObjectName name = null;
            try {
                name = new ObjectName(objName);
            } catch (MalformedObjectNameException mone) {
                // chain the cause so callers see the underlying parse failure
                throw new IOException("Invalid ObjectName: " + objName, mone);
            }
            /* invoke */
            try {
                String xml = (String)mbsc.invoke(name,
                    METHOD_handleRequest,
                    new Object[] { reqXMLStr },
                    new String[] { "java.lang.String" });
                // the request XML declares UTF-8; encode explicitly rather than
                // relying on the platform default charset
                xmlResp = xml.getBytes(StandardCharsets.UTF_8);
            } catch (Throwable th) { // MBeanException, InstanceNotFoundException
                throw new IOException("MBean Invocation: " + th.getMessage(), th);
            }
        } finally {
            /* close */
            try { jmxc.close(); } catch (Throwable th) { /* ignore close failure */ }
        }
        return xmlResp;
    }

    /**
    *** Sends the request via HTTP POST and returns the raw response bytes
    *** @param reqXMLStr  The XML request string
    *** @param timeoutMS  The HTTP timeout in milliseconds
    *** @return The response bytes
    *** @throws IOException if the HTTP request fails
    **/
    protected byte[] _sendRequest_HTTP(String reqXMLStr, int timeoutMS)
        throws IOException
    {
        // the request XML declares UTF-8; encode explicitly rather than relying
        // on the platform default charset
        byte xmlReq[] = reqXMLStr.getBytes(StandardCharsets.UTF_8);
        return HTMLTools.readPage_POST(this.getURL(), HTMLTools.MIME_XML(), xmlReq, timeoutMS);
    }

    // ------------------------------------------------------------------------
    // ------------------------------------------------------------------------

    /** Gets the response tag name */
    public String getTagResponse()
    {
        return SRTAG_Response;
    }

    /** Gets the request tag name */
    public String getTagRequest()
    {
        return SRTAG_Request;
    }

    /** Gets the authorization tag name */
    public String getTagAuthorization()
    {
        return SRTAG_Authorization;
    }

    /** Gets the "command" attribute name */
    public String getAttrCommand()
    {
        return SRATTR_command;
    }

    /** Gets the "result" attribute name */
    public String getAttrResult()
    {
        return SRATTR_result;
    }

    /** Gets the "account" attribute name */
    public String getAttrAccount()
    {
        return SRATTR_account;
    }

    /** Gets the "user" attribute name */
    public String getAttrUser()
    {
        return SRATTR_user;
    }

    /** Gets the "password" attribute name */
    public String getAttrPassword()
    {
        return SRATTR_password;
    }

    // ------------------------------------------------------------------------

}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.network.v2019_11_01.implementation;
import retrofit2.Retrofit;
import com.google.common.reflect.TypeToken;
import com.microsoft.azure.AzureServiceFuture;
import com.microsoft.azure.CloudException;
import com.microsoft.azure.ListOperationCallback;
import com.microsoft.azure.Page;
import com.microsoft.azure.PagedList;
import com.microsoft.rest.ServiceCallback;
import com.microsoft.rest.ServiceFuture;
import com.microsoft.rest.ServiceResponse;
import java.io.IOException;
import java.util.List;
import okhttp3.ResponseBody;
import retrofit2.http.GET;
import retrofit2.http.Header;
import retrofit2.http.Headers;
import retrofit2.http.Path;
import retrofit2.http.Query;
import retrofit2.http.Url;
import retrofit2.Response;
import rx.functions.Func1;
import rx.Observable;
/**
 * An instance of this class provides access to all the operations defined
 * in ExpressRouteLinks.
 */
public class ExpressRouteLinksInner {
    /** The Retrofit service to perform REST calls. */
    private ExpressRouteLinksService service;
    /** The service client containing this operation class. */
    private NetworkManagementClientImpl client;
    /**
     * Initializes an instance of ExpressRouteLinksInner.
     *
     * @param retrofit the Retrofit instance built from a Retrofit Builder.
     * @param client the instance of the service client containing this operation class.
     */
    public ExpressRouteLinksInner(Retrofit retrofit, NetworkManagementClientImpl client) {
        this.service = retrofit.create(ExpressRouteLinksService.class);
        this.client = client;
    }
    /**
     * The interface defining all the services for ExpressRouteLinks to be
     * used by Retrofit to actually perform REST calls.
     */
    interface ExpressRouteLinksService {
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2019_11_01.ExpressRouteLinks get" })
        @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ExpressRoutePorts/{expressRoutePortName}/links/{linkName}")
        Observable<Response<ResponseBody>> get(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("expressRoutePortName") String expressRoutePortName, @Path("linkName") String linkName, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2019_11_01.ExpressRouteLinks list" })
        @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ExpressRoutePorts/{expressRoutePortName}/links")
        Observable<Response<ResponseBody>> list(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("expressRoutePortName") String expressRoutePortName, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2019_11_01.ExpressRouteLinks listNext" })
        @GET
        Observable<Response<ResponseBody>> listNext(@Url String nextUrl, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
    }
    /**
     * Retrieves the specified ExpressRouteLink resource.
     *
     * @param resourceGroupName The name of the resource group.
     * @param expressRoutePortName The name of the ExpressRoutePort resource.
     * @param linkName The name of the ExpressRouteLink resource.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the ExpressRouteLinkInner object if successful.
     */
    public ExpressRouteLinkInner get(String resourceGroupName, String expressRoutePortName, String linkName) {
        return getWithServiceResponseAsync(resourceGroupName, expressRoutePortName, linkName).toBlocking().single().body();
    }
    /**
     * Retrieves the specified ExpressRouteLink resource.
     *
     * @param resourceGroupName The name of the resource group.
     * @param expressRoutePortName The name of the ExpressRoutePort resource.
     * @param linkName The name of the ExpressRouteLink resource.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<ExpressRouteLinkInner> getAsync(String resourceGroupName, String expressRoutePortName, String linkName, final ServiceCallback<ExpressRouteLinkInner> serviceCallback) {
        return ServiceFuture.fromResponse(getWithServiceResponseAsync(resourceGroupName, expressRoutePortName, linkName), serviceCallback);
    }
    /**
     * Retrieves the specified ExpressRouteLink resource.
     *
     * @param resourceGroupName The name of the resource group.
     * @param expressRoutePortName The name of the ExpressRoutePort resource.
     * @param linkName The name of the ExpressRouteLink resource.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the ExpressRouteLinkInner object
     */
    public Observable<ExpressRouteLinkInner> getAsync(String resourceGroupName, String expressRoutePortName, String linkName) {
        return getWithServiceResponseAsync(resourceGroupName, expressRoutePortName, linkName).map(new Func1<ServiceResponse<ExpressRouteLinkInner>, ExpressRouteLinkInner>() {
            @Override
            public ExpressRouteLinkInner call(ServiceResponse<ExpressRouteLinkInner> response) {
                return response.body();
            }
        });
    }
    /**
     * Retrieves the specified ExpressRouteLink resource.
     *
     * @param resourceGroupName The name of the resource group.
     * @param expressRoutePortName The name of the ExpressRoutePort resource.
     * @param linkName The name of the ExpressRouteLink resource.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the ExpressRouteLinkInner object
     */
    public Observable<ServiceResponse<ExpressRouteLinkInner>> getWithServiceResponseAsync(String resourceGroupName, String expressRoutePortName, String linkName) {
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (expressRoutePortName == null) {
            throw new IllegalArgumentException("Parameter expressRoutePortName is required and cannot be null.");
        }
        if (linkName == null) {
            throw new IllegalArgumentException("Parameter linkName is required and cannot be null.");
        }
        final String apiVersion = "2019-11-01";
        return service.get(this.client.subscriptionId(), resourceGroupName, expressRoutePortName, linkName, apiVersion, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<ExpressRouteLinkInner>>>() {
                @Override
                public Observable<ServiceResponse<ExpressRouteLinkInner>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<ExpressRouteLinkInner> clientResponse = getDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }
    // Deserializes a raw HTTP response into an ExpressRouteLinkInner (200 only).
    private ServiceResponse<ExpressRouteLinkInner> getDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<ExpressRouteLinkInner, CloudException>newInstance(this.client.serializerAdapter())
            .register(200, new TypeToken<ExpressRouteLinkInner>() { }.getType())
            .registerError(CloudException.class)
            .build(response);
    }
    /**
     * Retrieve the ExpressRouteLink sub-resources of the specified ExpressRoutePort resource.
     *
     * @param resourceGroupName The name of the resource group.
     * @param expressRoutePortName The name of the ExpressRoutePort resource.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the PagedList&lt;ExpressRouteLinkInner&gt; object if successful.
     */
    public PagedList<ExpressRouteLinkInner> list(final String resourceGroupName, final String expressRoutePortName) {
        ServiceResponse<Page<ExpressRouteLinkInner>> response = listSinglePageAsync(resourceGroupName, expressRoutePortName).toBlocking().single();
        return new PagedList<ExpressRouteLinkInner>(response.body()) {
            @Override
            public Page<ExpressRouteLinkInner> nextPage(String nextPageLink) {
                return listNextSinglePageAsync(nextPageLink).toBlocking().single().body();
            }
        };
    }
    /**
     * Retrieve the ExpressRouteLink sub-resources of the specified ExpressRoutePort resource.
     *
     * @param resourceGroupName The name of the resource group.
     * @param expressRoutePortName The name of the ExpressRoutePort resource.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<ExpressRouteLinkInner>> listAsync(final String resourceGroupName, final String expressRoutePortName, final ListOperationCallback<ExpressRouteLinkInner> serviceCallback) {
        return AzureServiceFuture.fromPageResponse(
            listSinglePageAsync(resourceGroupName, expressRoutePortName),
            new Func1<String, Observable<ServiceResponse<Page<ExpressRouteLinkInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<ExpressRouteLinkInner>>> call(String nextPageLink) {
                    return listNextSinglePageAsync(nextPageLink);
                }
            },
            serviceCallback);
    }
    /**
     * Retrieve the ExpressRouteLink sub-resources of the specified ExpressRoutePort resource.
     *
     * @param resourceGroupName The name of the resource group.
     * @param expressRoutePortName The name of the ExpressRoutePort resource.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;ExpressRouteLinkInner&gt; object
     */
    public Observable<Page<ExpressRouteLinkInner>> listAsync(final String resourceGroupName, final String expressRoutePortName) {
        return listWithServiceResponseAsync(resourceGroupName, expressRoutePortName)
            .map(new Func1<ServiceResponse<Page<ExpressRouteLinkInner>>, Page<ExpressRouteLinkInner>>() {
                @Override
                public Page<ExpressRouteLinkInner> call(ServiceResponse<Page<ExpressRouteLinkInner>> response) {
                    return response.body();
                }
            });
    }
    /**
     * Retrieve the ExpressRouteLink sub-resources of the specified ExpressRoutePort resource.
     *
     * @param resourceGroupName The name of the resource group.
     * @param expressRoutePortName The name of the ExpressRoutePort resource.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;ExpressRouteLinkInner&gt; object
     */
    public Observable<ServiceResponse<Page<ExpressRouteLinkInner>>> listWithServiceResponseAsync(final String resourceGroupName, final String expressRoutePortName) {
        return listSinglePageAsync(resourceGroupName, expressRoutePortName)
            .concatMap(new Func1<ServiceResponse<Page<ExpressRouteLinkInner>>, Observable<ServiceResponse<Page<ExpressRouteLinkInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<ExpressRouteLinkInner>>> call(ServiceResponse<Page<ExpressRouteLinkInner>> page) {
                    // Follow nextPageLink recursively until the server stops returning one.
                    String nextPageLink = page.body().nextPageLink();
                    if (nextPageLink == null) {
                        return Observable.just(page);
                    }
                    return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextPageLink));
                }
            });
    }
    /**
     * Retrieve the ExpressRouteLink sub-resources of the specified ExpressRoutePort resource.
     *
     * @param resourceGroupName The name of the resource group.
     * @param expressRoutePortName The name of the ExpressRoutePort resource.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the PagedList&lt;ExpressRouteLinkInner&gt; object wrapped in {@link ServiceResponse} if successful.
     */
    public Observable<ServiceResponse<Page<ExpressRouteLinkInner>>> listSinglePageAsync(final String resourceGroupName, final String expressRoutePortName) {
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (expressRoutePortName == null) {
            throw new IllegalArgumentException("Parameter expressRoutePortName is required and cannot be null.");
        }
        final String apiVersion = "2019-11-01";
        return service.list(this.client.subscriptionId(), resourceGroupName, expressRoutePortName, apiVersion, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<ExpressRouteLinkInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<ExpressRouteLinkInner>>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<PageImpl<ExpressRouteLinkInner>> result = listDelegate(response);
                        return Observable.just(new ServiceResponse<Page<ExpressRouteLinkInner>>(result.body(), result.response()));
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }
    // Deserializes a raw HTTP response into one page of ExpressRouteLinkInner (200 only).
    private ServiceResponse<PageImpl<ExpressRouteLinkInner>> listDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<PageImpl<ExpressRouteLinkInner>, CloudException>newInstance(this.client.serializerAdapter())
            .register(200, new TypeToken<PageImpl<ExpressRouteLinkInner>>() { }.getType())
            .registerError(CloudException.class)
            .build(response);
    }
    /**
     * Retrieve the ExpressRouteLink sub-resources of the specified ExpressRoutePort resource.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the PagedList&lt;ExpressRouteLinkInner&gt; object if successful.
     */
    public PagedList<ExpressRouteLinkInner> listNext(final String nextPageLink) {
        ServiceResponse<Page<ExpressRouteLinkInner>> response = listNextSinglePageAsync(nextPageLink).toBlocking().single();
        return new PagedList<ExpressRouteLinkInner>(response.body()) {
            @Override
            public Page<ExpressRouteLinkInner> nextPage(String nextPageLink) {
                return listNextSinglePageAsync(nextPageLink).toBlocking().single().body();
            }
        };
    }
    /**
     * Retrieve the ExpressRouteLink sub-resources of the specified ExpressRoutePort resource.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @param serviceFuture the ServiceFuture object tracking the Retrofit calls
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<ExpressRouteLinkInner>> listNextAsync(final String nextPageLink, final ServiceFuture<List<ExpressRouteLinkInner>> serviceFuture, final ListOperationCallback<ExpressRouteLinkInner> serviceCallback) {
        return AzureServiceFuture.fromPageResponse(
            listNextSinglePageAsync(nextPageLink),
            new Func1<String, Observable<ServiceResponse<Page<ExpressRouteLinkInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<ExpressRouteLinkInner>>> call(String nextPageLink) {
                    return listNextSinglePageAsync(nextPageLink);
                }
            },
            serviceCallback);
    }
    /**
     * Retrieve the ExpressRouteLink sub-resources of the specified ExpressRoutePort resource.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;ExpressRouteLinkInner&gt; object
     */
    public Observable<Page<ExpressRouteLinkInner>> listNextAsync(final String nextPageLink) {
        return listNextWithServiceResponseAsync(nextPageLink)
            .map(new Func1<ServiceResponse<Page<ExpressRouteLinkInner>>, Page<ExpressRouteLinkInner>>() {
                @Override
                public Page<ExpressRouteLinkInner> call(ServiceResponse<Page<ExpressRouteLinkInner>> response) {
                    return response.body();
                }
            });
    }
    /**
     * Retrieve the ExpressRouteLink sub-resources of the specified ExpressRoutePort resource.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;ExpressRouteLinkInner&gt; object
     */
    public Observable<ServiceResponse<Page<ExpressRouteLinkInner>>> listNextWithServiceResponseAsync(final String nextPageLink) {
        return listNextSinglePageAsync(nextPageLink)
            .concatMap(new Func1<ServiceResponse<Page<ExpressRouteLinkInner>>, Observable<ServiceResponse<Page<ExpressRouteLinkInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<ExpressRouteLinkInner>>> call(ServiceResponse<Page<ExpressRouteLinkInner>> page) {
                    // Follow nextPageLink recursively until the server stops returning one.
                    String nextPageLink = page.body().nextPageLink();
                    if (nextPageLink == null) {
                        return Observable.just(page);
                    }
                    return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextPageLink));
                }
            });
    }
    /**
     * Retrieve the ExpressRouteLink sub-resources of the specified ExpressRoutePort resource.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the PagedList&lt;ExpressRouteLinkInner&gt; object wrapped in {@link ServiceResponse} if successful.
     */
    public Observable<ServiceResponse<Page<ExpressRouteLinkInner>>> listNextSinglePageAsync(final String nextPageLink) {
        if (nextPageLink == null) {
            throw new IllegalArgumentException("Parameter nextPageLink is required and cannot be null.");
        }
        String nextUrl = String.format("%s", nextPageLink);
        return service.listNext(nextUrl, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<ExpressRouteLinkInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<ExpressRouteLinkInner>>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<PageImpl<ExpressRouteLinkInner>> result = listNextDelegate(response);
                        return Observable.just(new ServiceResponse<Page<ExpressRouteLinkInner>>(result.body(), result.response()));
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }
    // Deserializes a raw HTTP response into one page of ExpressRouteLinkInner (200 only).
    private ServiceResponse<PageImpl<ExpressRouteLinkInner>> listNextDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<PageImpl<ExpressRouteLinkInner>, CloudException>newInstance(this.client.serializerAdapter())
            .register(200, new TypeToken<PageImpl<ExpressRouteLinkInner>>() { }.getType())
            .registerError(CloudException.class)
            .build(response);
    }
}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.python.codeInsight;
import com.intellij.psi.PsiElement;
import com.jetbrains.python.psi.*;
import com.intellij.psi.util.QualifiedName;
import com.jetbrains.python.psi.resolve.QualifiedNameResolver;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
 * Describes a path to a PSI element (file, class, function, attribute, call or
 * assignment) that can be resolved lazily against a context element.
 *
 * @author yole
 */
public abstract class PyPsiPath {
  /**
   * Resolves this path to a PSI element.
   *
   * @param module the context element resolution starts from
   * @return the resolved element, or null if the path cannot be resolved
   */
  @Nullable
  public abstract PsiElement resolve(PsiElement module);
  /** Resolves a dotted qualified name to the corresponding file/module. */
  public static class ToFile extends PyPsiPath {
    private final QualifiedName myQualifiedName;
    public ToFile(String qualifiedName) {
      myQualifiedName = QualifiedName.fromDottedString(qualifiedName);
    }
    @Nullable
    @Override
    public PsiElement resolve(PsiElement context) {
      PyPsiFacade pyPsiFacade = PyPsiFacade.getInstance(context.getProject());
      QualifiedNameResolver visitor = pyPsiFacade.qualifiedNameResolver(myQualifiedName).fromElement(context);
      return visitor.firstResult();
    }
  }
  /** Resolves a fully qualified class name directly via {@link PyPsiFacade#findClass}. */
  public static class ToClassQName extends PyPsiPath {
    private final QualifiedName myQualifiedName;
    public ToClassQName(@NotNull final String qualifiedName) {
      myQualifiedName = QualifiedName.fromDottedString(qualifiedName);
    }
    @Nullable
    @Override
    public PsiElement resolve(PsiElement context) {
      return PyPsiFacade.getInstance(context.getProject()).findClass(myQualifiedName.toString());
    }
  }
  /** Resolves a class by name inside the element located by the parent path. */
  public static class ToClass extends PyPsiPath {
    private final PyPsiPath myParent;
    private final String myClassName;
    public ToClass(PyPsiPath parent, String className) {
      myParent = parent;
      myClassName = className;
    }
    @Override
    public PsiElement resolve(PsiElement context) {
      PsiElement parent = myParent.resolve(context);
      if (parent == null) {
        return null;
      }
      if (parent instanceof PyFile) {
        return ((PyFile) parent).findTopLevelClass(myClassName);
      }
      if (parent instanceof PyClass) {
        // Direct children only: look for a nested class with the requested name.
        for (PsiElement element : parent.getChildren()) {
          if (element instanceof PyClass && myClassName.equals(((PyClass)element).getName())) {
            return element;
          }
        }
      }
      // Fall back to a recursive search; if nothing matches, return the parent itself.
      ClassFinder finder = new ClassFinder(myClassName);
      parent.acceptChildren(finder);
      return finder.myResult != null ? finder.myResult : parent;
    }
  }
  /** Recursive visitor that records a class with the given name. */
  private static class ClassFinder extends PyRecursiveElementVisitor {
    private final String myName;
    private PyClass myResult;       // last matching class seen, or null
    public ClassFinder(String name) {
      myName = name;
    }
    @Override
    public void visitPyClass(PyClass node) {
      super.visitPyClass(node);
      // Traversal is not short-circuited: a later match overwrites an earlier one.
      if (myName.equals(node.getName())) {
        myResult = node;
      }
    }
  }
  /** Resolves a function or method by name inside the element located by the parent path. */
  public static class ToFunction extends PyPsiPath {
    private final PyPsiPath myParent;
    private final String myFunctionName;
    public ToFunction(PyPsiPath parent, String functionName) {
      myParent = parent;
      myFunctionName = functionName;
    }
    @Override
    public PsiElement resolve(PsiElement context) {
      PsiElement parent = myParent.resolve(context);
      if (parent == null) {
        return null;
      }
      if (parent instanceof PyFile) {
        return ((PyFile) parent).findTopLevelFunction(myFunctionName);
      }
      if (parent instanceof PyClass) {
        // Own methods only (no ancestor lookup).
        return ((PyClass) parent).findMethodByName(myFunctionName, false, null);
      }
      // Direct children only; if nothing matches, return the parent itself.
      for (PsiElement element : parent.getChildren()) {
        if (element instanceof PyFunction && myFunctionName.equals(((PyFunction)element).getName())) {
          return element;
        }
      }
      return parent;
    }
  }
  /** Recursive visitor that records a function with the given name. */
  private static class FunctionFinder extends PyRecursiveElementVisitor {
    private final String myName;
    private PyFunction myResult;    // last matching function seen, or null
    public FunctionFinder(String name) {
      myName = name;
    }
    @Override
    public void visitPyFunction(PyFunction node) {
      super.visitPyFunction(node);
      if (myName.equals(node.getName())) {
        myResult = node;
      }
    }
  }
  /** Like {@link ToFunction}, but searches the parent's whole subtree recursively. */
  public static class ToFunctionRecursive extends PyPsiPath {
    private final PyPsiPath myParent;
    private final String myFunctionName;
    public ToFunctionRecursive(PyPsiPath parent, String functionName) {
      myParent = parent;
      myFunctionName = functionName;
    }
    @Override
    public PsiElement resolve(PsiElement context) {
      PsiElement parent = myParent.resolve(context);
      if (parent == null) {
        return null;
      }
      FunctionFinder finder = new FunctionFinder(myFunctionName);
      parent.acceptChildren(finder);
      // If nothing matches, return the parent itself.
      return finder.myResult != null ? finder.myResult : parent;
    }
  }
  /** Resolves a class attribute by name; the parent path must resolve to a class. */
  public static class ToClassAttribute extends PyPsiPath {
    private final PyPsiPath myParent;
    private final String myAttributeName;
    public ToClassAttribute(PyPsiPath parent, String attributeName) {
      myAttributeName = attributeName;
      myParent = parent;
    }
    @Override
    public PsiElement resolve(PsiElement context) {
      PsiElement parent = myParent.resolve(context);
      if (!(parent instanceof PyClass)) {
        return null;
      }
      // Inherited attributes are included (second argument is true).
      return ((PyClass)parent).findClassAttribute(myAttributeName, true, null);
    }
  }
  /** Resolves a call expression by callee name and leading string-literal arguments. */
  public static class ToCall extends PyPsiPath {
    private final PyPsiPath myParent;
    private final String myCallName;
    private final String[] myArgs;
    public ToCall(PyPsiPath parent, String callName, String... args) {
      myParent = parent;
      myCallName = callName;
      myArgs = args;
    }
    @Override
    public PsiElement resolve(PsiElement context) {
      PsiElement parent = myParent.resolve(context);
      if (parent == null) {
        return null;
      }
      CallFinder finder = new CallFinder(myCallName, myArgs);
      parent.accept(finder);
      // If no matching call is found, return the parent itself.
      return finder.myResult != null ? finder.myResult : parent;
    }
  }
  /** Recursive visitor matching a call by callee name and string-literal arguments. */
  private static class CallFinder extends PyRecursiveElementVisitor {
    private PsiElement myResult;    // first matching call, or null
    private final String myCallName;
    private final String[] myArgs;
    public CallFinder(String callName, String[] args) {
      myCallName = callName;
      myArgs = args;
    }
    @Override
    public void visitPyCallExpression(PyCallExpression node) {
      // First match wins: skip further work once a result is recorded.
      if (myResult != null) {
        return;
      }
      super.visitPyCallExpression(node);
      final PyExpression callee = node.getCallee();
      if (callee instanceof PyReferenceExpression) {
        String calleeName = ((PyReferenceExpression) callee).getReferencedName();
        if (myCallName.equals(calleeName)) {
          final PyExpression[] args = node.getArguments();
          // The call may have extra trailing arguments; only the leading ones are
          // compared, and each must be a string literal with the expected value.
          if (myArgs.length <= args.length) {
            boolean argsMatch = true;
            for (int i = 0; i < myArgs.length; i++) {
              if (!(args[i] instanceof PyStringLiteralExpression) ||
                  !myArgs [i].equals(((PyStringLiteralExpression)args[i]).getStringValue())) {
                argsMatch = false;
                break;
              }
            }
            if (argsMatch) {
              myResult = node;
            }
          }
        }
      }
    }
  }
  /** Resolves an assignment statement whose left-hand-side text equals the assignee. */
  public static class ToAssignment extends PyPsiPath {
    private final PyPsiPath myParent;
    private final String myAssignee;
    public ToAssignment(PyPsiPath parent, String assignee) {
      myParent = parent;
      myAssignee = assignee;
    }
    @Nullable
    @Override
    public PsiElement resolve(PsiElement context) {
      PsiElement parent = myParent.resolve(context);
      if (parent == null) {
        return null;
      }
      AssignmentFinder finder = new AssignmentFinder(myAssignee);
      parent.accept(finder);
      // If no matching assignment is found, return the parent itself.
      return finder.myResult != null ? finder.myResult : parent;
    }
  }
  /** Visitor matching an assignment statement by the text of its left-hand side. */
  private static class AssignmentFinder extends PyRecursiveElementVisitor {
    private final String myAssignee;
    private PsiElement myResult;    // last matching assignment, or null
    public AssignmentFinder(String assignee) {
      myAssignee = assignee;
    }
    @Override
    public void visitPyAssignmentStatement(PyAssignmentStatement node) {
      // super is intentionally not called, so the assignment's children are not descended into.
      PyExpression lhs = node.getLeftHandSideExpression();
      if (lhs != null && myAssignee.equals(lhs.getText())) {
        myResult = node;
      }
    }
  }
}
| |
/*
* Copyright (C) 2012-2015 DataStax Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datastax.driver.core;
import com.datastax.driver.core.exceptions.InvalidTypeException;
import java.nio.ByteBuffer;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
/**
* A simple {@code RegularStatement} implementation built directly from a query
* string.
*/
public class SimpleStatement extends RegularStatement {
    // The raw query string (may contain bind markers).
    private final String query;
    // Positional values for anonymous bind markers; null when named values are used.
    private final Object[] values;
    // Named values for ":name" bind markers; null when positional values are used.
    private final Map<String, Object> namedValues;
    // Cached routing key; volatile because statements may be shared across threads.
    private volatile ByteBuffer routingKey;
    // Keyspace used for routing; volatile for safe cross-thread publication.
    private volatile String keyspace;
    /**
     * Creates a new {@code SimpleStatement} with the provided query string (and no values).
     *
     * @param query the query string.
     */
    public SimpleStatement(String query) {
        // Delegate with a null (not empty) array so that getValues() reports "no values".
        this(query, (Object[]) null);
    }
/**
* Creates a new {@code SimpleStatement} with the provided query string and values.
* <p/>
* This version of SimpleStatement is useful when you want to execute a
* query only once (and thus do not want to resort to prepared statement), but
* do not want to convert all column values to string (typically, if you have blob
* values, encoding them to a hexadecimal string is not very efficient). In
* that case, you can provide a query string with bind markers to this constructor
* along with the values for those bind variables. When executed, the server will
     * prepare the provided query, bind the provided values to that prepared statement and
     * execute the resulting statement. Thus,
* <pre>
* session.execute(new SimpleStatement(query, value1, value2, value3));
* </pre>
* is functionally equivalent to
* <pre>
* PreparedStatement ps = session.prepare(query);
* session.execute(ps.bind(value1, value2, value3));
* </pre>
* except that the former version:
* <ul>
* <li>Requires only one round-trip to a Cassandra node.</li>
     * <li>Does not leave any prepared statement stored in memory (neither client nor
     * server side) once it has been executed.</li>
* </ul>
* <p/>
* Note that the types of the {@code values} provided to this method will
* not be validated by the driver as is done by {@link BoundStatement#bind} since
* {@code query} is not parsed (and hence the driver cannot know what those values
* should be). The codec to serialize each value will be chosen in the codec registry
* associated with the cluster executing this statement, based on the value's Java type
* (this is the equivalent to calling {@link CodecRegistry#codecFor(Object)}).
* If too many or too few values are provided, or if a value is not a valid one for
* the variable it is bound to, an
* {@link com.datastax.driver.core.exceptions.InvalidQueryException} will be thrown
* by Cassandra at execution time. A {@code CodecNotFoundException} may be
* thrown by this constructor however, if the codec registry does not know how to
* handle one of the values.
* <p/>
* If you have a single value of type {@code Map<String, Object>}, you can't call this
* constructor using the varargs syntax, because the signature collides with
* {@link #SimpleStatement(String, Map)}. To prevent this, pass an explicit
* array object:
* <pre>
* new SimpleStatement("...", new Object[]{m});
* </pre>
*
* @param query the query string.
* @param values values required for the execution of {@code query}.
* @throws IllegalArgumentException if the number of values is greater than 65535.
*/
public SimpleStatement(String query, Object... values) {
if (values != null && values.length > 65535)
throw new IllegalArgumentException("Too many values, the maximum allowed is 65535");
this.query = query;
this.values = values;
this.namedValues = null;
}
/**
* Creates a new {@code SimpleStatement} with the provided query string and named values.
* <p/>
* This constructor requires that the query string use named placeholders, for example:
* <pre>{@code
* new SimpleStatement("SELECT * FROM users WHERE id = :i", ImmutableMap.<String, Object>of("i", 1));}
* </pre>
* Make sure that the map is correctly typed {@code Map<String, Object>}, otherwise you might
* accidentally call {@link #SimpleStatement(String, Object...)} with a positional value of type map.
* <p/>
* The types of the values will be handled the same way as with anonymous placeholders (see
* {@link #SimpleStatement(String, Object...)}).
* <p/>
* Simple statements with named values are only supported starting with native protocol
* {@link ProtocolVersion#V3 v3}. With earlier versions, an
* {@link com.datastax.driver.core.exceptions.UnsupportedFeatureException} will be thrown at execution time.
*
* @param query the query string.
* @param values named values required for the execution of {@code query}.
* @throws IllegalArgumentException if the number of values is greater than 65535.
*/
public SimpleStatement(String query, Map<String, Object> values) {
if (values.size() > 65535)
throw new IllegalArgumentException("Too many values, the maximum allowed is 65535");
this.query = query;
this.values = null;
this.namedValues = values;
}
    @Override
    public String getQueryString(CodecRegistry codecRegistry) {
        // The query string is stored verbatim; no codec is required to render it.
        return query;
    }
@Override
public ByteBuffer[] getValues(ProtocolVersion protocolVersion, CodecRegistry codecRegistry) {
if (values == null)
return null;
return convert(values, protocolVersion, codecRegistry);
}
@Override
public Map<String, ByteBuffer> getNamedValues(ProtocolVersion protocolVersion, CodecRegistry codecRegistry) {
if (namedValues == null)
return null;
return convert(namedValues, protocolVersion, codecRegistry);
}
/**
* The number of values for this statement, that is the size of the array
* that will be returned by {@code getValues}.
*
* @return the number of values.
*/
public int valuesCount() {
if (values != null)
return values.length;
else if (namedValues != null)
return namedValues.size();
else
return 0;
}
@Override
public boolean hasValues(CodecRegistry codecRegistry) {
return (values != null && values.length > 0)
|| (namedValues != null && namedValues.size() > 0);
}
@Override
public boolean usesNamedValues() {
return namedValues != null && namedValues.size() > 0;
}
    /**
     * Returns the {@code i}th positional value as the Java type matching its CQL type.
     * <p/>
     * Note that, unlike with other driver types like {@link Row}, you can't retrieve named values by position. This
     * getter will throw an exception if the statement was created with named values (or no values at all). Call
     * {@link #usesNamedValues()} to check the type of values, and {@link #getObject(String)} if they are named.
     *
     * @param i the index to retrieve.
     * @return the {@code i}th value of this statement.
     * @throws IllegalStateException if this statement does not have positional values.
     * @throws IndexOutOfBoundsException if {@code i} is not a valid index for this object.
     */
    public Object getObject(int i) {
        if (values == null)
            throw new IllegalStateException("This statement does not have positional values");
        if (i < 0 || i >= values.length)
            throw new ArrayIndexOutOfBoundsException(i);
        return values[i];
    }
    /**
     * Returns a named value as the Java type matching its CQL type.
     *
     * @param name the name of the value to retrieve.
     * @return the value that matches the name, or {@code null} if there is no such name.
     * @throws IllegalStateException if this statement does not have named values.
     */
    public Object getObject(String name) {
        if (namedValues == null)
            throw new IllegalStateException("This statement does not have named values");
        // Map.get returns null both for absent names and for explicitly-null values.
        return namedValues.get(name);
    }
    /**
     * Returns the names of the named values of this statement.
     *
     * @return an unmodifiable view of the names of the named values of this statement.
     * @throws IllegalStateException if this statement does not have named values.
     */
    public Set<String> getValueNames() {
        if (namedValues == null)
            throw new IllegalStateException("This statement does not have named values");
        return Collections.unmodifiableSet(namedValues.keySet());
    }
    /**
     * Returns the routing key for the query.
     * <p/>
     * Unless the routing key has been explicitly set through
     * {@link #setRoutingKey}, this method will return {@code null} to
     * avoid having to parse the query string to retrieve the partition key.
     *
     * @param protocolVersion unused by this implementation (no internal serialization is required to compute the key).
     * @param codecRegistry unused by this implementation (no internal serialization is required to compute the key).
     * @return the routing key set through {@link #setRoutingKey} if such a key
     * was set, {@code null} otherwise.
     * @see Statement#getRoutingKey
     */
    @Override
    public ByteBuffer getRoutingKey(ProtocolVersion protocolVersion, CodecRegistry codecRegistry) {
        return routingKey;
    }
    /**
     * Sets the routing key for this query.
     * <p/>
     * This method allows you to manually provide a routing key for this query. It
     * is thus optional, since the routing key is only a hint for the token aware
     * load balancing policy and is never mandatory.
     * <p/>
     * If the partition key for the query is composite, use the
     * {@link #setRoutingKey(ByteBuffer...)} method instead to build the
     * routing key.
     *
     * @param routingKey the raw (binary) value to use as routing key.
     * @return this {@code SimpleStatement} object.
     * @see Statement#getRoutingKey
     */
    public SimpleStatement setRoutingKey(ByteBuffer routingKey) {
        this.routingKey = routingKey;
        return this;
    }
    /**
     * Returns the keyspace this query operates on.
     * <p/>
     * Unless the keyspace has been explicitly set through {@link #setKeyspace},
     * this method will return {@code null} to avoid having to parse the query
     * string.
     *
     * @return the keyspace set through {@link #setKeyspace} if such keyspace was
     * set, {@code null} otherwise.
     * @see Statement#getKeyspace
     */
    @Override
    public String getKeyspace() {
        return keyspace;
    }
    /**
     * Sets the keyspace this query operates on.
     * <p/>
     * This method allows you to manually provide a keyspace for this query. It
     * is thus optional, since the value returned by this method is only a hint
     * for the token aware load balancing policy and is never mandatory.
     * <p/>
     * Do note that if the query does not use a fully qualified keyspace, then
     * you do not need to set the keyspace through this method, as the
     * currently logged in keyspace will be used.
     *
     * @param keyspace the name of the keyspace this query operates on.
     * @return this {@code SimpleStatement} object.
     * @see Statement#getKeyspace
     */
    public SimpleStatement setKeyspace(String keyspace) {
        this.keyspace = keyspace;
        return this;
    }
    /**
     * Sets the routing key for this query.
     * <p/>
     * See {@link #setRoutingKey(ByteBuffer)} for more information. This
     * method is a variant for when the query partition key is composite and
     * thus the routing key must be built from multiple values.
     *
     * @param routingKeyComponents the raw (binary) values to compose to obtain
     * the routing key.
     * @return this {@code SimpleStatement} object.
     * @see Statement#getRoutingKey
     */
    public SimpleStatement setRoutingKey(ByteBuffer... routingKeyComponents) {
        // compose() serializes the components in Cassandra's composite-key format.
        this.routingKey = compose(routingKeyComponents);
        return this;
    }
/*
* This method performs a best-effort heuristic to guess which codec to use.
* Note that this is not particularly efficient as the codec registry needs to iterate over
* the registered codecs until it finds a suitable one.
*/
private static ByteBuffer[] convert(Object[] values, ProtocolVersion protocolVersion, CodecRegistry codecRegistry) {
ByteBuffer[] serializedValues = new ByteBuffer[values.length];
for (int i = 0; i < values.length; i++) {
Object value = values[i];
if (value == null) {
// impossible to locate the right codec when object is null,
// so forcing the result to null
serializedValues[i] = null;
} else {
if (value instanceof Token) {
// bypass CodecRegistry for Token instances
serializedValues[i] = ((Token) value).serialize(protocolVersion);
} else {
try {
TypeCodec<Object> codec = codecRegistry.codecFor(value);
serializedValues[i] = codec.serialize(value, protocolVersion);
} catch (Exception e) {
// Catch and rethrow to provide a more helpful error message (one that include which value is bad)
throw new InvalidTypeException(String.format("Value %d of type %s does not correspond to any CQL3 type", i, value.getClass()), e);
}
}
}
}
return serializedValues;
}
private static Map<String, ByteBuffer> convert(Map<String, Object> values, ProtocolVersion protocolVersion, CodecRegistry codecRegistry) {
Map<String, ByteBuffer> serializedValues = new HashMap<String, ByteBuffer>();
for (Map.Entry<String, Object> entry : values.entrySet()) {
String name = entry.getKey();
Object value = entry.getValue();
if (value == null) {
// impossible to locate the right codec when object is null,
// so forcing the result to null
serializedValues.put(name, null);
} else {
if (value instanceof Token) {
// bypass CodecRegistry for Token instances
serializedValues.put(name, ((Token) value).serialize(protocolVersion));
} else {
try {
TypeCodec<Object> codec = codecRegistry.codecFor(value);
serializedValues.put(name, codec.serialize(value, protocolVersion));
} catch (Exception e) {
// Catch and rethrow to provide a more helpful error message (one that include which value is bad)
throw new InvalidTypeException(String.format("Value '%s' of type %s does not correspond to any CQL3 type", name, value.getClass()), e);
}
}
}
}
return serializedValues;
}
/**
* Utility method to assemble different routing key components into a single {@link ByteBuffer}.
* Mainly intended for statements that need to generate a routing key out of their current values.
*
* @param buffers the components of the routing key.
* @return A ByteBuffer containing the serialized routing key
*/
static ByteBuffer compose(ByteBuffer... buffers) {
if (buffers.length == 1)
return buffers[0];
int totalLength = 0;
for (ByteBuffer bb : buffers)
totalLength += 2 + bb.remaining() + 1;
ByteBuffer out = ByteBuffer.allocate(totalLength);
for (ByteBuffer buffer : buffers) {
ByteBuffer bb = buffer.duplicate();
putShortLength(out, bb.remaining());
out.put(bb);
out.put((byte) 0);
}
out.flip();
return out;
}
    // Writes the low 16 bits of {@code length} as a 2-byte big-endian prefix.
    static void putShortLength(ByteBuffer bb, int length) {
        bb.put((byte) ((length >> 8) & 0xFF));
        bb.put((byte) (length & 0xFF));
    }
}
| |
/**
* Copyright (C) 2015 Orange
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.francetelecom.clara.cloud.commons;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.persistence.Column;
import javax.persistence.Embeddable;
import javax.xml.bind.annotation.XmlRootElement;
import java.io.Serializable;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.NoSuchElementException;
import java.util.StringTokenizer;
/**
 * Reference to a Maven Artifact, identified by its GAV coordinates
 * (groupId:artifactId:version) plus an optional classifier and extension.
 * Embeddable as a JPA value object; the transient {@code accessUrl} is not part
 * of the artifact's identity.
 */
@Embeddable
@XmlRootElement
public class MavenReference implements Serializable {
    /**
     * Type, packaging and extension are often the same but not always. Type
     * sources actually means classifier sources and extension jar for example.
     * Packaging maven-plugin actually means extension jar.
     * http://maven.40175.n5.nabble.com/standardized-Maven-GAV-URN-tp511480p512233.html
     */
    private static final long serialVersionUID = 6454307533381753371L;

    // final: the logger is a class-level constant (was previously non-final).
    private static final Logger logger = LoggerFactory.getLogger(MavenReference.class);

    @GuiMapping(status = GuiMapping.StatusType.SUPPORTED)
    private String groupId;

    @GuiMapping(status = GuiMapping.StatusType.SUPPORTED)
    private String artifactId;

    @GuiMapping(status = GuiMapping.StatusType.SUPPORTED)
    @Column(name="MVN_VERSION")
    private String version;

    private String extension = null;

    @GuiMapping(status = GuiMapping.StatusType.SUPPORTED)
    private String classifier = null;

    // Stored as a String to permit JPA mapping; excluded from equals/hashCode.
    transient private String accessUrl;

    private static final String[] EXCLUDED_EQUALS_FIELDS = new String[] { "accessUrl" };

    /**
     * JPA required default constructor
     */
    public MavenReference() {
    }

    /**
     * Copy constructor. Create a maven reference from another.
     *
     * @param source Reference to copy
     */
    public MavenReference(MavenReference source) {
        this.groupId = source.groupId;
        this.artifactId = source.artifactId;
        this.version = source.version;
        this.extension = source.extension;
        this.classifier = source.classifier;
        if (source.getAccessUrl() != null) {
            try {
                this.setAccessUrl(new URL(source.getAccessUrl().toExternalForm()));
            } catch (MalformedURLException e) {
                // Best effort: a malformed URL is dropped rather than failing the copy.
                logger.info("Malformed url detected while coping Maven reference. Ignoring accessUrl for " + source.toString());
            }
        }
    }

    /** Creates a reference with the default "jar" extension and no classifier. */
    public MavenReference(String groupId, String artifactId, String version) {
        this (groupId, artifactId, version, "jar");
    }

    /** Creates a reference with the given extension and no classifier. */
    public MavenReference(String groupId, String artifactId, String version, String extension) {
        this (groupId, artifactId, version, extension, null);
    }

    /**
     * Creates a fully specified reference.
     *
     * @param groupId group identifier (may be null for "inherited")
     * @param artifactId artifact identifier
     * @param version artifact version (may be null for "inherited")
     * @param extension artifact extension, e.g. "jar"
     * @param classifier optional classifier, e.g. "sources", or null
     */
    public MavenReference(String groupId, String artifactId, String version, String extension, String classifier) {
        this.groupId = groupId;
        this.artifactId = artifactId;
        this.version = version;
        this.extension = extension;
        this.classifier = classifier;
    }

    public String getGroupId() {
        return groupId;
    }

    public void setGroupId(String groupId) {
        this.groupId = groupId;
    }

    public String getArtifactId() {
        return artifactId;
    }

    public void setArtifactId(String artifactId) {
        this.artifactId = artifactId;
    }

    public String getVersion() {
        return version;
    }

    public void setVersion(String version) {
        this.version = version;
    }

    /**
     * @deprecated invalid getter name, use {@link #getExtension()} instead
     */
    @Deprecated
    public String getType() {
        return extension;
    }

    /**
     * @deprecated invalid setter name, use {@link #setExtension(String)} instead
     */
    @Deprecated
    public void setType(String extension) {
        this.extension = extension;
    }

    public String getExtension() {
        return extension;
    }

    public void setExtension(String extension) {
        this.extension = extension;
    }

    public String getClassifier() {
        return classifier;
    }

    public void setClassifier(String classifier) {
        this.classifier = classifier;
    }

    // TODO add JPA compatible equals and hashcode methods
    // NOTE(review): reflection-based equality ignores accessUrl by design.
    @Override
    public boolean equals(Object obj) {
        return EqualsBuilder.reflectionEquals(this, obj, EXCLUDED_EQUALS_FIELDS);
    }

    @Override
    public int hashCode() {
        return HashCodeBuilder.reflectionHashCode(this, EXCLUDED_EQUALS_FIELDS);
    }

    /**
     * Display maven reference as a GAV reference
     */
    @Override
    public String toString() {
        // Delegates directly; the previous StringBuilder indirection added nothing.
        return toGavString();
    }

    /**
     * Construct a MavenReference from a gav String
     * @param gav
     *            format : Group:artifact:version:[classifier:[extension]]
     * @return Corresponding maven reference
     */
    public static MavenReference fromGavString(String gav) {
        MavenReference resultMavenRef;
        // Shortest valid input is "g:a:v" (5 chars).
        if (gav == null || gav.length() < 5) {
            throw new IllegalArgumentException("String gav cannot be null nor empty");
        }
        // returnDelims=true so empty fields show up as ":" tokens.
        StringTokenizer gavTokenizer = new StringTokenizer(gav, ":", true);
        try {
            String groupId = gavTokenizer.nextToken();
            if (":".equals(groupId)) {
                throw new NoSuchElementException("Missing groupId");
            }
            gavTokenizer.nextToken(); // consume separator
            String artifactId = gavTokenizer.nextToken();
            if (":".equals(artifactId)) {
                throw new NoSuchElementException("Missing artifactId");
            }
            gavTokenizer.nextToken(); // consume separator
            String version = gavTokenizer.nextToken();
            if (":".equals(version)) {
                throw new NoSuchElementException("Missing version");
            }
            if (gavTokenizer.hasMoreTokens()) {
                gavTokenizer.nextToken(); // consume separator
            }
            resultMavenRef = new MavenReference(groupId, artifactId, version);
            if (gavTokenizer.hasMoreTokens()) {
                String classifier = gavTokenizer.nextToken();
                // A ":" here means the classifier field is empty.
                if (!":".equals(classifier)) {
                    resultMavenRef.setClassifier(classifier);
                    if (gavTokenizer.hasMoreTokens()) {
                        gavTokenizer.nextToken(); // consume separator
                    }
                }
            }
            if (gavTokenizer.hasMoreTokens()) {
                String extension = gavTokenizer.nextToken();
                if (!":".equals(extension)) {
                    resultMavenRef.setExtension(extension);
                }
            }
        } catch (NoSuchElementException nsee) {
            throw new IllegalArgumentException("Wrong format, should be groupId:artifactId:version:[classifier:[extension]]");
        }
        return resultMavenRef;
    }

    /**
     * Return a generic identifier for the MavenReference
     * Group:artifact:version:classifier:extension If groupId or version are
     * null, then inherited is displayed. Other fields are displayed if present.
     */
    public String toGavString() {
        StringBuilder result = new StringBuilder(64);
        result.append((getGroupId() == null) ? "[inherited]" : getGroupId());
        result.append(":");
        result.append(getArtifactId());
        result.append(":");
        result.append((getVersion() == null) ? "[inherited]" : getVersion());
        result.append(":");
        result.append((getClassifier() == null) ? "" : getClassifier());
        result.append(":");
        result.append((getExtension() == null) ? "" : getExtension());
        return result.toString();
    }

    /**
     * Modified getter: the URL is stored internally as a String (for JPA mapping)
     * and converted back on access.
     *
     * @return the access URL, or null if none was set
     * @throws IllegalArgumentException if the stored string is not a valid URL
     */
    public URL getAccessUrl() {
        if (this.accessUrl == null) {
            return null;
        }
        try {
            return new URL(this.accessUrl);
        } catch (MalformedURLException e) {
            // Should not happen: setAccessUrl only stores values produced by URL.toString().
            logger.error("internal error : wrong url format??");
            throw new IllegalArgumentException(e);
        }
    }

    /**
     * Modified setter: internal storage as String to permit JPA mapping.
     *
     * @param accessUrl the URL where the artifact can be fetched, or null
     */
    public void setAccessUrl(URL accessUrl) {
        if (accessUrl == null) {
            this.accessUrl = null;
        } else {
            this.accessUrl = accessUrl.toString();
        }
    }

    // Maps empty strings to null; used by duplicateWithNull().
    private static String nullIfEmpty(String value) {
        return (value != null && value.isEmpty()) ? null : value;
    }

    // Maps null to the empty string; used by duplicateWithEmpty().
    private static String emptyIfNull(String value) {
        return value == null ? "" : value;
    }

    /**
     * Returns a copy of this reference where every empty ("") field has been
     * replaced by null.
     */
    public MavenReference duplicateWithNull() {
        MavenReference result = new MavenReference(this);
        result.accessUrl = nullIfEmpty(result.accessUrl);
        result.artifactId = nullIfEmpty(result.artifactId);
        result.classifier = nullIfEmpty(result.classifier);
        result.extension = nullIfEmpty(result.extension);
        result.groupId = nullIfEmpty(result.groupId);
        result.version = nullIfEmpty(result.version);
        return result;
    }

    /**
     * Returns a copy of this reference where every null field has been replaced
     * by the empty string ("").
     */
    public MavenReference duplicateWithEmpty() {
        MavenReference result = new MavenReference(this);
        result.accessUrl = emptyIfNull(result.accessUrl);
        result.artifactId = emptyIfNull(result.artifactId);
        result.classifier = emptyIfNull(result.classifier);
        result.extension = emptyIfNull(result.extension);
        result.groupId = emptyIfNull(result.groupId);
        result.version = emptyIfNull(result.version);
        return result;
    }

    /**
     * Similar to {link {@link #toGavString()}, except <li>null or "" are not
     * display <li>separator is - instead of :
     */
    public String getArtifactName() {
        StringBuilder result = new StringBuilder(64);
        if (getArtifactId() != null && getArtifactId().length() > 0) {
            result.append(getArtifactId());
        }
        if (getVersion() != null && getVersion().length() > 0) {
            result.append("-" + getVersion());
        }
        if (getClassifier() != null && getClassifier().length() > 0) {
            result.append("-" + getClassifier());
        }
        // The extension is only appended when there is a name to attach it to.
        if (getExtension() != null && getExtension().length() > 0 && result.length() > 0) {
            result.append("." + getExtension());
        }
        return result.toString();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.physical.impl.sort;
import io.netty.buffer.DrillBuf;
import java.util.ArrayList;
import java.util.List;
import org.apache.drill.common.exceptions.DrillRuntimeException;
import org.apache.drill.exec.exception.SchemaChangeException;
import org.apache.drill.exec.memory.AllocationReservation;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.ops.FragmentContext;
import org.apache.drill.exec.record.BatchSchema;
import org.apache.drill.exec.record.BatchSchema.SelectionVectorMode;
import org.apache.drill.exec.record.MaterializedField;
import org.apache.drill.exec.record.VectorAccessible;
import org.apache.drill.exec.record.VectorContainer;
import org.apache.drill.exec.record.VectorWrapper;
import org.apache.drill.exec.record.selection.SelectionVector2;
import org.apache.drill.exec.record.selection.SelectionVector4;
import org.apache.drill.exec.vector.ValueVector;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Lists;
/**
 * Accumulates incoming record batches for an in-memory sort and, once all
 * batches have been added, builds a single {@link SelectionVector4}-indexed
 * hyper-{@link VectorContainer} spanning them.
 * <p>
 * The number of batches is capped at {@link Character#MAX_VALUE} because an
 * SV4 entry encodes the batch index in 16 bits. Memory for the SV4 buffer
 * (4 bytes per record) is reserved incrementally as batches are added.
 */
public class SortRecordBatchBuilder implements AutoCloseable {
  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(SortRecordBatchBuilder.class);
  // Held batches grouped by schema; build() requires exactly one schema.
  private final ArrayListMultimap<BatchSchema, RecordBatchData> batches = ArrayListMultimap.create();
  // Total number of records across all held batches.
  private int recordCount;
  // Number of batches added so far (bounded by Character.MAX_VALUE, see class doc).
  private long runningBatches;
  // Built lazily by build(); maps a global record index to (batch id, offset).
  private SelectionVector4 sv4;
  private BufferAllocator allocator;
  // Incrementally grown reservation backing the eventual SV4 buffer.
  final AllocationReservation reservation;
  public SortRecordBatchBuilder(BufferAllocator a) {
    this.allocator = a;
    this.reservation = a.newReservation();
  }
  // Sums the buffer sizes of all value vectors in the batch.
  private long getSize(VectorAccessible batch) {
    long bytes = 0;
    for (VectorWrapper<?> v : batch) {
      bytes += v.getValueVector().getBufferSize();
    }
    return bytes;
  }
  /**
   * Add another record batch to the set of record batches. TODO: Refactor this and the other
   * {@link #add(RecordBatchData)} method into one method.
   * @param batch
   * @return True if the requested add completed successfully. Returns false in the case that this builder is full and cannot receive additional packages.
   * @throws SchemaChangeException
   */
  public boolean add(VectorAccessible batch) {
    if (batch.getSchema().getSelectionVectorMode() == SelectionVectorMode.FOUR_BYTE) {
      throw new UnsupportedOperationException("A sort cannot currently work against a sv4 batch.");
    }
    if (batch.getRecordCount() == 0 && batches.size() > 0) {
      return true; // skip over empty record batches.
    }
    long batchBytes = getSize(batch);
    if (batchBytes == 0 && batches.size() > 0) {
      return true; // skip zero-byte batches too (the very first batch is kept for its schema).
    }
    if (runningBatches >= Character.MAX_VALUE) {
      return false; // already at the maximum number of batches an SV4 can address.
    }
    if (!reservation.add(batch.getRecordCount() * 4)) {
      return false; // could not reserve memory for this batch's SV4 entries.
    }
    RecordBatchData bd = new RecordBatchData(batch, allocator);
    runningBatches++;
    batches.put(batch.getSchema(), bd);
    recordCount += bd.getRecordCount();
    return true;
  }
  /**
   * Adds an already-materialized batch. Unlike {@link #add(VectorAccessible)},
   * hitting the batch-count or reservation limit throws instead of returning false.
   */
  @SuppressWarnings("resource")
  public void add(RecordBatchData rbd) {
    long batchBytes = getSize(rbd.getContainer());
    if (batchBytes == 0 && batches.size() > 0) {
      return;
    }
    if(runningBatches >= Character.MAX_VALUE) {
      final String errMsg = String.format("Tried to add more than %d number of batches.", (int) Character.MAX_VALUE);
      logger.error(errMsg);
      throw new DrillRuntimeException(errMsg);
    }
    if (!reservation.add(rbd.getRecordCount() * 4)) {
      final String errMsg = String.format("Failed to pre-allocate memory for SV. " + "Existing recordCount*4 = %d, " +
          "incoming batch recordCount*4 = %d", recordCount * 4, rbd.getRecordCount() * 4);
      logger.error(errMsg);
      throw new DrillRuntimeException(errMsg);
    }
    // Empty batches (after the first) are released rather than stored.
    if (rbd.getRecordCount() == 0 && batches.size() > 0) {
      rbd.getContainer().zeroVectors();
      SelectionVector2 sv2 = rbd.getSv2();
      if (sv2 != null) {
        sv2.clear();
      }
      return;
    }
    runningBatches++;
    batches.put(rbd.getContainer().getSchema(), rbd);
    recordCount += rbd.getRecordCount();
  }
  // Canonicalizes the vector ordering of every held batch.
  public void canonicalize() {
    for (RecordBatchData batch : batches.values()) {
      batch.canonicalize();
    }
  }
  public boolean isEmpty() {
    return batches.isEmpty();
  }
  // Kept for API compatibility; the fragment context is not needed to build.
  public void build(FragmentContext context, VectorContainer outputContainer) throws SchemaChangeException {
    build(outputContainer);
  }
  /**
   * Builds the output hyper-container and the SV4 that indexes every record of
   * every held batch. All batches must share a single schema.
   */
  @SuppressWarnings("resource")
  public void build(VectorContainer outputContainer) throws SchemaChangeException {
    outputContainer.clear();
    if (batches.keySet().size() > 1) {
      throw new SchemaChangeException("Sort currently only supports a single schema.");
    }
    if (batches.size() > Character.MAX_VALUE) {
      throw new SchemaChangeException("Sort cannot work on more than %d batches at a time.", (int) Character.MAX_VALUE);
    }
    if (batches.keys().size() < 1) {
      assert false : "Invalid to have an empty set of batches with no schemas.";
    }
    // Use the memory reserved incrementally by the add() calls.
    final DrillBuf svBuffer = reservation.allocateBuffer();
    if (svBuffer == null) {
      throw new OutOfMemoryError("Failed to allocate direct memory for SV4 vector in SortRecordBatchBuilder.");
    }
    sv4 = new SelectionVector4(svBuffer, recordCount, Character.MAX_VALUE);
    BatchSchema schema = batches.keySet().iterator().next();
    List<RecordBatchData> data = batches.get(schema);
    // now we're going to generate the sv4 pointers
    switch (schema.getSelectionVectorMode()) {
    case NONE: {
      // No incoming selection: every record of every batch is addressed directly.
      int index = 0;
      int recordBatchId = 0;
      for (RecordBatchData d : data) {
        for (int i =0; i < d.getRecordCount(); i++, index++) {
          sv4.set(index, recordBatchId, i);
        }
        recordBatchId++;
      }
      break;
    }
    case TWO_BYTE: {
      // Incoming SV2: translate each 2-byte selection index into an SV4 entry.
      int index = 0;
      int recordBatchId = 0;
      for (RecordBatchData d : data) {
        for (int i = 0; i < d.getRecordCount(); i++, index++) {
          sv4.set(index, recordBatchId, (int) d.getSv2().getIndex(i));
        }
        // might as well drop the selection vector since we'll stop using it now.
        d.getSv2().clear();
        recordBatchId++;
      }
      break;
    }
    default:
      throw new UnsupportedOperationException();
    }
    // next, we'll create lists of each of the vector types.
    ArrayListMultimap<MaterializedField, ValueVector> vectors = ArrayListMultimap.create();
    for (RecordBatchData rbd : batches.values()) {
      for (ValueVector v : rbd.getVectors()) {
        vectors.put(v.getField(), v);
      }
    }
    // One hyper-vector (list of per-batch vectors) per field, in schema order.
    for (MaterializedField f : schema) {
      List<ValueVector> v = vectors.get(f);
      outputContainer.addHyperList(v, false);
    }
    outputContainer.buildSchema(SelectionVectorMode.FOUR_BYTE);
  }
  public SelectionVector4 getSv4() {
    return sv4;
  }
  // Releases the held batch containers and the SV4 (if built).
  public void clear() {
    for (RecordBatchData d : batches.values()) {
      d.container.clear();
    }
    if (sv4 != null) {
      sv4.clear();
    }
  }
  @Override
  public void close() {
    reservation.close();
  }
  // Transfers ownership of the held containers to the caller and forgets them.
  public List<VectorContainer> getHeldRecordBatches() {
    ArrayList<VectorContainer> containerList = Lists.newArrayList();
    for (BatchSchema bs : batches.keySet()) {
      for (RecordBatchData bd : batches.get(bs)) {
        VectorContainer c = bd.getContainer();
        c.setRecordCount(bd.getRecordCount());
        containerList.add(c);
      }
    }
    batches.clear();
    return containerList;
  }
  /**
   * For given record count how much memory does SortRecordBatchBuilder needs for its own purpose. This is used in
   * ExternalSortBatch to make decisions about whether to spill or not.
   *
   * @param recordCount
   * @return
   */
  public static long memoryNeeded(int recordCount) {
    // We need 4 bytes (SV4) for each record. Due to power-of-two allocations, the
    // backing buffer might be twice this size.
    return recordCount * 2 * 4;
  }
}
| |
/*
* Copyright 2011-2015 B2i Healthcare Pte Ltd, http://b2i.sg
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.b2international.commons;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.StringTokenizer;
/**
* This class is for storing repository address history.
*/
public class URLHistory {
private List<String> addresses;
private static final String DELIMITER = "|";
private static final int MAX_SIZE = 5;
private static URLHistory urlHistory;
/**
* Private constructor.
*/
private URLHistory() {
this.addresses = new ArrayList<String>();
}
/**
* Set addresses.
*
* @param addresses list of repository addresses stored as string
*/
public void setAddresses(List<String> addresses) {
this.addresses = null;
this.addresses = addresses;
}
/**
* Retrieve addresses.
*
* @return list of addresses stores as string
*/
public List<String> getAddressesAsList() {
Collections.reverse(addresses);
return this.addresses;
}
/**
* Retrieve addresses as single string (concatenation of list element with
* delimiter).
*
* @return concatenation of repository address history with delimiter
*/
public String getAddressesAsString() {
StringBuffer buffer = new StringBuffer();
for (String listElement : this.addresses) {
if (!listElement.isEmpty()) {
buffer.append(listElement);
buffer.append(DELIMITER);
}
}
return new String(buffer);
}
/**
* Converts a string of repository address histories list of strings and sets
* the addresses.
*
* @param addresses - concatenation of repository address history with delimiter
*/
public void setAddresses(String addresses) {
StringTokenizer stringTokenizer = new StringTokenizer(addresses, DELIMITER);
int tokenNumber = stringTokenizer.countTokens();
List<String> retList = new ArrayList<String>(tokenNumber);
for (int i = 0; i < tokenNumber; i++) {
String parsedString = stringTokenizer.nextToken();
if (!(parsedString == null || parsedString.trim().length() == 0)) {
retList.add(parsedString);
}
}
this.setAddresses(retList);
}
/**
* Add a concatenation of repository address to the repository address
* histories.
*
* @param addresses - concatenation of repository address history with delimiter
*/
public void addAddress(String addresses) {
StringTokenizer stringTokenizer = new StringTokenizer(addresses, DELIMITER);
int tokenNumber = stringTokenizer.countTokens();
List<String> retList = new ArrayList<String>(tokenNumber);
for (int i = 0; i < tokenNumber; i++) {
String parsedString = stringTokenizer.nextToken();
if (!(parsedString == null || parsedString.trim().length() == 0)) {
retList.add(parsedString);
}
}
this.add(retList);
}
/**
* Add a single repository address to the repository address histories.
*
* @param address - a single repository address
*/
public void add(String address) {
if (isFull()) {
List<String> tempList = new ArrayList<String>(MAX_SIZE + 1);
tempList = this.addresses;
tempList.add(address);
tempList.subList(0, 1).clear();
this.addresses = tempList;
} else {
this.addresses.add(address);
}
}
/**
* Add a list of addresses.
*
* @param addresses - a list of repository addresses
*/
public void add(List<String> addresses) {
for (String listElement : addresses) {
add(listElement);
}
}
/**
* Checks whether the list is full or not.
*
* @return is the queue full
*/
public boolean isFull() {
if (this.addresses.size() == MAX_SIZE) {
return true;
} else {
return false;
}
}
/**
* Static method for singleton.
*
* @return instance of URLHistory
*/
public static URLHistory getInstance() {
if (urlHistory == null) {
urlHistory = new URLHistory();
}
return urlHistory;
}
/**
 * Renders the full address history via {@link java.util.List#toString()},
 * e.g. {@code [addr1, addr2]}.
 *
 * @see java.lang.Object#toString()
 */
@Override
public String toString() {
	return this.addresses.toString();
}
/**
 * Get the latest repository address from the queue as string.
 *
 * <p>The newest entry is expected to look like {@code <server>/<repoId>};
 * this returns the part before the last {@code '/'}, trimmed.
 *
 * <p>Fixes: an empty (but non-null) history no longer throws an uncaught
 * {@code IndexOutOfBoundsException} from {@code get(-1)}, and the
 * missing-separator case is handled with an explicit index check instead of
 * catching {@code StringIndexOutOfBoundsException} as control flow.
 *
 * @return latest repository server address, or "" when the history is empty
 */
public String getServerAddress() {
	if (this.addresses == null || this.addresses.isEmpty()) {
		return "";
	}
	String latest = this.addresses.get(this.addresses.size() - 1);
	int slash = latest.lastIndexOf('/');
	if (slash < 0) {
		// No separator present: return the raw entry, matching the old
		// catch-branch fallback (which did not trim).
		return latest;
	}
	return latest.substring(0, slash).trim();
}
/**
 * Get the latest repository ID from the queue as string.
 *
 * <p>The newest entry is expected to look like {@code <server>/<repoId>};
 * this returns the part after the last {@code '/'}, trimmed. When no
 * separator is present the whole entry is returned ({@code lastIndexOf}
 * yields -1, so {@code substring(0)} is the full string — the original's
 * catch block was unreachable for this method).
 *
 * <p>Fix: an empty (but non-null) history no longer throws an uncaught
 * {@code IndexOutOfBoundsException} from {@code get(-1)}.
 *
 * @return latest repository ID, or "" when the history is empty
 */
public String getRepositoryID() {
	if (this.addresses == null || this.addresses.isEmpty()) {
		return "";
	}
	String latest = this.addresses.get(this.addresses.size() - 1);
	int slash = latest.lastIndexOf('/');
	return latest.substring(slash + 1).trim();
}
}
| |
/**
* Angular Cordova Demo using JEE7 backend
*
* File: CV.java, 19.08.2014, 18:49:55, mreinhardt
*
* @project https://github.com/hypery2k/angular_cordova_app
*
* @copyright 2014 Martin Reinhardt contact@martinreinhardt-online.de
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*/
package de.mare.mobile.domain.cv;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import javax.persistence.CollectionTable;
import javax.persistence.Column;
import javax.persistence.ElementCollection;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.OneToMany;
import javax.validation.Valid;
import javax.validation.constraints.NotNull;
import javax.xml.bind.annotation.XmlRootElement;
import de.mare.mobile.domain.MultilingualString;
import de.mare.mobile.domain.User;
import de.mare.mobile.domain.validation.ValidationConditions;
import de.mare.mobile.domain.validation.ValidatorFactory;
/**
* CV object
*
* @author mreinhardt
*
*/
@Entity
@XmlRootElement
public class CV implements Serializable {
	/**
	 * Serial ID
	 */
	private static final long serialVersionUID = 5418845479111419180L;

	/** Technical primary key, generated by the persistence provider. */
	@Id
	@GeneratedValue
	@Column(name = "cv_id")
	private Long id;

	/** Owner of this CV; must be present and pass bean validation. */
	@NotNull
	@Valid
	private User person;

	/** Locales this CV provides content for. */
	@ElementCollection
	@CollectionTable(name = "locales", joinColumns = @JoinColumn(name = "locale_id"))
	private List<Locale> localizations;

	/** Localized free-text summary. */
	private MultilingualString summary;

	@OneToMany
	private List<Job> professionalExperience;

	@OneToMany
	private List<Project> projects;

	@OneToMany
	private List<Skill> competencies;

	@OneToMany
	private List<Study> studies;

	@OneToMany
	private List<LanguageSkill> languageSkills;

	@ElementCollection
	@CollectionTable(name = "interests", joinColumns = @JoinColumn(name = "interest_id"))
	private List<String> interests;

	@OneToMany
	private List<AcademicWork> academicWorks = new ArrayList<AcademicWork>();

	/**
	 * Default constructor is necessary because of existing of the private
	 * "Builder" constructor.
	 */
	public CV() {
	}

	/** Copies all state from the builder; only reachable via {@link Builder#build()}. */
	private CV(Builder builder) {
		person = builder.person;
		localizations = builder.localizations;
		summary = builder.summary;
		professionalExperience = builder.professionalExperience;
		projects = builder.projects;
		competencies = builder.competencies;
		studies = builder.studies;
		languageSkills = builder.languageSkills;
		interests = builder.interests;
		academicWorks = builder.academicWorks;
	}

	public Long getId() {
		return id;
	}

	/** Alias for {@link #getPerson()}, kept for backward compatibility. */
	public User getUser() {
		return person;
	}

	/** Alias for {@link #setPerson(User)}, kept for backward compatibility. */
	public void setUser(User person) {
		this.person = person;
	}

	/**
	 * @return the person
	 */
	public User getPerson() {
		return person;
	}

	/**
	 * @param person
	 *          the person to set
	 */
	public void setPerson(User person) {
		this.person = person;
	}

	/**
	 * @return the localizations
	 */
	public List<Locale> getLocalizations() {
		return localizations;
	}

	/**
	 * @param localizations
	 *          the localizations to set
	 */
	public void setLocalizations(List<Locale> localizations) {
		this.localizations = localizations;
	}

	/**
	 * @return the summary
	 */
	public MultilingualString getSummary() {
		return summary;
	}

	/**
	 * @param summary
	 *          the summary to set
	 */
	public void setSummary(MultilingualString summary) {
		this.summary = summary;
	}

	public List<Job> getProfessionalExperience() {
		return professionalExperience;
	}

	public void setProfessionalExperience(List<Job> professionalExperience) {
		this.professionalExperience = professionalExperience;
	}

	public List<Project> getProjects() {
		return projects;
	}

	public void setProjects(List<Project> projects) {
		this.projects = projects;
	}

	public List<Skill> getCompetencies() {
		return competencies;
	}

	public void setCompetencies(List<Skill> competencies) {
		this.competencies = competencies;
	}

	public List<Study> getStudies() {
		return studies;
	}

	public void setStudies(List<Study> studies) {
		this.studies = studies;
	}

	public List<LanguageSkill> getLanguageSkills() {
		return languageSkills;
	}

	public void setLanguageSkills(List<LanguageSkill> languageSkills) {
		this.languageSkills = languageSkills;
	}

	public List<String> getInterests() {
		return interests;
	}

	public void setInterests(List<String> interests) {
		this.interests = interests;
	}

	public List<AcademicWork> getAcademicWorks() {
		return academicWorks;
	}

	public void setAcademicWorks(List<AcademicWork> academicWorks) {
		this.academicWorks = academicWorks;
	}

	/**
	 * Debug representation listing every non-null field.
	 *
	 * @see java.lang.Object#toString()
	 */
	@Override
	public String toString() {
		StringBuilder sb = new StringBuilder();
		sb.append("CV [");
		if (id != null) {
			sb.append("id=");
			sb.append(id);
			sb.append(", ");
		}
		if (person != null) {
			sb.append("person=");
			sb.append(person);
			sb.append(", ");
		}
		if (localizations != null) {
			sb.append("localizations=");
			sb.append(localizations);
			sb.append(", ");
		}
		if (summary != null) {
			sb.append("summary=");
			sb.append(summary);
			sb.append(", ");
		}
		if (professionalExperience != null) {
			sb.append("professionalExperience=");
			sb.append(professionalExperience);
			sb.append(", ");
		}
		if (projects != null) {
			sb.append("projects=");
			sb.append(projects);
			sb.append(", ");
		}
		if (competencies != null) {
			sb.append("competencies=");
			sb.append(competencies);
			sb.append(", ");
		}
		if (studies != null) {
			sb.append("studies=");
			sb.append(studies);
			sb.append(", ");
		}
		if (languageSkills != null) {
			sb.append("languageSkills=");
			sb.append(languageSkills);
			sb.append(", ");
		}
		if (interests != null) {
			sb.append("interests=");
			sb.append(interests);
			sb.append(", ");
		}
		if (academicWorks != null) {
			sb.append("academicWorks=");
			sb.append(academicWorks);
		}
		sb.append("]");
		return sb.toString();
	}

	/** Fluent builder for {@link CV}; validates the result in {@link #build()}. */
	public static class Builder {
		private User person;
		private List<Locale> localizations;
		private MultilingualString summary;
		private List<Job> professionalExperience = new ArrayList<>();
		// Fix: initialized like the sibling collection fields so that building
		// without calling withProjects(...) / withListAcademicWorks(...) does not
		// clobber the CV field with null (academicWorks has a non-null default
		// on the entity that the builder constructor would otherwise overwrite).
		private List<Project> projects = new ArrayList<>();
		private List<Skill> competencies = new ArrayList<>();
		private List<Study> studies = new ArrayList<>();
		private List<LanguageSkill> languageSkills = new ArrayList<>();
		private List<String> interests = new ArrayList<>();
		private List<AcademicWork> academicWorks = new ArrayList<>();

		public Builder withUser(User person) {
			this.person = person;
			return this;
		}

		/**
		 * Sets the supported locales and back-fills the summary so that every
		 * locale has at least an empty text entry.
		 */
		public Builder withLocalization(List<Locale> pLocalizations) {
			this.localizations = pLocalizations;
			if (pLocalizations != null) {
				if (this.summary != null && this.summary.getLocalizedMessages().size() > 0) {
					// Summary already has some texts: only add the missing locales.
					for (Locale locale : pLocalizations) {
						if (!this.summary.getLocalizedMessages().containsKey(locale)) {
							this.summary.addText(locale, "");
						}
					}
				} else {
					// No summary yet: create one with an empty text per locale.
					this.summary = new MultilingualString();
					for (Locale locale : pLocalizations) {
						this.summary.addText(locale, "");
					}
				}
			}
			// TODO add other localized strings to (maybe type interfer!)
			return this;
		}

		public Builder withSummary(MultilingualString summary) {
			this.summary = summary;
			return this;
		}

		public Builder withProfessionalExperience(List<Job> professionalExperience) {
			this.professionalExperience = professionalExperience;
			return this;
		}

		public Builder withProjects(List<Project> projects) {
			this.projects = projects;
			return this;
		}

		public Builder withCompetencies(List<Skill> competencies) {
			this.competencies = competencies;
			return this;
		}

		public Builder withStudies(List<Study> studies) {
			this.studies = studies;
			return this;
		}

		public Builder withLanguageSkills(List<LanguageSkill> languageSkills) {
			this.languageSkills = languageSkills;
			return this;
		}

		public Builder withInterests(List<String> interests) {
			this.interests = interests;
			return this;
		}

		public Builder withListAcademicWorks(List<AcademicWork> academicWorks) {
			this.academicWorks = academicWorks;
			return this;
		}

		/**
		 * Builds the CV and runs bean validation on the result.
		 *
		 * @return the validated CV instance
		 */
		public CV build() {
			CV newCV = new CV(this);
			ValidationConditions.isValid(newCV, ValidatorFactory.getValidator());
			return newCV;
		}
	}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gemstone.gemfire.internal.cache;
import com.gemstone.gemfire.*;
import com.gemstone.gemfire.cache.*;
import com.gemstone.gemfire.internal.Assert;
import com.gemstone.gemfire.distributed.internal.*;
import dunit.*;
//import java.io.File;
//import java.util.*;
/**
* This is the abstract superclass of tests that validate the
* functionality of GemFire's distributed caches. It provides a
* number of convenient helper classes.
*/
public abstract class DistributedCacheTestCase
  extends DistributedTestCase {

  /** The current cache in this VM */
  protected static Cache cache = null;

  /////////////////////// Constructors ///////////////////////

  public DistributedCacheTestCase(String name) {
    super(name);
  }

  public void setUp() throws Exception {
    // By default a cache is also created in the local (controller) VM.
    setUp(true);
  }

  /**
   * Creates the {@link Cache} and root region in each remote VM
   * and, if createLocalCache, in this VM.
   */
  protected void setUp(boolean createLocalCache) throws Exception {
    super.setUp();

    if (createLocalCache) {
      try {
        remoteCreateCache();
        assertTrue(cache != null);

      } catch (Exception ex) {
        String s = "While creating cache in this VM";
        throw new InternalGemFireException(s, ex);
      }

    } else {
      this.getSystem(); // make sure we have a connected DistributedSystem
    }

    // NOTE(review): dunit invokes static methods reflectively by name, so the
    // string "remoteCreateCache" must stay in sync with the method below.
    for (int h = 0; h < Host.getHostCount(); h++) {
      Host host = Host.getHost(h);

      for (int v = 0; v < host.getVMCount(); v++) {
        VM vm = host.getVM(v);
        vm.invoke(this.getClass(), "remoteCreateCache",
                  new Object[] {});
      }
    }
  }

  /**
   * Creates the root region in a remote VM
   */
  private static void remoteCreateCache()
    throws Exception {

    Assert.assertTrue(cache == null, "cache should be null");

    // The anonymous subclass only exists to get access to getSystem().
    DistributedCacheTestCase x = new DistributedCacheTestCase("Lame") { };
    cache = CacheFactory.create(x.getSystem());

    AttributesFactory factory = new AttributesFactory();
    factory.setScope(Scope.DISTRIBUTED_NO_ACK);
    cache.createRegion("root", factory.create());
  }

  /**
   * Closes the cache in this VM and each remote VM
   */
  public void tearDown2() throws Exception {
    StringBuffer problems = new StringBuffer();

    if (cache != null) {
      try {
        if (remoteCloseCache()) {
          problems.append("An exception occurred trying to close the cache.");
        }
        // remoteCloseCache() nulls out the static cache field on success.
        assertTrue(cache == null);

      } catch (Exception ex) {
        String s = "While closing the cache in this VM";
        throw new InternalGemFireException(s, ex);
      }
    }

    for (int h = 0; h < Host.getHostCount(); h++) {
      Host host = Host.getHost(h);

      for (int v = 0; v < host.getVMCount(); v++) {
        VM vm = host.getVM(v);
        // Reflective invocation again; name must match remoteCloseCache below.
        boolean exceptionInThreads =
          vm.invokeBoolean(this.getClass(), "remoteCloseCache");
        if (exceptionInThreads) {
          String s = "An exception occurred in GemFire system";
          problems.append(s);
        }
      }
    }

    assertEquals("Problems while tearing down",
                 "", problems.toString().trim());

    super.tearDown2();
  }

  /**
   * Closes the Cache for the current VM.  Returns whether or not an
   * exception occurred in the distribution manager to which this VM
   * is attached.  Note that the exception flag is cleared by this
   * method.
   *
   * @see DistributionManager#exceptionInThreads()
   */
  private static boolean remoteCloseCache() throws CacheException {
    Assert.assertTrue(cache != null, "No cache on this VM?");
    Assert.assertTrue(!cache.isClosed(), "Who closed my cache?");

    InternalDistributedSystem system = (InternalDistributedSystem)
      ((GemFireCacheImpl) cache).getDistributedSystem();
    DistributionManager dm = (DistributionManager)system.getDistributionManager();
    boolean exceptionInThreads = dm.exceptionInThreads();
    // Clear the flag so subsequent tests start clean.
    DistributionManagerDUnitTest.clearExceptionInThreads(dm);

    cache.close();
    cache = null;

    return exceptionInThreads;
  }

  ////////////////////// Helper Methods //////////////////////

  /**
   * Returns the root region of the cache.  We assume that the {@link
   * Cache} and the root region have already been created.
   */
  protected static Region getRootRegion() throws CacheException {
    if (cache == null) {
      String s = "Cache not created yet!";
      throw new IllegalStateException(s);
    }

    return cache.getRegion("root");
  }

  /**
   * Return the distribution manager associate with the cache
   *
   * @since 2.1
   */
  protected static DistributionManager getDistributionManager() {
    if (cache == null) {
      String s = "Cache not created yet!";
      throw new IllegalStateException(s);
    }

    InternalDistributedSystem system = (InternalDistributedSystem)
      ((GemFireCacheImpl) cache).getDistributedSystem();
    return (DistributionManager)system.getDistributionManager();
  }

  /**
   * Creates a new sub-Region of the root Region in a remote VM with
   * default scope, SCOPE_LOCAL.
   *
   * @param name
   *        The name of the newly-created sub-Region.  It is
   *        recommended that the name of the Region be the {@link
   *        #getUniqueName()} of the test.
   */
  protected static void remoteCreateRegion(String name)
    throws CacheException {

    remoteCreateRegion(name, Scope.LOCAL);
  }

  /**
   * Creates a new sub-Region of the root Region in a remote VM.
   *
   * @param name
   *        The name of the newly-created sub-Region.  It is
   *        recommended that the name of the Region be the {@link
   *        #getUniqueName()} of the test.
   * @param scope create the region attributes with this scope
   */
  protected static void remoteCreateRegion(String name, Scope scope)
    throws CacheException {

    Region root = getRootRegion();
    AttributesFactory factory = new AttributesFactory();
    factory.setScope(scope);
    Region newRegion =
      root.createSubregion(name, factory.create());
    getLogWriter().info(
      "Created Region '" + newRegion.getFullPath() + "'");
  }

  /**
   * Defines an entry in the Region with the given
   * name and scope.
   *
   * @param regionName
   *        The name of a region that is a sub-region of the root
   *        region, or a global name
   * @param entryName
   *        Must be {@link java.io.Serializable}
   */
  protected static void remoteDefineEntry(String regionName,
                                          String entryName,
                                          Scope scope)
    throws CacheException {

    remoteDefineEntry(regionName, entryName, scope, true);
  }

  /**
   * Defines an entry in the Region with the given name and scope.  In
   * 3.0 this method create a subregion named <code>entryName</code>
   * (with the appropriate attributes) that contains an entry named
   * <code>entryName</code>.
   *
   * @param regionName
   *        The name of a region that is a sub-region of the root
   *        region, or a global name
   * @param entryName
   *        Must be {@link java.io.Serializable}
   * @param doNetSearch
   *        Will the distributed region perform a netSearch when
   *        looking for objects?  If <code>false</code> a {@link
   *        CacheException} will be thrown if an entry in the region
   *        is asked for and it not there.
   */
  protected static void remoteDefineEntry(String regionName,
                                          String entryName,
                                          Scope scope,
                                          boolean doNetSearch)
    throws CacheException {

    Region root = getRootRegion();
    Region region = root.getSubregion(regionName);
    AttributesFactory factory = new AttributesFactory();
    factory.setScope(scope);

    if (!doNetSearch) {
      // Install a loader that fails loudly, so a netSearch fallback is
      // detected instead of silently returning a loaded value.
      factory.setCacheLoader(new CacheLoader() {
          public Object load(LoaderHelper helper)
            throws CacheLoaderException {

            String s = "Should not be loading \"" + helper.getKey() +
              "\" in \"" + helper.getRegion().getFullPath() + "\"";
            throw new CacheLoaderException(s);
          }

          public void close() { }
        });
    }

    Region sub =
      region.createSubregion(entryName,
                             factory.create());
    sub.create(entryName, null);

    getLogWriter().info(
      "Defined Entry named '" + entryName + "' in region '" +
      sub.getFullPath() +"'");
  }

  /**
   * Puts (or creates) a value in a subregion of <code>region</code>
   * named <code>entryName</code>.
   */
  protected static void remotePut(String regionName,
                                  String entryName,
                                  Object value, Scope scope)
    throws CacheException {

    Region root = getRootRegion();
    Region region = root.getSubregion(regionName);
    Region sub = region.getSubregion(entryName);
    if (sub == null) {
      // Entry subregion does not exist yet: create it on the fly.
      AttributesFactory factory = new AttributesFactory();
      factory.setScope(scope);
      sub = region.createSubregion(entryName,
                                   factory.create());
    }
    sub.put(entryName, value);

    getLogWriter().info(
      "Put value " + value + " in entry " + entryName + " in region '" +
      region.getFullPath() +"'");
  }

  /**
   * Does a put with the given value, defining a DISTRIBUTED_NO_ACK entry
   * in the Region with the given name.
   *
   * @param regionName
   *        The name of a region that is a sub-region of the root
   *        region, or a global name
   * @param entryName
   *        Must be {@link java.io.Serializable}
   */
  protected static void remotePutDistributed(String regionName,
                                             String entryName,
                                             Object value)
    throws CacheException {

    remotePut(regionName, entryName, value, Scope.DISTRIBUTED_NO_ACK);
  }

  /**
   * Replaces the value of an entry in a region in a remote VM.
   * Unlike {@link #remotePut}, the entry subregion must already exist.
   *
   * @param regionName
   *        The name of a region that is a sub-region of the root
   *        region
   * @param entryName
   *        Must be {@link java.io.Serializable}
   * @param value
   *        The value used to replace
   *
   * @see Region#put(Object, Object)
   */
  protected static void remoteReplace(String regionName,
                                      String entryName,
                                      Object value)
    throws CacheException {

    Region root = getRootRegion();
    Region region = root.getSubregion(regionName);
    Region sub = region.getSubregion(entryName);
    if (sub == null) {
      String s = "Entry \"" + entryName + "\" does not exist";
      throw new EntryNotFoundException(s);
    }

    sub.put(entryName, value);

    getLogWriter().info(
      "Replaced value " + value + "in entry " + entryName + " in region '" +
      region.getFullPath() +"'");
  }

  /**
   * Invalidates the value of an entry in a region in a remote VM
   *
   * @param regionName
   *        The name of a region that is a sub-region of the root
   *        region
   * @param entryName
   *        Must be {@link java.io.Serializable}
   *
   * @see Region#invalidate(Object)
   */
  protected static void remoteInvalidate(String regionName,
                                         String entryName)
    throws CacheException {

    Region root = getRootRegion();
    Region region = root.getSubregion(regionName);
    Region sub = region.getSubregion(entryName);
    if (sub == null) {
      String s = "Entry \"" + entryName + "\" does not exist";
      throw new EntryNotFoundException(s);
    }

    sub.invalidate(entryName);
  }

  /**
   * Destroys the value of an entry in a region in a remote VM
   *
   * @param regionName
   *        The name of a region that is a sub-region of the root
   *        region
   * @param entryName
   *        Must be {@link java.io.Serializable}
   *
   * @see Region#destroy(Object)
   */
  protected static void remoteDestroy(String regionName,
                                      String entryName)
    throws CacheException {

    Region root = getRootRegion();
    Region region = root.getSubregion(regionName);
    Region sub = region.getSubregion(entryName);
    if (sub == null) {
      String s = "Entry \"" + entryName + "\" does not exist";
      throw new EntryNotFoundException(s);
    }

    // Sanity-check presence before and absence after the destroy.
    assertNotNull(sub.getEntry(entryName));
    sub.destroy(entryName);
    assertNull(sub.getEntry(entryName));
  }

  /**
   * Asserts that the value of an entry in a region is what we expect
   * it to be.
   *
   * @param regionName
   *        The name of a region that is a sub-region of the root
   *        region
   * @param entryName
   *        Must be {@link java.io.Serializable}
   */
  protected static void remoteAssertEntryValue(String regionName,
                                               String entryName,
                                               Object expected)
    throws CacheException {

    Region root = getRootRegion();
    Region region = root.getSubregion(regionName);
    Region sub = region.getSubregion(entryName);
    if (sub == null) {
      String s = "Entry \"" + entryName + "\" does not exist";
      throw new EntryNotFoundException(s);
    }

    assertEquals(expected, sub.get(entryName));
  }

  /**
   * Assumes there is only one host, and invokes the given method in
   * every VM that host knows about.  The method is resolved
   * reflectively by name on this test's class.
   */
  public void forEachVMInvoke(String methodName, Object[] args) {
    Host host = Host.getHost(0);
    int vmCount = host.getVMCount();
    for (int i=0; i<vmCount; i++) {
      getLogWriter().info("Invoking " + methodName + "on VM#" + i);
      host.getVM(i).invoke(this.getClass(), methodName, args);
    }
  }
}
| |
/*
* Copyright (C) 2010 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.i18n.addressinput.common;
import static com.google.i18n.addressinput.common.Util.checkNotNull;
import com.google.i18n.addressinput.common.AsyncRequestApi.AsyncCallback;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.EventListener;
import java.util.HashMap;
import java.util.HashSet;
import java.util.logging.Logger;
/**
* Cache for dynamic address data.
*/
public final class CacheData {
private static final Logger logger = Logger.getLogger(CacheData.class.getName());
/**
* Time-out value for the server to respond in milliseconds.
*/
private static final int TIMEOUT = 5000;
/**
* URL to get address data. You can also reset it by calling {@link #setUrl(String)}.
*/
private String serviceUrl;
private final AsyncRequestApi asyncRequestApi;
/**
* Storage for all dynamically retrieved data.
*/
private final JsoMap cache;
/**
* CacheManager that handles caching that is needed by the client of the Address Widget.
*/
private final ClientCacheManager clientCacheManager;
/**
* All requests that have been sent.
*/
private final HashSet<String> requestedKeys = new HashSet<String>();
/**
* All invalid requested keys. For example, if we request a random string "asdfsdf9o", and the
* server responds by saying this key is invalid, it will be stored here.
*/
private final HashSet<String> badKeys = new HashSet<String>();
/**
* Temporary store for {@code CacheListener}s. When a key is requested and still waiting for
* server's response, the listeners for the same key will be temporary stored here. When the
* server responded, these listeners will be triggered and then removed.
*/
private final HashMap<LookupKey, HashSet<CacheListener>> temporaryListenerStore =
new HashMap<LookupKey, HashSet<CacheListener>>();
/**
* Creates an instance of CacheData with an empty cache, and uses no caching that is external
* to the AddressWidget.
*/
// TODO(dbeaumont): Remove this method (avoid needless dependency on SimpleClientCacheManager).
public CacheData(AsyncRequestApi asyncRequestApi) {
  // Delegate to the main constructor with a no-op external cache manager.
  this(new SimpleClientCacheManager(), asyncRequestApi);
}
/**
* Creates an instance of CacheData with an empty cache, and uses additional caching (external
* to the AddressWidget) specified by clientCacheManager.
*/
public CacheData(ClientCacheManager clientCacheManager, AsyncRequestApi asyncRequestApi) {
  this.clientCacheManager = clientCacheManager;
  this.asyncRequestApi = asyncRequestApi;
  // Start with an empty in-memory cache; the server URL comes from the
  // supplied cache manager implementation.
  this.cache = JsoMap.createEmptyJsoMap();
  setUrl(this.clientCacheManager.getAddressServerUrl());
}
/**
* This constructor is meant to be used together with external caching. Use case:
* <p>
* After having finished using the address widget:
* <ol>
* <li>String allCachedData = getJsonString();
* <li>Cache (save) allCachedData wherever makes sense for your service / activity
* </ol>
* <p>
* Before using it next time:
* <ol>
* <li>Get the saved allCachedData string
* <li>new ClientData(new CacheData(allCachedData))
* </ol>
* <p>
* If you don't have any saved data you can either just pass an empty string to
* this constructor or use the other constructor.
*
* @param jsonString cached data from last time the class was used
*/
// TODO(dbeaumont): Remove this method (if callers need to build from json string, do it first).
public CacheData(String jsonString, AsyncRequestApi asyncRequestApi) {
  this.clientCacheManager = new SimpleClientCacheManager();
  setUrl(clientCacheManager.getAddressServerUrl());
  JsoMap parsedCache;
  try {
    parsedCache = JsoMap.buildJsoMap(jsonString);
  } catch (JSONException jsonE) {
    // If parsing the JSON string throws an exception, default to
    // starting with an empty cache.
    logger.warning("Could not parse json string, creating empty cache instead.");
    parsedCache = JsoMap.createEmptyJsoMap();
  }
  cache = parsedCache;
  this.asyncRequestApi = asyncRequestApi;
}
/**
* Interface for all listeners to {@link CacheData} change. This is only used when multiple
* requests of the same key is dispatched and server has not responded yet.
*/
private static interface CacheListener extends EventListener {
  /**
   * Called when the pending request for {@code key} completes — when data is
   * about to be put in the cache, and also when the key turns out to be
   * invalid or the request fails (see notifyListenersAfterJobDone call sites).
   *
   * <p>NOTE(review): fired from the async request callback path — presumably a
   * background thread; confirm before touching UI state here.
   *
   * @param key the key for newly arrived data.
   */
  void onAdd(String key);
}
/**
* Class to handle JSON response.
*/
private class JsonHandler {
  /**
   * Key for the requested data.
   */
  private final String key;

  /**
   * Pre-existing data for the requested key. Null is allowed.
   */
  private final JSONObject existingJso;

  /** Optional listener notified when handling completes; may be null. */
  private final DataLoadListener listener;

  /**
   * Constructs a JsonHandler instance.
   *
   * @param key The key for requested data.
   * @param oldJso Pre-existing data for this key or null.
   * @param listener optional listener to notify when done; may be null.
   */
  private JsonHandler(String key, JSONObject oldJso, DataLoadListener listener) {
    checkNotNull(key);
    this.key = key;
    this.existingJso = oldJso;
    this.listener = listener;
  }

  /**
   * Saves valid responded data to the cache once data arrives, or if the key is invalid,
   * saves it in the invalid cache. If there is pre-existing data for the key, it will merge
   * the new data with the old one. It also triggers {@link DataLoadListener#dataLoadingEnd()}
   * method before it returns (even when the key is invalid, or input jso is null). This is
   * called from a background thread.
   *
   * @param map The received JSON data as a map.
   */
  private void handleJson(JsoMap map) {
    // Can this ever happen?
    if (map == null) {
      logger.warning("server returns null for key:" + key);
      badKeys.add(key);
      notifyListenersAfterJobDone(key);
      triggerDataLoadingEndIfNotNull(listener);
      return;
    }

    JSONObject json = map;
    String idKey = Util.toLowerCaseLocaleIndependent(AddressDataKey.ID.name());
    // A usable response must carry an "id" field; otherwise mark the key bad.
    if (!json.has(idKey)) {
      logger.warning("invalid or empty data returned for key: " + key);
      badKeys.add(key);
      notifyListenersAfterJobDone(key);
      triggerDataLoadingEndIfNotNull(listener);
      return;
    }

    if (existingJso != null) {
      // Merge previously known data for this key into the fresh response.
      map.mergeData((JsoMap) existingJso);
    }

    cache.putObj(key, map);
    notifyListenersAfterJobDone(key);
    triggerDataLoadingEndIfNotNull(listener);
  }
}
/**
* Sets address data server URL. Input URL cannot be null.
*
* @param url The service URL.
*/
public void setUrl(String url) {
  // Fail fast on null so a bad configuration is caught at set time.
  checkNotNull(url, "Cannot set URL of address data server to null.");
  this.serviceUrl = url;
}
/**
* Gets address data server URL.
*/
public String getUrl() {
  // Last value passed to setUrl (initialized from the client cache manager).
  return this.serviceUrl;
}
/**
* Returns a JSON string representing the data currently stored in this cache. It can be used
* to later create a new CacheData object containing the same cached data.
*
* @return a JSON string representing the data stored in this cache
*/
public String getJsonString() {
  // JsoMap#toString yields the JSON snapshot used by the string constructor.
  return this.cache.toString();
}
/**
* Checks if key and its value is cached (Note that only valid ones are cached).
*/
public boolean containsKey(String key) {
  // Only successfully fetched keys live in the cache; bad keys are elsewhere.
  return this.cache.containsKey(key);
}
// This method is called from a background thread.
private void triggerDataLoadingEndIfNotNull(DataLoadListener listener) {
  // Guard clause: the listener is optional throughout this class.
  if (listener == null) {
    return;
  }
  listener.dataLoadingEnd();
}
/**
* Fetches data from server, or returns if the data is already cached. If the fetched data is
* valid, it will be added to the cache. This method also triggers {@link
* DataLoadListener#dataLoadingEnd()} method before it returns.
*
* @param existingJso Pre-existing data for this key or null if none.
* @param listener An optional listener to call when done.
*/
void fetchDynamicData(
    final LookupKey key, JSONObject existingJso, final DataLoadListener listener) {
  checkNotNull(key, "null key not allowed.");
  notifyStart(listener);

  // Key is valid and cached.
  if (cache.containsKey(key.toString())) {
    triggerDataLoadingEndIfNotNull(listener);
    return;
  }

  // Key is invalid and cached.
  if (badKeys.contains(key.toString())) {
    triggerDataLoadingEndIfNotNull(listener);
    return;
  }

  // Already requested the key, and is still waiting for server's response.
  // (HashSet#add returns false when the element was already present.)
  if (!requestedKeys.add(key.toString())) {
    logger.fine("data for key " + key + " requested but not cached yet");
    // Park a listener so this caller's dataLoadingEnd fires when the
    // in-flight request for the same key completes.
    addListenerToTempStore(key, new CacheListener() {
      @Override
      public void onAdd(String myKey) {
        triggerDataLoadingEndIfNotNull(listener);
      }
    });
    return;
  }

  // Key is in the cache maintained by the client of the AddressWidget.
  String dataFromClientCache = clientCacheManager.get(key.toString());
  if (dataFromClientCache != null && dataFromClientCache.length() > 0) {
    final JsonHandler handler = new JsonHandler(key.toString(), existingJso, listener);
    try {
      handler.handleJson(JsoMap.buildJsoMap(dataFromClientCache));
      return;
    } catch (JSONException e) {
      // Malformed external cache entry: fall through to a server request.
      logger.warning("Data from client's cache is in the wrong format: " + dataFromClientCache);
    }
  }

  // Key is not cached yet, now sending the request to the server.
  final JsonHandler handler = new JsonHandler(key.toString(), existingJso, listener);
  asyncRequestApi.requestObject(serviceUrl + "/" + key.toString(), new AsyncCallback() {
    @Override
    public void onFailure() {
      logger.warning("Request for key " + key + " failed");
      // Un-mark the key so a later call can retry it.
      requestedKeys.remove(key.toString());
      notifyListenersAfterJobDone(key.toString());
      triggerDataLoadingEndIfNotNull(listener);
    }

    @Override
    public void onSuccess(JsoMap result) {
      handler.handleJson(result);
      // Put metadata into the cache maintained by the client of the AddressWidget.
      String dataRetrieved = result.toString();
      clientCacheManager.put(key.toString(), dataRetrieved);
    }
  },
  TIMEOUT);
}
/**
* Gets region data from our compiled-in java file and stores it in the cache. This is only called
* when data cannot be obtained from the server, so there will be no pre-existing data for this
* key.
*/
void getFromRegionDataConstants(final LookupKey key) {
checkNotNull(key, "null key not allowed.");
String data = RegionDataConstants.getCountryFormatMap().get(
key.getValueForUpperLevelField(AddressField.COUNTRY));
if (data != null) {
try {
cache.putObj(key.toString(), JsoMap.buildJsoMap(data));
} catch (JSONException e) {
logger.warning("Failed to parse data for key " + key + " from RegionDataConstants");
}
}
}
/**
* Retrieves string data identified by key.
*
* @param key Non-null key. E.g., "data/US/CA".
* @return String value for specified key.
*/
public String get(String key) {
checkNotNull(key, "null key not allowed");
return cache.get(key);
}
  /**
   * Retrieves JsoMap data identified by key.
   *
   * @param key Non-null key. E.g., "data/US/CA".
   * @return JsoMap value for specified key.
   */
  public JsoMap getObj(String key) {
    checkNotNull(key, "null key not allowed");
    return cache.getObj(key);
  }
/** Notifies the listener when we start loading data. */
private void notifyStart(DataLoadListener listener) {
if (listener != null) {
listener.dataLoadingBegin();
}
}
private void notifyListenersAfterJobDone(String key) {
LookupKey lookupKey = new LookupKey.Builder(key).build();
HashSet<CacheListener> listeners = temporaryListenerStore.get(lookupKey);
if (listeners != null) {
for (CacheListener listener : listeners) {
listener.onAdd(key.toString());
}
listeners.clear();
}
}
private void addListenerToTempStore(LookupKey key, CacheListener listener) {
checkNotNull(key);
checkNotNull(listener);
HashSet<CacheListener> listeners = temporaryListenerStore.get(key);
if (listeners == null) {
listeners = new HashSet<CacheListener>();
temporaryListenerStore.put(key, listeners);
}
listeners.add(listener);
}
  /**
   * Added for testing purposes. Adds a new object into the cache.
   *
   * @param id string of the format "data/country/..", e.g. "data/US/CA"
   * @param object The JSONObject to be put into cache.
   */
  void addToJsoMap(String id, JSONObject object) {
    cache.putObj(id, object);
  }
  /**
   * Added for testing purposes. Checks to see if the cache is empty.
   *
   * @return true if the internal cache is empty
   */
  boolean isEmpty() {
    return cache.length() == 0;
  }
}
| |
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.util.ui.table;
import com.intellij.openapi.editor.colors.EditorColorsManager;
import com.intellij.openapi.editor.colors.EditorFontType;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Ref;
import com.intellij.ui.*;
import com.intellij.ui.table.JBTable;
import com.intellij.util.ui.AbstractTableCellEditor;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import javax.swing.event.ChangeEvent;
import javax.swing.table.DefaultTableCellRenderer;
import javax.swing.table.TableCellEditor;
import javax.swing.table.TableCellRenderer;
import java.awt.*;
import java.awt.event.KeyEvent;
import java.awt.event.MouseEvent;
import java.util.List;
import static java.awt.event.KeyEvent.*;
/**
 * A panel presenting the rows of an internal {@link JTable} as single composite rows:
 * each row is painted by one component from {@link #getRowRenderer} and edited in place
 * by a {@link JBTableRowEditor} from {@link #getRowEditor}, with animated row-height
 * changes when an editor opens or closes.
 *
 * @author Konstantin Bulenkov
 */
public abstract class JBListTable extends JPanel {
  /** Source table whose model and cell values back the visible table. */
  protected final JTable myInternalTable;
  /** The table actually shown to the user. */
  private final JBTable mainTable;
  /** Index of the editor component that last had focus; restored when editing resumes. */
  private final Ref<Integer> myLastEditorIndex = Ref.create(null);
  /** Last mouse event seen; reset to null by key processing so editors can tell how they were invoked. */
  private MouseEvent myMouseEvent;
  /** Editor most recently handed to the table, or null when the row had no editor. */
  private MyCellEditor myCellEditor;

  public JBListTable(@NotNull final JTable t) {
    super(new BorderLayout());
    myInternalTable = t;
    // Adapter model exposing the internal table's rows as JBTableRow values.
    final JBListTableModel model = new JBListTableModel(t.getModel()) {
      @Override
      public JBTableRow getRow(int index) {
        return getRowAt(index);
      }

      @Override
      public boolean isCellEditable(int rowIndex, int columnIndex) {
        return isRowEditable(rowIndex);
      }

      @Override
      public void addRow() {
        // A brand-new row has no remembered focus index.
        myLastEditorIndex.set(null);
        super.addRow();
      }
    };
    mainTable = new JBTable(model) {
      @Override
      public void editingStopped(ChangeEvent e) {
        super.editingStopped(e);
      }

      @Override
      public void editingCanceled(ChangeEvent e) {
        super.editingCanceled(e);
      }

      @Override
      protected void processKeyEvent(KeyEvent e) {
        myMouseEvent = null;

        //Mnemonics
        if (e.isAltDown()) {
          super.processKeyEvent(e);
          return;
        }

        // Tab/Shift+Tab move focus out of the table instead of between cells.
        if (e.getKeyCode() == VK_TAB) {
          if (e.getID() == KEY_PRESSED) {
            final KeyboardFocusManager keyboardFocusManager = KeyboardFocusManager.getCurrentKeyboardFocusManager();
            if (e.isShiftDown()) {
              keyboardFocusManager.focusPreviousComponent(this);
            }
            else {
              keyboardFocusManager.focusNextComponent(this);
            }
          }
          e.consume();
          return;
        }

        super.processKeyEvent(e);
      }

      @Override
      protected void processMouseEvent(MouseEvent e) {
        myMouseEvent = e;
        super.processMouseEvent(e);
      }

      @Override
      public TableCellRenderer getCellRenderer(int row, int column) {
        // Delegate painting of the whole row to the subclass-provided renderer.
        return new DefaultTableCellRenderer() {
          @Override
          public Component getTableCellRendererComponent(JTable table, Object value, boolean selected, boolean hasFocus, int row, int col) {
            return getRowRenderer(t, row, selected, hasFocus);
          }
        };
      }

      @Override
      protected boolean processKeyBinding(KeyStroke ks, KeyEvent e, int condition, boolean pressed) {
        //Mnemonics and actions
        if (e.isAltDown() || e.isMetaDown() || e.isControlDown()) {
          return false;
        }

        // Escape on an empty row removes it and keeps a sensible selection.
        if (e.getKeyCode() == VK_ESCAPE && pressed) {
          final int row = getSelectedRow();
          if (row != -1 && isRowEmpty(row)) {
            final int count = model.getRowCount();
            model.removeRow(row);
            int newRow = count == row + 1 ? row - 1 : row;
            if (0 <= newRow && newRow < model.getRowCount()) {
              setRowSelectionInterval(newRow, newRow);
            }
          }
        }

        // Enter starts editing, or commits the current edit and moves selection down.
        if (e.getKeyCode() == VK_ENTER) {
          if (e.getID() == KEY_PRESSED) {
            if (!isEditing() && e.getModifiers() == 0) {
              editCellAt(getSelectedRow(), getSelectedColumn());
            }
            else if (isEditing()) {
              TableUtil.stopEditing(this);
              if (e.isControlDown() || e.isMetaDown()) {
                return false;
              }
              else {
                final int row = getSelectedRow() + 1;
                if (row < getRowCount()) {
                  getSelectionModel().setSelectionInterval(row, row);
                }
              }
            }
            else {
              if (e.isControlDown() || e.isMetaDown()) {
                return false;
              }
            }
          }
          e.consume();
          return true;
        }

        // While editing, Tab cycles focus among the editor's components.
        if (isEditing() && e.getKeyCode() == VK_TAB) {
          if (pressed) {
            final KeyboardFocusManager mgr = KeyboardFocusManager.getCurrentKeyboardFocusManager();
            if (e.isShiftDown()) {
              mgr.focusPreviousComponent();
            }
            else {
              mgr.focusNextComponent();
            }
          }
          return true;
        }

        // Up/Down while editing: move selection, then reopen the editor on the new row.
        final boolean isUp = e.getKeyCode() == VK_UP;
        final boolean isDown = e.getKeyCode() == VK_DOWN;

        if (isEditing() && (isUp || isDown) && e.getModifiers() == 0 && e.getID() == KEY_PRESSED) {
          int row = getSelectedRow();
          super.processKeyBinding(ks, e, condition, pressed);
          if (!isEditing() && row != getSelectedRow()) {
            TableUtil.editCellAt(this, getSelectedRow(), 0);
            e.consume();
            return true;
          }
        }

        return super.processKeyBinding(ks, e, condition, pressed);
      }

      @Override
      public TableCellEditor getCellEditor(final int row, int column) {
        final JBTableRowEditor editor = getRowEditor(row);
        if (editor != null) {
          editor.setMouseEvent(myMouseEvent);
          editor.prepareEditor(t, row);
          installPaddingAndBordersForEditors(editor);
          editor.setFocusCycleRoot(true);
          editor.setFocusTraversalPolicy(new JBListTableFocusTraversalPolicy(editor));
          MouseSuppressor.install(editor);
          myCellEditor = new MyCellEditor(editor);
          return myCellEditor;
        }
        // No editor for this row: remember that and return null to the table.
        myCellEditor = null;
        return myCellEditor;
      }

      @Override
      public Component prepareEditor(TableCellEditor editor, int row, int column) {
        Object value = getValueAt(row, column);
        boolean isSelected = isCellSelected(row, column);
        return editor.getTableCellEditorComponent(this, value, isSelected, row, column);
      }
    };
    mainTable.setStriped(true);
  }

  /** Stops any in-progress cell editing on the visible table. */
  public void stopEditing() {
    TableUtil.stopEditing(mainTable);
  }

  /** Marks embedded EditorTextFields so they get JBListTable-specific cell-editor styling. */
  private static void installPaddingAndBordersForEditors(JBTableRowEditor editor) {
    final List<EditorTextField> editors = UIUtil.findComponentsOfType(editor, EditorTextField.class);
    for (EditorTextField textField : editors) {
      textField.putClientProperty("JComboBox.isTableCellEditor", Boolean.FALSE);
      textField.putClientProperty("JBListTable.isTableCellEditor", Boolean.TRUE);
    }
  }

  /** @return the visible table component. */
  public final JBTable getTable() {
    return mainTable;
  }

  /** @return the component used to paint the given row. */
  protected abstract JComponent getRowRenderer(JTable table, int row, boolean selected, boolean focused);

  /** @return the editor for the given row, or null if the row cannot be edited. */
  protected abstract JBTableRowEditor getRowEditor(int row);

  /** @return a column-indexed view over the given row of the internal table. */
  protected JBTableRow getRowAt(final int row) {
    return new JBTableRow() {
      @Override
      public Object getValueAt(int column) {
        return myInternalTable.getValueAt(row, column);
      }
    };
  }

  /** @return whether the given row may be edited; {@code true} by default. */
  protected boolean isRowEditable(int row) {
    return true;
  }

  /** @return whether the given row counts as empty (removable on Escape); {@code false} by default. */
  protected boolean isRowEmpty(int row) {
    return false;
  }

  /**
   * Builds an {@link EditorTextField}-based cell presentation with table selection
   * colors and a fixed 12pt variant of the global editor font.
   */
  public static JComponent createEditorTextFieldPresentation(final Project project, final FileType type, final String text, boolean selected, boolean focused) {
    final JPanel panel = new JPanel(new BorderLayout());
    final EditorTextField field = new EditorTextField(text, project, type) {
      @Override
      protected boolean shouldHaveBorder() {
        return false;
      }
    };

    Font font = EditorColorsManager.getInstance().getGlobalScheme().getFont(EditorFontType.PLAIN);
    font = new Font(font.getFontName(), font.getStyle(), 12);
    field.setFont(font);
    field.addSettingsProvider(EditorSettingsProvider.NO_WHITESPACE);

    if (selected && focused) {
      panel.setBackground(UIUtil.getTableSelectionBackground());
      field.setAsRendererWithSelection(UIUtil.getTableSelectionBackground(), UIUtil.getTableSelectionForeground());
    }
    else {
      panel.setBackground(UIUtil.getTableBackground());
      if (selected) {
        // Selected but unfocused: dotted outline instead of a filled selection background.
        panel.setBorder(new DottedBorder(UIUtil.getTableForeground()));
      }
    }

    panel.add(field, BorderLayout.WEST);
    return panel;
  }

  /**
   * Background thread that animates a row's height toward a target in 5px steps
   * (~15ms apart), posting each UI mutation to the EDT, then restores focus inside
   * the row editor (when one is given) and re-enables the scrollbar policy.
   */
  private static class RowResizeAnimator extends Thread {
    private final JTable myTable;
    private final int myRow;
    private final JScrollPane myScrollPane;
    // Target height to animate towards.
    private int neededHeight;
    // Editor to focus when the animation finishes; null when shrinking back.
    private final JBTableRowEditor myEditor;
    // Holds the focusable-component index to restore, or a null-valued Ref.
    private final Ref<Integer> myIndex;
    private int step = 5;
    private int currentHeight;

    private RowResizeAnimator(JTable table, int row, int height, JBTableRowEditor editor, @NotNull Ref<Integer> index) {
      super("Row Animator");
      myTable = table;
      myRow = row;
      neededHeight = height;
      myEditor = editor;
      myIndex = index;
      currentHeight = myTable.getRowHeight(myRow);
      myScrollPane = UIUtil.getParentOfType(JScrollPane.class, myTable);
    }

    @Override
    public void run() {
      // Shrinking means the editor is closing.
      final boolean exitEditing = currentHeight > neededHeight;
      try {
        sleep(50);

        // Suppress the vertical scrollbar while animating to avoid flicker.
        final JScrollBar bar = myScrollPane.getVerticalScrollBar();
        if (bar == null || !bar.isVisible()) {
          SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
              myScrollPane.setVerticalScrollBarPolicy(ScrollPaneConstants.VERTICAL_SCROLLBAR_NEVER);
            }
          });
          sleep(15);
        }
        while (currentHeight != neededHeight) {
          if (Math.abs(currentHeight - neededHeight) < step) {
            currentHeight = neededHeight;
          }
          else {
            currentHeight += currentHeight < neededHeight ? step : -step;
          }
          SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
              myTable.setRowHeight(myRow, currentHeight);
            }
          });
          sleep(15);
        }
        if (myEditor != null) {
          // Restore focus to the remembered component, falling back to the editor's preferred one.
          JComponent[] components = myEditor.getFocusableComponents();
          JComponent focus = null;
          if (myIndex.get() != null) {
            int index = myIndex.get().intValue();
            if (0 <= index && index < components.length) {
              focus = components[index];
            }
          }
          if (focus == null) {
            focus = myEditor.getPreferredFocusedComponent();
          }
          if (focus != null) {
            focus.requestFocus();
          }
        }
      }
      catch (InterruptedException ignore) {
      }
      finally {
        //noinspection SSBasedInspection
        SwingUtilities.invokeLater(new Runnable() {
          @Override
          public void run() {
            TableUtil.scrollSelectionToVisible(myTable);
            if (exitEditing && !myTable.isEditing()) {
              myScrollPane.setVerticalScrollBarPolicy(ScrollPaneConstants.VERTICAL_SCROLLBAR_AS_NEEDED);
            }
          }
        });
      }
    }
  }

  /**
   * Cell editor wrapping a {@link JBTableRowEditor}; animates row height when the
   * editor component is added/removed and remembers which editor child had focus.
   */
  private class MyCellEditor extends AbstractTableCellEditor implements Animated {
    JTable curTable;
    private final JBTableRowEditor myEditor;

    public MyCellEditor(JBTableRowEditor editor) {
      myEditor = editor;
      curTable = null;
    }

    @Override
    public Component getTableCellEditorComponent(final JTable table, Object value, boolean isSelected, final int row, int column) {
      curTable = table;
      final JPanel p = new JPanel(new BorderLayout()) {
        @Override
        public void addNotify() {
          super.addNotify();
          // Grow the row to fit the editor. Only keyboard-initiated edits
          // (myMouseEvent == null) restore the saved focus index.
          final int height = (int)getPreferredSize().getHeight();
          if (height > table.getRowHeight(row)) {
            new RowResizeAnimator(table, row, height, myEditor, myMouseEvent == null ? myLastEditorIndex : Ref.<Integer>create(null)).start();
          }
        }

        @Override
        public void removeNotify() {
          if (myCellEditor != null) myCellEditor.saveFocusIndex();
          super.removeNotify();
          // Shrink the row back to the table's default height.
          new RowResizeAnimator(table, row, table.getRowHeight(), null, myMouseEvent == null ? myLastEditorIndex : Ref.<Integer>create(null)).start();
        }
      };
      p.add(myEditor, BorderLayout.CENTER);
      p.setFocusable(false);
      return p;
    }

    @Override
    public Object getCellEditorValue() {
      return myEditor.getValue();
    }

    @Override
    public boolean stopCellEditing() {
      saveFocusIndex();
      return super.stopCellEditing();
    }

    // NOTE(review): not called anywhere in this class — possibly dead code; verify before removing.
    private void removeEmptyRow() {
      final int row = curTable.getSelectedRow();
      if (row != -1 && isRowEmpty(row)) {
        final JBListTableModel model = (JBListTableModel)curTable.getModel();
        final int count = model.getRowCount();
        model.removeRow(row);
        int newRow = count == row + 1 ? row - 1 : row;
        curTable.setRowSelectionInterval(newRow, newRow);
      }
    }

    /** Records the index of the focusable editor component that currently owns focus. */
    public void saveFocusIndex() {
      JComponent[] components = myEditor.getFocusableComponents();
      for (int i = 0; i < components.length; i++) {
        if (components[i].hasFocus()) {
          JBListTable.this.myLastEditorIndex.set(i);
          break;
        }
      }
    }

    @Override
    public void cancelCellEditing() {
      saveFocusIndex();
      super.cancelCellEditing();
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.cache.Cache;
import javax.cache.configuration.Factory;
import javax.cache.integration.CacheLoaderException;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.cache.CacheAtomicityMode;
import org.apache.ignite.cache.affinity.Affinity;
import org.apache.ignite.cache.store.CacheStore;
import org.apache.ignite.cache.store.CacheStoreAdapter;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.configuration.NearCacheConfiguration;
import org.apache.ignite.internal.processors.cache.store.CacheLocalStore;
import org.apache.ignite.internal.util.lang.GridAbsPredicate;
import org.apache.ignite.resources.IgniteInstanceResource;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.apache.ignite.transactions.Transaction;
import org.apache.ignite.transactions.TransactionConcurrency;
import org.jetbrains.annotations.Nullable;
import static org.apache.ignite.cache.CacheAtomicityMode.ATOMIC;
import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
import static org.apache.ignite.cache.CacheMode.PARTITIONED;
import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_SYNC;
import static org.apache.ignite.internal.IgniteNodeAttributes.ATTR_IGNITE_INSTANCE_NAME;
import static org.apache.ignite.transactions.TransactionIsolation.REPEATABLE_READ;
/**
*
*/
public abstract class CacheStoreUsageMultinodeAbstractTest extends GridCommonAbstractTest {
    /** Whether the next started grid runs in client mode. */
    protected boolean client;

    /** Whether started grids get a cache configuration. */
    protected boolean cache;

    /** Whether the cache is configured with a write-through store. */
    protected boolean cacheStore;

    /** Whether the store is the {@link CacheLocalStore} flavor. */
    protected boolean locStore;

    /** Whether write-behind is enabled on the store. */
    protected boolean writeBehind;

    /** Whether a near cache is configured. */
    protected boolean nearCache;

    /** Ignite instance name -> entries written to the store on that node. */
    protected static Map<String, List<Cache.Entry<?, ?>>> writeMap;

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);

        cfg.setClientMode(client);

        if (cache)
            cfg.setCacheConfiguration(cacheConfiguration());

        return cfg;
    }

    /**
     * @return Cache configuration: partitioned, 1 backup, FULL_SYNC, with store and
     *      near cache applied according to the test flags.
     */
    @SuppressWarnings("unchecked")
    protected CacheConfiguration cacheConfiguration() {
        CacheConfiguration ccfg = new CacheConfiguration(DEFAULT_CACHE_NAME);

        ccfg.setCacheMode(PARTITIONED);
        ccfg.setAtomicityMode(atomicityMode());
        ccfg.setBackups(1);
        ccfg.setWriteSynchronizationMode(FULL_SYNC);

        if (cacheStore) {
            if (writeBehind) {
                ccfg.setWriteBehindEnabled(true);
                ccfg.setWriteBehindFlushFrequency(100);
            }

            ccfg.setWriteThrough(true);

            ccfg.setCacheStoreFactory(locStore ? new TestLocalStoreFactory() : new TestStoreFactory());
        }

        if (nearCache)
            ccfg.setNearConfiguration(new NearCacheConfiguration());

        return ccfg;
    }

    /**
     * @return Cache atomicity mode.
     */
    protected abstract CacheAtomicityMode atomicityMode();

    /** {@inheritDoc} */
    @Override protected void beforeTest() throws Exception {
        super.beforeTest();

        // Fresh map per test so assertions only see writes made by this test.
        writeMap = new HashMap<>();
    }

    /**
     * Puts keys with various affinities (primary/backup/near, from server and client nodes)
     * and checks the store update happens on the expected node for each concurrency mode.
     *
     * @param clientStore {@code True} if store configured on client node.
     * @throws Exception If failed.
     */
    protected void checkStoreUpdate(boolean clientStore) throws Exception {
        Ignite client = grid(3);

        assertTrue(client.configuration().isClientMode());

        awaitPartitionMapExchange();

        IgniteCache<Object, Object> cache0 = ignite(0).cache(DEFAULT_CACHE_NAME);
        IgniteCache<Object, Object> cache1 = ignite(1).cache(DEFAULT_CACHE_NAME);
        IgniteCache<Object, Object> clientCache = client.cache(DEFAULT_CACHE_NAME);

        assertTrue(((IgniteCacheProxy)cache0).context().store().configured());
        assertEquals(clientStore, ((IgniteCacheProxy) clientCache).context().store().configured());

        List<TransactionConcurrency> tcList = new ArrayList<>();

        // null means "no explicit transaction".
        tcList.add(null);

        if (atomicityMode() == TRANSACTIONAL) {
            tcList.add(TransactionConcurrency.OPTIMISTIC);
            tcList.add(TransactionConcurrency.PESSIMISTIC);
        }

        log.info("Start test [atomicityMode=" + atomicityMode() +
            ", locStore=" + locStore +
            ", writeBehind=" + writeBehind +
            ", nearCache=" + nearCache +
            ", clientStore=" + clientStore + ']');

        for (TransactionConcurrency tc : tcList) {
            testStoreUpdate(cache0, primaryKey(cache0), tc);

            testStoreUpdate(cache0, backupKey(cache0), tc);

            testStoreUpdate(cache0, nearKey(cache0), tc);

            testStoreUpdate(cache0, primaryKey(cache1), tc);

            testStoreUpdate(clientCache, primaryKey(cache0), tc);

            testStoreUpdate(clientCache, primaryKey(cache1), tc);
        }
    }

    /**
     * Puts a single key and asserts the store write happened on the expected node:
     * the key's primary node for ATOMIC / local store / write-behind, otherwise the
     * node that initiated the put.
     *
     * @param cache Cache.
     * @param key Key.
     * @param tc Transaction concurrency mode.
     * @throws Exception If failed.
     */
    protected void testStoreUpdate(IgniteCache<Object, Object> cache,
       Object key,
       @Nullable TransactionConcurrency tc)
        throws Exception
    {
        boolean storeOnPrimary = atomicityMode() == ATOMIC || locStore || writeBehind;

        assertTrue(writeMap.isEmpty());

        Ignite ignite = cache.unwrap(Ignite.class);

        Affinity<Object> obj = ignite.affinity(cache.getName());

        ClusterNode node = obj.mapKeyToNode(key);

        assertNotNull(node);

        String expNode = storeOnPrimary ? (String)node.attribute(ATTR_IGNITE_INSTANCE_NAME) : ignite.name();

        assertNotNull(expNode);

        log.info("Put [node=" + ignite.name() +
            ", key=" + key +
            ", primary=" + node.attribute(ATTR_IGNITE_INSTANCE_NAME) +
            ", tx=" + tc +
            ", nearCache=" + (cache.getConfiguration(CacheConfiguration.class).getNearConfiguration() != null) +
            ", storeOnPrimary=" + storeOnPrimary + ']');

        Transaction tx = tc != null ? ignite.transactions().txStart(tc, REPEATABLE_READ) : null;

        try {
            cache.put(key, key);

            if (tx != null)
                tx.commit();
        }
        finally {
            if (tx != null)
                tx.close();
        }

        // The store may be updated asynchronously (e.g. write-behind), so wait for it.
        boolean wait = GridTestUtils.waitForCondition(new GridAbsPredicate() {
            @Override
            public boolean apply() {
                return !writeMap.isEmpty();
            }
        }, 1000);

        assertTrue("Store is not updated", wait);

        // With a local store, writes are expected on 2 nodes; otherwise exactly 1.
        assertEquals("Write on wrong node: " + writeMap, locStore ? 2 : 1, writeMap.size());

        if (!locStore)
            assertEquals(expNode, writeMap.keySet().iterator().next());

        writeMap.clear();
    }

    /**
     * Factory producing the regular {@link TestStore}.
     */
    public static class TestStoreFactory implements Factory<CacheStore> {
        /** {@inheritDoc} */
        @Override public CacheStore create() {
            return new TestStore();
        }
    }

    /**
     * Factory producing the local {@link TestLocalStore}.
     */
    public static class TestLocalStoreFactory implements Factory<CacheStore> {
        /** {@inheritDoc} */
        @Override public CacheStore create() {
            return new TestLocalStore();
        }
    }

    /**
     * Store that records every write into {@link #writeMap} under the local node name.
     */
    public static class TestStore extends CacheStoreAdapter<Object, Object> {
        /** Injected node instance, used to identify where the write happened. */
        @IgniteInstanceResource
        private Ignite ignite;

        /** {@inheritDoc} */
        @SuppressWarnings("SynchronizeOnNonFinalField")
        @Override public void write(Cache.Entry<?, ?> entry) {
            synchronized (writeMap) {
                ignite.log().info("Write [node=" + ignite.name() + ", entry=" + entry + ']');

                String name = ignite.name();

                List<Cache.Entry<?, ?>> list = writeMap.get(name);

                if (list == null) {
                    list = new ArrayList<>();

                    writeMap.put(name, list);
                }

                list.add(entry);
            }
        }

        /** {@inheritDoc} */
        @Override public Object load(Object key) throws CacheLoaderException {
            throw new UnsupportedOperationException();
        }

        /** {@inheritDoc} */
        @Override public void delete(Object key) {
            throw new UnsupportedOperationException();
        }
    }

    /**
     * Local-store flavor of {@link TestStore}; delete is a no-op.
     */
    @CacheLocalStore
    public static class TestLocalStore extends TestStore {
        /** {@inheritDoc} */
        @Override public void delete(Object key) {
            // No-op.
        }
    }
}
| |
/*
* Copyright (C) 2009 JavaRosa
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.javarosa.core.model.utils;
import org.javarosa.core.model.data.DateData;
import org.javarosa.core.model.data.DateTimeData;
import org.javarosa.core.model.data.IAnswerData;
import org.javarosa.core.model.data.StringData;
import org.javarosa.core.model.instance.TreeElement;
import org.javarosa.core.services.PropertyManager;
import org.javarosa.core.util.Map;
import org.javarosa.core.util.PropertyUtils;
import java.util.Date;
import java.util.List;
/**
* The Question Preloader is responsible for maintaining a set of handlers which are capable
* of parsing 'preload' elements, and their parameters, and returning IAnswerData objects.
*
* @author Clayton Sims
*
*/
public class QuestionPreloader {
    /* String -> IPreloadHandler: registered handlers keyed by the preload type they handle. */
    // NOTE: this is not java.util.Map!!!
    private Map<String, IPreloadHandler> preloadHandlers;

    /**
     * Creates a new Preloader with default handlers (date, property, timestamp, uid).
     */
    public QuestionPreloader() {
        preloadHandlers = new Map<String, IPreloadHandler>();
        initPreloadHandlers();
    }

    /**
     * Initializes the default preload handlers.
     */
    private void initPreloadHandlers() {
        // "date": preload-only; supports "today" and "prevperiod-..." parameters.
        IPreloadHandler date = new IPreloadHandler() {
            public String preloadHandled() {
                return "date";
            }

            public IAnswerData handlePreload(String preloadParams) {
                return preloadDate(preloadParams);
            }

            public boolean handlePostProcess(TreeElement node, String params) {
                //do nothing
                return false;
            }
        };

        // "property": reads a property on preload, writes the node value back on post-process.
        IPreloadHandler property = new IPreloadHandler() {
            public String preloadHandled() {
                return "property";
            }

            public IAnswerData handlePreload(String preloadParams) {
                return preloadProperty(preloadParams);
            }

            public boolean handlePostProcess(TreeElement node, String params) {
                saveProperty(params, node);
                return false;
            }
        };

        // "timestamp": "start" stamps at preload time, "end" stamps at post-process time.
        IPreloadHandler timestamp = new IPreloadHandler() {
            public String preloadHandled() {
                return "timestamp";
            }

            public IAnswerData handlePreload(String preloadParams) {
                return ("start".equals(preloadParams) ? getTimestamp() : null);
            }

            public boolean handlePostProcess(TreeElement node, String params) {
                if ("end".equals(params)) {
                    node.setAnswer(getTimestamp());
                    return true;
                } else {
                    return false;
                }
            }
        };

        // "uid": generates a fresh 25-character GUID on preload.
        IPreloadHandler uid = new IPreloadHandler() {
            public String preloadHandled() {
                return "uid";
            }

            public IAnswerData handlePreload(String preloadParams) {
                return new StringData(PropertyUtils.genGUID(25));
            }

            public boolean handlePostProcess(TreeElement node, String params) {
                return false;
            }
        };

        /*
        //TODO: Finish this up.
        IPreloadHandler meta = new IPreloadHandler() {
            public String preloadHandled() {
                return "meta";
            }

            public IAnswerData handlePreload(String preloadParams) {
                //TODO: Ideally, we want to handle this preloader by taking in the
                //existing structure. Resultantly, we don't want to mess with this.
                //We should be enforcing that we don't.
                return null;
            }

            public boolean handlePostProcess(TreeElement node, String params) {
                List kids = node.getChildren();
                Enumeration en = kids.elements();
                while(en.hasMoreElements()) {
                    TreeElement kid = (TreeElement)en.nextElement();
                    if(kid.getName().equals("uid")) {
                        kid.setValue(new StringData(PropertyUtils.genGUID(25)));
                    }
                }
                return true;
            }
        };
        */

        addPreloadHandler(date);
        addPreloadHandler(property);
        addPreloadHandler(timestamp);
        addPreloadHandler(uid);
    }

    /**
     * Adds a new preload handler to this preloader.
     *
     * @param handler an IPreloadHandler that can handle a preload type
     */
    public void addPreloadHandler(IPreloadHandler handler) {
        preloadHandlers.put(handler.preloadHandled(), handler);
    }

    /**
     * Returns the IAnswerData preload value for the given preload type and parameters
     *
     * @param preloadType The type of the preload to be returned
     * @param preloadParams Parameters for the preload handler
     * @return An IAnswerData corresponding to a pre-loaded value for the given
     * Arguments. null if no preload could successfully be derived due to either
     * the lack of a handler, or to invalid parameters
     */
    public IAnswerData getQuestionPreload(String preloadType, String preloadParams) {
        IPreloadHandler handler = preloadHandlers.get(preloadType);
        if (handler != null) {
            return handler.handlePreload(preloadParams);
        } else {
            System.err.println("Do not know how to handle preloader [" + preloadType + "]");
            return null;
        }
    }

    /**
     * Dispatches post-processing for a node to the handler registered for the preload type.
     *
     * @return true if the handler modified the node, false otherwise (or if no handler exists)
     */
    public boolean questionPostProcess(TreeElement node, String preloadType, String params) {
        IPreloadHandler handler = preloadHandlers.get(preloadType);
        if (handler != null) {
            return handler.handlePostProcess(node, params);
        } else {
            System.err.println("Do not know how to handle preloader [" + preloadType + "]");
            return false;
        }
    }

    /**
     * Preloads a DateData object for the preload type 'date'
     *
     * @param preloadParams The parameters determining the date
     * @return A preload date value if the parameters can be parsed,
     * null otherwise
     */
    private IAnswerData preloadDate(String preloadParams) {
        Date d = null;
        if (preloadParams.equals("today")) {
            d = new Date();
        } else if (preloadParams.startsWith("prevperiod-")) {
            // FIX: was preloadParams.substring(0, 11).equals("prevperiod-"), which threw
            // StringIndexOutOfBoundsException for any unrecognized param shorter than 11 chars.
            List<String> v = DateUtils.split(preloadParams.substring(11), "-", false);
            String[] params = v.toArray(new String[v.size()]);

            try {
                // Format: type-start-(head|tail)[-(x|)[-nAgo]]
                String type = params[0];
                String start = params[1];

                boolean beginning;
                if (params[2].equals("head")) beginning = true;
                else if (params[2].equals("tail")) beginning = false;
                else throw new RuntimeException();

                boolean includeToday;
                if (params.length >= 4) {
                    if (params[3].equals("x")) includeToday = true;
                    else if (params[3].equals("")) includeToday = false;
                    else throw new RuntimeException();
                } else {
                    includeToday = false;
                }

                int nAgo;
                if (params.length >= 5) {
                    nAgo = Integer.parseInt(params[4]);
                } else {
                    nAgo = 1;
                }

                d = DateUtils.getPastPeriodDate(new Date(), type, start, beginning, includeToday, nAgo);
            } catch (Exception e) {
                throw new IllegalArgumentException("invalid preload params for preload mode 'date'");
            }
        }

        // Honor the documented contract: return null when no date could be derived
        // (previously this wrapped a null Date in DateData for unrecognized params).
        return d == null ? null : new DateData(d);
    }

    /**
     * Preloads a StringData object for the preload type 'property'
     *
     * @param preloadParams The parameters determining the property to be retrieved
     * @return A preload property value if the parameters can be parsed,
     * null otherwise
     */
    private IAnswerData preloadProperty(String preloadParams) {
        String propname = preloadParams;
        String propval = PropertyManager._().getSingularProperty(propname);
        StringData data = null;
        if (propval != null && propval.length() > 0) {
            data = new StringData(propval);
        }
        return data;
    }

    /** Writes the node's display text back into the named property, when both are non-empty. */
    private void saveProperty(String propName, TreeElement node) {
        IAnswerData answer = node.getValue();
        String value = (answer == null ? null : answer.getDisplayText());
        if (propName != null && propName.length() > 0 && value != null && value.length() > 0)
            PropertyManager._().setProperty(propName, value);
    }

    /** @return a DateTimeData for the current moment. */
    private DateTimeData getTimestamp() {
        return new DateTimeData(new Date());
    }
}
| |
/*
* Copyright 2010-2013 Ning, Inc.
*
* Ning licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.killbill.billing.subscription.alignment;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import javax.annotation.Nullable;
import javax.inject.Inject;
import org.joda.time.DateTime;
import org.killbill.billing.ErrorCode;
import org.killbill.billing.callcontext.InternalTenantContext;
import org.killbill.billing.catalog.api.CatalogApiException;
import org.killbill.billing.catalog.api.Duration;
import org.killbill.billing.catalog.api.PhaseType;
import org.killbill.billing.catalog.api.Plan;
import org.killbill.billing.catalog.api.PlanAlignmentChange;
import org.killbill.billing.catalog.api.PlanAlignmentCreate;
import org.killbill.billing.catalog.api.PlanPhase;
import org.killbill.billing.catalog.api.PlanPhaseSpecifier;
import org.killbill.billing.catalog.api.PlanSpecifier;
import org.killbill.billing.entitlement.api.Entitlement.EntitlementState;
import org.killbill.billing.subscription.api.user.DefaultSubscriptionBase;
import org.killbill.billing.subscription.api.user.SubscriptionBaseApiException;
import org.killbill.billing.subscription.api.user.SubscriptionBaseTransition;
import org.killbill.billing.subscription.catalog.SubscriptionCatalog;
import org.killbill.billing.subscription.exceptions.SubscriptionBaseError;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
/**
* PlanAligner offers specific APIs to return the correct {@code TimedPhase} when creating, changing Plan or to compute
* next Phase on current Plan.
*/
public class PlanAligner extends BaseAligner {

    @Inject
    public PlanAligner() {
    }

    /**
     * Returns the current and next phase for the subscription in creation
     *
     * @param alignStartDate  the subscription (align) startDate for the subscription
     * @param bundleStartDate the bundle startDate used alignment
     * @param plan            the current Plan
     * @param initialPhase    the initialPhase on which we should create that subscription. can be null
     * @param priceList       the priceList
     * @param effectiveDate   the effective creation date (driven by the catalog policy, i.e. when the creation occurs)
     * @return the current and next phases
     * @throws CatalogApiException for catalog errors
     * @throws org.killbill.billing.subscription.api.user.SubscriptionBaseApiException for subscription errors
     */
    public TimedPhase[] getCurrentAndNextTimedPhaseOnCreate(final DateTime alignStartDate,
                                                            final DateTime bundleStartDate,
                                                            final Plan plan,
                                                            @Nullable final PhaseType initialPhase,
                                                            final String priceList,
                                                            final DateTime effectiveDate,
                                                            final SubscriptionCatalog catalog,
                                                            final InternalTenantContext context) throws CatalogApiException, SubscriptionBaseApiException {
        // Build the full phase timeline for the plan, then pick the phase active at
        // effectiveDate (CURRENT) and the one starting right after it (NEXT).
        final List<TimedPhase> timedPhases = getTimedPhaseOnCreate(alignStartDate,
                                                                   bundleStartDate,
                                                                   plan,
                                                                   initialPhase,
                                                                   catalog,
                                                                   effectiveDate,
                                                                   context);
        final TimedPhase[] result = new TimedPhase[2];
        result[0] = getTimedPhase(timedPhases, effectiveDate, WhichPhase.CURRENT);
        result[1] = getTimedPhase(timedPhases, effectiveDate, WhichPhase.NEXT);
        return result;
    }

    /**
     * Returns current Phase for that Plan change
     *
     * @param subscription            the subscription in change (only start date, bundle start date, current phase, plan and pricelist
     *                                are looked at)
     * @param plan                    the current Plan
     * @param effectiveDate           the effective change date (driven by the catalog policy, i.e. when the change occurs)
     * @param newPlanInitialPhaseType the phase on which to start when switching to new plan
     * @return the current phase
     * @throws CatalogApiException for catalog errors
     * @throws org.killbill.billing.subscription.api.user.SubscriptionBaseApiException for subscription errors
     */
    public TimedPhase getCurrentTimedPhaseOnChange(final DefaultSubscriptionBase subscription,
                                                   final Plan plan,
                                                   final DateTime effectiveDate,
                                                   final PhaseType newPlanInitialPhaseType,
                                                   final SubscriptionCatalog catalog,
                                                   final InternalTenantContext context) throws CatalogApiException, SubscriptionBaseApiException {
        return getTimedPhaseOnChange(subscription, plan, effectiveDate, newPlanInitialPhaseType, WhichPhase.CURRENT, catalog, context);
    }

    /**
     * Returns next Phase for that Plan change
     *
     * @param subscription            the subscription in change (only start date, bundle start date, current phase, plan and pricelist
     *                                are looked at)
     * @param plan                    the current Plan
     * @param effectiveDate           the effective change date (driven by the catalog policy, i.e. when the change occurs)
     * @param newPlanInitialPhaseType the phase on which to start when switching to new plan
     * @return the next phase
     * @throws CatalogApiException for catalog errors
     * @throws org.killbill.billing.subscription.api.user.SubscriptionBaseApiException for subscription errors
     */
    public TimedPhase getNextTimedPhaseOnChange(final DefaultSubscriptionBase subscription,
                                                final Plan plan,
                                                final DateTime effectiveDate,
                                                final PhaseType newPlanInitialPhaseType,
                                                final SubscriptionCatalog catalog,
                                                final InternalTenantContext context) throws CatalogApiException, SubscriptionBaseApiException {
        return getTimedPhaseOnChange(subscription, plan, effectiveDate, newPlanInitialPhaseType, WhichPhase.NEXT, catalog, context);
    }

    /**
     * Returns next Phase for that SubscriptionBase at a point in time
     *
     * @param subscription the subscription for which we need to compute the next Phase event
     * @return the next phase
     */
    public TimedPhase getNextTimedPhase(final DefaultSubscriptionBase subscription, final DateTime effectiveDate, final SubscriptionCatalog catalog, final InternalTenantContext context) {
        try {
            // Use the pending transition for not-yet-started subscriptions, otherwise the
            // last transition recorded for the current plan.
            final SubscriptionBaseTransition pendingOrLastPlanTransition;
            if (subscription.getState() == EntitlementState.PENDING) {
                pendingOrLastPlanTransition = subscription.getPendingTransition();
            } else {
                pendingOrLastPlanTransition = subscription.getLastTransitionForCurrentPlan();
            }
            switch (pendingOrLastPlanTransition.getTransitionType()) {
                // If we never had any Plan change, borrow the logic for createPlan alignment
                case CREATE:
                case TRANSFER:
                    final List<TimedPhase> timedPhases = getTimedPhaseOnCreate(subscription.getAlignStartDate(),
                                                                               subscription.getBundleStartDate(),
                                                                               pendingOrLastPlanTransition.getNextPlan(),
                                                                               pendingOrLastPlanTransition.getNextPhase().getPhaseType(),
                                                                               catalog,
                                                                               pendingOrLastPlanTransition.getEffectiveTransitionTime(),
                                                                               context);
                    return getTimedPhase(timedPhases, effectiveDate, WhichPhase.NEXT);
                case CHANGE:
                    // Replay the alignment of the last plan change: the previous plan/phase is
                    // the "from" side and the transition time serves as both the catalog
                    // effective date and the change effective date.
                    return getTimedPhaseOnChange(subscription.getAlignStartDate(),
                                                 subscription.getBundleStartDate(),
                                                 pendingOrLastPlanTransition.getPreviousPhase(),
                                                 pendingOrLastPlanTransition.getPreviousPlan(),
                                                 pendingOrLastPlanTransition.getNextPlan(),
                                                 effectiveDate,
                                                 pendingOrLastPlanTransition.getEffectiveTransitionTime(),
                                                 pendingOrLastPlanTransition.getEffectiveTransitionTime(),
                                                 subscription.getAllTransitions().get(0).getNextPhase().getPhaseType(),
                                                 null,
                                                 WhichPhase.NEXT,
                                                 catalog,
                                                 context);
                default:
                    throw new SubscriptionBaseError(String.format("Unexpected initial transition %s for current plan %s on subscription %s",
                                                                  pendingOrLastPlanTransition.getTransitionType(), subscription.getCurrentPlan(), subscription.getId()));
            }
        } catch (Exception /* SubscriptionBaseApiException, CatalogApiException */ e) {
            throw new SubscriptionBaseError(String.format("Could not compute next phase change for subscription %s", subscription.getId()), e);
        }
    }

    /**
     * Computes the full phase timeline for a subscription creation.
     *
     * <p>The timeline's start date depends on the catalog's create-alignment rule for the
     * plan: either the subscription's own (align) start date or the bundle start date.
     *
     * @param subscriptionStartDate the subscription (align) start date
     * @param bundleStartDate       the bundle start date
     * @param plan                  the plan whose phases are laid out
     * @param initialPhase          optional phase to start from (phases before it are skipped)
     * @param catalog               the subscription catalog
     * @param catalogEffectiveDate  the date used to resolve the catalog version
     * @param context               the tenant context (currently unused here; kept for symmetry with callers)
     * @return the ordered list of phases with their start dates
     */
    private List<TimedPhase> getTimedPhaseOnCreate(final DateTime subscriptionStartDate,
                                                   final DateTime bundleStartDate,
                                                   final Plan plan,
                                                   @Nullable final PhaseType initialPhase,
                                                   final SubscriptionCatalog catalog,
                                                   final DateTime catalogEffectiveDate,
                                                   final InternalTenantContext context)
            throws CatalogApiException, SubscriptionBaseApiException {
        final PlanSpecifier planSpecifier = new PlanSpecifier(plan.getName());
        final DateTime planStartDate;
        final PlanAlignmentCreate alignment = catalog.planCreateAlignment(planSpecifier, catalogEffectiveDate, subscriptionStartDate);
        switch (alignment) {
            case START_OF_SUBSCRIPTION:
                planStartDate = subscriptionStartDate;
                break;
            case START_OF_BUNDLE:
                planStartDate = bundleStartDate;
                break;
            default:
                throw new SubscriptionBaseError(String.format("Unknown PlanAlignmentCreate %s", alignment));
        }
        return getPhaseAlignments(plan, initialPhase, planStartDate);
    }

    /**
     * Convenience overload used while performing a plan change on a live subscription:
     * extracts the relevant dates/phases from the subscription and delegates to the
     * full {@code getTimedPhaseOnChange} variant.
     */
    private TimedPhase getTimedPhaseOnChange(final DefaultSubscriptionBase subscription,
                                             final Plan nextPlan,
                                             final DateTime effectiveDate,
                                             final PhaseType newPlanInitialPhaseType,
                                             final WhichPhase which,
                                             final SubscriptionCatalog catalog,
                                             final InternalTenantContext context) throws CatalogApiException, SubscriptionBaseApiException {
        final SubscriptionBaseTransition pendingOrLastPlanTransition;
        if (subscription.getState() == EntitlementState.PENDING) {
            pendingOrLastPlanTransition = subscription.getPendingTransition();
        } else {
            pendingOrLastPlanTransition = subscription.getLastTransitionForCurrentPlan();
        }
        return getTimedPhaseOnChange(subscription.getAlignStartDate(),
                                     subscription.getBundleStartDate(),
                                     pendingOrLastPlanTransition.getNextPhase(),
                                     pendingOrLastPlanTransition.getNextPlan(),
                                     nextPlan,
                                     effectiveDate,
                                     effectiveDate,
                                     // This method is only called while doing the change, hence we want to pass the change effective date
                                     effectiveDate,
                                     subscription.getAllTransitions().get(0).getNextPhase().getPhaseType(),
                                     newPlanInitialPhaseType,
                                     which,
                                     catalog,
                                     context);
    }

    /**
     * Core plan-change alignment: resolves the catalog's change alignment between the
     * current and next plan, derives the start date and initial phase for the new plan's
     * timeline, and returns the requested (CURRENT or NEXT) phase at effectiveDate.
     *
     * @param subscriptionStartDate            the subscription (align) start date
     * @param bundleStartDate                  the bundle start date
     * @param currentPhase                     the phase we are changing away from
     * @param currentPlan                      the plan we are changing away from
     * @param nextPlan                         the plan we are changing to
     * @param effectiveDate                    the date at which CURRENT/NEXT is evaluated
     * @param catalogEffectiveDate             the date used to resolve the catalog version
     * @param lastOrCurrentChangeEffectiveDate the change date used for CHANGE_OF_PLAN alignment
     * @param originalInitialPhase             the phase type the subscription originally started on
     * @param newPlanInitialPhaseType          optional explicit phase to start the new plan on
     * @param which                            whether to return the CURRENT or NEXT phase
     */
    private TimedPhase getTimedPhaseOnChange(final DateTime subscriptionStartDate,
                                             final DateTime bundleStartDate,
                                             final PlanPhase currentPhase,
                                             final Plan currentPlan,
                                             final Plan nextPlan,
                                             final DateTime effectiveDate,
                                             final DateTime catalogEffectiveDate,
                                             final DateTime lastOrCurrentChangeEffectiveDate,
                                             final PhaseType originalInitialPhase,
                                             @Nullable final PhaseType newPlanInitialPhaseType,
                                             final WhichPhase which,
                                             final SubscriptionCatalog catalog,
                                             final InternalTenantContext context) throws CatalogApiException, SubscriptionBaseApiException {
        final PlanPhaseSpecifier fromPlanPhaseSpecifier = new PlanPhaseSpecifier(currentPlan.getName(),
                                                                                 currentPhase.getPhaseType());
        final PlanSpecifier toPlanSpecifier = new PlanSpecifier(nextPlan.getName());
        final PhaseType initialPhase;
        final DateTime planStartDate;
        final PlanAlignmentChange alignment = catalog.getPlanChangeResult(fromPlanPhaseSpecifier, toPlanSpecifier, catalogEffectiveDate).getAlignment();
        switch (alignment) {
            case START_OF_SUBSCRIPTION:
                planStartDate = subscriptionStartDate;
                // Prefer an explicitly requested phase; otherwise keep the original initial
                // phase only if the new plan actually contains that phase type.
                initialPhase = newPlanInitialPhaseType != null ? newPlanInitialPhaseType :
                               (isPlanContainPhaseType(nextPlan, originalInitialPhase) ? originalInitialPhase : null);
                break;
            case START_OF_BUNDLE:
                planStartDate = bundleStartDate;
                initialPhase = newPlanInitialPhaseType != null ? newPlanInitialPhaseType :
                               (isPlanContainPhaseType(nextPlan, originalInitialPhase) ? originalInitialPhase : null);
                break;
            case CHANGE_OF_PLAN:
                // The new plan's timeline restarts at the change date.
                planStartDate = lastOrCurrentChangeEffectiveDate;
                initialPhase = newPlanInitialPhaseType;
                break;
            case CHANGE_OF_PRICELIST:
                throw new SubscriptionBaseError(String.format("Not implemented yet %s", alignment));
            default:
                throw new SubscriptionBaseError(String.format("Unknown PlanAlignmentChange %s", alignment));
        }
        final List<TimedPhase> timedPhases = getPhaseAlignments(nextPlan, initialPhase, planStartDate);
        return getTimedPhase(timedPhases, effectiveDate, which);
    }

    /**
     * Lays out the plan's phases on the time axis starting at {@code initialPhaseStartDate},
     * chaining each phase's duration onto the previous start. When {@code initialPhase} is
     * non-null, phases before it are skipped and the timeline starts at that phase.
     *
     * @throws SubscriptionBaseApiException if {@code initialPhase} is requested but not part of the plan
     */
    private List<TimedPhase> getPhaseAlignments(final Plan plan, @Nullable final PhaseType initialPhase, final DateTime initialPhaseStartDate) throws SubscriptionBaseApiException {
        if (plan == null) {
            return Collections.emptyList();
        }
        final List<TimedPhase> result = new LinkedList<TimedPhase>();
        // null curPhaseStart means "still skipping phases until we reach initialPhase".
        DateTime curPhaseStart = (initialPhase == null) ? initialPhaseStartDate : null;
        DateTime nextPhaseStart;
        for (final PlanPhase cur : plan.getAllPhases()) {
            // For create we can specify the phase so skip any phase until we reach initialPhase
            if (curPhaseStart == null) {
                if (initialPhase != cur.getPhaseType()) {
                    continue;
                }
                curPhaseStart = initialPhaseStartDate;
            }
            result.add(new TimedPhase(cur, curPhaseStart));
            // STEPH check for duration null instead TimeUnit UNLIMITED
            if (cur.getPhaseType() != PhaseType.EVERGREEN) {
                final Duration curPhaseDuration = cur.getDuration();
                nextPhaseStart = addDuration(curPhaseStart, curPhaseDuration);
                if (nextPhaseStart == null) {
                    // Only the final (EVERGREEN) phase may be unbounded.
                    throw new SubscriptionBaseError(String.format("Unexpected non ending UNLIMITED phase for plan %s",
                                                                  plan.getName()));
                }
                curPhaseStart = nextPhaseStart;
            }
        }
        if (initialPhase != null && curPhaseStart == null) {
            // We were asked to start on a phase the plan does not contain.
            throw new SubscriptionBaseApiException(ErrorCode.SUB_CREATE_BAD_PHASE, initialPhase);
        }
        return result;
    }

    // STEPH check for non evergreen Plans and what happens
    /**
     * Picks the phase active at {@code effectiveDate} (CURRENT) or the first phase
     * starting strictly after it (NEXT). Either may be null when no such phase exists.
     */
    private TimedPhase getTimedPhase(final List<TimedPhase> timedPhases, final DateTime effectiveDate, final WhichPhase which) {
        TimedPhase cur = null;
        TimedPhase next = null;
        for (final TimedPhase phase : timedPhases) {
            if (phase.getStartPhase().isAfter(effectiveDate)) {
                next = phase;
                break;
            }
            cur = phase;
        }
        switch (which) {
            case CURRENT:
                return cur;
            case NEXT:
                return next;
            default:
                throw new SubscriptionBaseError(String.format("Unexpected %s TimedPhase", which));
        }
    }

    /** Returns whether the plan contains a phase of the given type (null matches nothing). */
    private boolean isPlanContainPhaseType(final Plan plan, @Nullable final PhaseType phaseType) {
        return Iterables.any(ImmutableList.copyOf(plan.getAllPhases()), new Predicate<PlanPhase>() {
            @Override
            public boolean apply(final PlanPhase input) {
                return input.getPhaseType() == phaseType;
            }
        });
    }

    /** Selector for {@link #getTimedPhase(List, DateTime, WhichPhase)}. */
    private enum WhichPhase {
        CURRENT,
        NEXT
    }
}
| |
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver13;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
class OFGroupModFailedErrorMsgVer13 implements OFGroupModFailedErrorMsg {
private static final Logger logger = LoggerFactory.getLogger(OFGroupModFailedErrorMsgVer13.class);
// version: 1.3
final static byte WIRE_VERSION = 4;
final static int MINIMUM_LENGTH = 12;
private final static long DEFAULT_XID = 0x0L;
private final static OFErrorCauseData DEFAULT_DATA = OFErrorCauseData.NONE;
// OF message fields
private final long xid;
private final OFGroupModFailedCode code;
private final OFErrorCauseData data;
//
// package private constructor - used by readers, builders, and factory
OFGroupModFailedErrorMsgVer13(long xid, OFGroupModFailedCode code, OFErrorCauseData data) {
if(code == null) {
throw new NullPointerException("OFGroupModFailedErrorMsgVer13: property code cannot be null");
}
if(data == null) {
throw new NullPointerException("OFGroupModFailedErrorMsgVer13: property data cannot be null");
}
this.xid = xid;
this.code = code;
this.data = data;
}
// Accessors for OF message fields
@Override
public OFVersion getVersion() {
return OFVersion.OF_13;
}
@Override
public OFType getType() {
return OFType.ERROR;
}
@Override
public long getXid() {
return xid;
}
@Override
public OFErrorType getErrType() {
return OFErrorType.GROUP_MOD_FAILED;
}
@Override
public OFGroupModFailedCode getCode() {
return code;
}
@Override
public OFErrorCauseData getData() {
return data;
}
public OFGroupModFailedErrorMsg.Builder createBuilder() {
return new BuilderWithParent(this);
}
static class BuilderWithParent implements OFGroupModFailedErrorMsg.Builder {
final OFGroupModFailedErrorMsgVer13 parentMessage;
// OF message fields
private boolean xidSet;
private long xid;
private boolean codeSet;
private OFGroupModFailedCode code;
private boolean dataSet;
private OFErrorCauseData data;
BuilderWithParent(OFGroupModFailedErrorMsgVer13 parentMessage) {
this.parentMessage = parentMessage;
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_13;
}
@Override
public OFType getType() {
return OFType.ERROR;
}
@Override
public long getXid() {
return xid;
}
@Override
public OFGroupModFailedErrorMsg.Builder setXid(long xid) {
this.xid = xid;
this.xidSet = true;
return this;
}
@Override
public OFErrorType getErrType() {
return OFErrorType.GROUP_MOD_FAILED;
}
@Override
public OFGroupModFailedCode getCode() {
return code;
}
@Override
public OFGroupModFailedErrorMsg.Builder setCode(OFGroupModFailedCode code) {
this.code = code;
this.codeSet = true;
return this;
}
@Override
public OFErrorCauseData getData() {
return data;
}
@Override
public OFGroupModFailedErrorMsg.Builder setData(OFErrorCauseData data) {
this.data = data;
this.dataSet = true;
return this;
}
@Override
public OFGroupModFailedErrorMsg build() {
long xid = this.xidSet ? this.xid : parentMessage.xid;
OFGroupModFailedCode code = this.codeSet ? this.code : parentMessage.code;
if(code == null)
throw new NullPointerException("Property code must not be null");
OFErrorCauseData data = this.dataSet ? this.data : parentMessage.data;
if(data == null)
throw new NullPointerException("Property data must not be null");
//
return new OFGroupModFailedErrorMsgVer13(
xid,
code,
data
);
}
}
static class Builder implements OFGroupModFailedErrorMsg.Builder {
// OF message fields
private boolean xidSet;
private long xid;
private boolean codeSet;
private OFGroupModFailedCode code;
private boolean dataSet;
private OFErrorCauseData data;
@Override
public OFVersion getVersion() {
return OFVersion.OF_13;
}
@Override
public OFType getType() {
return OFType.ERROR;
}
@Override
public long getXid() {
return xid;
}
@Override
public OFGroupModFailedErrorMsg.Builder setXid(long xid) {
this.xid = xid;
this.xidSet = true;
return this;
}
@Override
public OFErrorType getErrType() {
return OFErrorType.GROUP_MOD_FAILED;
}
@Override
public OFGroupModFailedCode getCode() {
return code;
}
@Override
public OFGroupModFailedErrorMsg.Builder setCode(OFGroupModFailedCode code) {
this.code = code;
this.codeSet = true;
return this;
}
@Override
public OFErrorCauseData getData() {
return data;
}
@Override
public OFGroupModFailedErrorMsg.Builder setData(OFErrorCauseData data) {
this.data = data;
this.dataSet = true;
return this;
}
//
@Override
public OFGroupModFailedErrorMsg build() {
long xid = this.xidSet ? this.xid : DEFAULT_XID;
if(!this.codeSet)
throw new IllegalStateException("Property code doesn't have default value -- must be set");
if(code == null)
throw new NullPointerException("Property code must not be null");
OFErrorCauseData data = this.dataSet ? this.data : DEFAULT_DATA;
if(data == null)
throw new NullPointerException("Property data must not be null");
return new OFGroupModFailedErrorMsgVer13(
xid,
code,
data
);
}
}
final static Reader READER = new Reader();
static class Reader implements OFMessageReader<OFGroupModFailedErrorMsg> {
@Override
public OFGroupModFailedErrorMsg readFrom(ByteBuf bb) throws OFParseError {
int start = bb.readerIndex();
// fixed value property version == 4
byte version = bb.readByte();
if(version != (byte) 0x4)
throw new OFParseError("Wrong version: Expected=OFVersion.OF_13(4), got="+version);
// fixed value property type == 1
byte type = bb.readByte();
if(type != (byte) 0x1)
throw new OFParseError("Wrong type: Expected=OFType.ERROR(1), got="+type);
int length = U16.f(bb.readShort());
if(length < MINIMUM_LENGTH)
throw new OFParseError("Wrong length: Expected to be >= " + MINIMUM_LENGTH + ", was: " + length);
if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
// Buffer does not have all data yet
bb.readerIndex(start);
return null;
}
if(logger.isTraceEnabled())
logger.trace("readFrom - length={}", length);
long xid = U32.f(bb.readInt());
// fixed value property errType == 6
short errType = bb.readShort();
if(errType != (short) 0x6)
throw new OFParseError("Wrong errType: Expected=OFErrorType.GROUP_MOD_FAILED(6), got="+errType);
OFGroupModFailedCode code = OFGroupModFailedCodeSerializerVer13.readFrom(bb);
OFErrorCauseData data = OFErrorCauseData.read(bb, length - (bb.readerIndex() - start), OFVersion.OF_13);
OFGroupModFailedErrorMsgVer13 groupModFailedErrorMsgVer13 = new OFGroupModFailedErrorMsgVer13(
xid,
code,
data
);
if(logger.isTraceEnabled())
logger.trace("readFrom - read={}", groupModFailedErrorMsgVer13);
return groupModFailedErrorMsgVer13;
}
}
public void putTo(PrimitiveSink sink) {
FUNNEL.funnel(this, sink);
}
final static OFGroupModFailedErrorMsgVer13Funnel FUNNEL = new OFGroupModFailedErrorMsgVer13Funnel();
static class OFGroupModFailedErrorMsgVer13Funnel implements Funnel<OFGroupModFailedErrorMsgVer13> {
private static final long serialVersionUID = 1L;
@Override
public void funnel(OFGroupModFailedErrorMsgVer13 message, PrimitiveSink sink) {
// fixed value property version = 4
sink.putByte((byte) 0x4);
// fixed value property type = 1
sink.putByte((byte) 0x1);
// FIXME: skip funnel of length
sink.putLong(message.xid);
// fixed value property errType = 6
sink.putShort((short) 0x6);
OFGroupModFailedCodeSerializerVer13.putTo(message.code, sink);
message.data.putTo(sink);
}
}
public void writeTo(ByteBuf bb) {
WRITER.write(bb, this);
}
final static Writer WRITER = new Writer();
static class Writer implements OFMessageWriter<OFGroupModFailedErrorMsgVer13> {
@Override
public void write(ByteBuf bb, OFGroupModFailedErrorMsgVer13 message) {
int startIndex = bb.writerIndex();
// fixed value property version = 4
bb.writeByte((byte) 0x4);
// fixed value property type = 1
bb.writeByte((byte) 0x1);
// length is length of variable message, will be updated at the end
int lengthIndex = bb.writerIndex();
bb.writeShort(U16.t(0));
bb.writeInt(U32.t(message.xid));
// fixed value property errType = 6
bb.writeShort((short) 0x6);
OFGroupModFailedCodeSerializerVer13.writeTo(bb, message.code);
message.data.writeTo(bb);
// update length field
int length = bb.writerIndex() - startIndex;
bb.setShort(lengthIndex, length);
}
}
@Override
public String toString() {
StringBuilder b = new StringBuilder("OFGroupModFailedErrorMsgVer13(");
b.append("xid=").append(xid);
b.append(", ");
b.append("code=").append(code);
b.append(", ");
b.append("data=").append(data);
b.append(")");
return b.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
OFGroupModFailedErrorMsgVer13 other = (OFGroupModFailedErrorMsgVer13) obj;
if( xid != other.xid)
return false;
if (code == null) {
if (other.code != null)
return false;
} else if (!code.equals(other.code))
return false;
if (data == null) {
if (other.data != null)
return false;
} else if (!data.equals(other.data))
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * (int) (xid ^ (xid >>> 32));
result = prime * result + ((code == null) ? 0 : code.hashCode());
result = prime * result + ((data == null) ? 0 : data.hashCode());
return result;
}
}
| |
package ru.istolbov.tracker;
import java.util.ArrayList;
import java.util.List;
/**
* Class MenuTracker.
* Created by istolbov on 21.01.2017.
*/
public class MenuTracker {
    /**
     * Identity mapping (0..n-1) of the menu positions shown to the user.
     * Built lazily; {@link #show()} re-sizes it from the printed actions.
     */
    private int[] menuRange;
    /**
     * input.
     */
    private final Input input;
    /**
     * tracker.
     */
    private final Tracker tracker;
    /**
     * Registered menu actions, indexed by their menu key.
     */
    private final List<UserAction> actions = new ArrayList<>();

    /**
     * constructor.
     * @param input - input
     * @param tracker - tracker
     */
    public MenuTracker(Input input, Tracker tracker) {
        this.input = input;
        this.tracker = tracker;
    }

    /**
     * Registers the standard set of menu actions, in menu-key order.
     */
    public void fillActions() {
        this.actions.add(new AddItem("Add the new item."));
        this.actions.add(new ShowItems("Show all items."));
        this.actions.add(new EditItem("Edit the item."));
        this.actions.add(new DeleteItem("Delete the item."));
        this.actions.add(new FindItemByName("Find the item by name."));
        this.actions.add(new FindItemByID("Find the item by id."));
    }

    /**
     * Executes the action registered at the given menu position.
     * @param key - menu position of the action to run; an out-of-range key
     *              raises IndexOutOfBoundsException (unchanged behavior)
     */
    public void select(int key) {
        this.actions.get(key).execute(this.input, this.tracker);
    }

    /**
     * Prints the info line of every registered action and re-sizes the
     * menu range to the number of printed entries.
     */
    public void show() {
        int count = 0;
        for (UserAction action : this.actions) {
            if (action != null) {
                count++;
                System.out.println(action.info());
            }
        }
        this.menuRange = new int[count];
    }

    /**
     * Returns the valid menu positions as an identity array {0, 1, ..., n-1}.
     * FIX: previously NPEd when called before {@link #show()}; now the range is
     * lazily sized from the registered actions in that case.
     * @return menuRange
     */
    public int[] getMenuRange() {
        if (this.menuRange == null) {
            this.menuRange = new int[this.actions.size()];
        }
        for (int i = 0; i < this.menuRange.length; i++) {
            this.menuRange[i] = i;
        }
        return this.menuRange;
    }

    /**
     * Menu action: prompts for name/description and adds a new task.
     */
    private class AddItem extends BaseAction implements UserAction {
        /**
         * constructor.
         * @param name - name
         */
        AddItem(String name) {
            super(name);
        }
        /**
         * key.
         * @return 0
         */
        public int key() {
            return 0;
        }
        /**
         * execute.
         * @param input - input
         * @param tracker - tracker
         */
        public void execute(Input input, Tracker tracker) {
            String name = input.ask("Please, enter the task's name: ");
            String desc = input.ask("Please, enter the task's desc: ");
            tracker.add(new Task(name, desc));
        }
    }

    /**
     * Menu action: prints id and name of every stored item.
     */
    private class ShowItems extends BaseAction implements UserAction {
        /**
         * constructor.
         * @param name - name
         */
        ShowItems(String name) {
            super(name);
        }
        /**
         * key.
         * @return 1
         */
        public int key() {
            return 1;
        }
        /**
         * execute.
         * @param input - input
         * @param tracker - tracker
         */
        public void execute(Input input, Tracker tracker) {
            for (Item item : tracker.findAll()) {
                System.out.println(String.format("%s. %s", item.getId(), item.getName()));
            }
        }
    }

    /**
     * Menu action: replaces an existing task (looked up by id) with new data.
     */
    private class EditItem extends BaseAction implements UserAction {
        /**
         * constructor.
         * @param name - name
         */
        EditItem(String name) {
            super(name);
        }
        /**
         * key.
         * @return 2
         */
        public int key() {
            return 2;
        }
        /**
         * execute.
         * @param input - input
         * @param tracker - tracker
         */
        public void execute(Input input, Tracker tracker) {
            String id = input.ask("Please, enter the task's id: ");
            if (tracker.findById(id) != null) {
                String name = input.ask("Please, enter the task's name: ");
                String desc = input.ask("Please, enter the task's desc: ");
                Task task = new Task(name, desc);
                task.setId(id);
                tracker.update(task);
            } else {
                System.out.println("The task is not found.");
            }
        }
    }

    /**
     * Menu action: deletes the task with the given id, if present.
     */
    private class DeleteItem extends BaseAction implements UserAction {
        /**
         * constructor.
         * @param name - name
         */
        DeleteItem(String name) {
            super(name);
        }
        /**
         * key.
         * @return 3
         */
        public int key() {
            return 3;
        }
        /**
         * execute.
         * @param input - input
         * @param tracker - tracker
         */
        public void execute(Input input, Tracker tracker) {
            String id = input.ask("Please, enter the task's id: ");
            Item item = tracker.findById(id);
            if (item != null) {
                // FIX: reuse the item already fetched instead of a redundant
                // second findById() lookup.
                tracker.delete(item);
            } else {
                System.out.println("The task is not found.");
            }
        }
    }

    /**
     * Menu action: prints every item matching the given name.
     */
    private class FindItemByName extends BaseAction implements UserAction {
        /**
         * constructor.
         * @param name - name
         */
        FindItemByName(String name) {
            super(name);
        }
        /**
         * key.
         * @return 4
         */
        public int key() {
            return 4;
        }
        /**
         * execute.
         * @param input - input
         * @param tracker - tracker
         */
        public void execute(Input input, Tracker tracker) {
            String name = input.ask("Please, enter the task's name: ");
            for (Item item : tracker.findByName(name)) {
                System.out.println(String.format("%s. %s", item.getId(), item.getName()));
            }
        }
    }

    /**
     * Menu action: prints the item with the given id, if present.
     */
    private class FindItemByID extends BaseAction implements UserAction {
        /**
         * constructor.
         * @param name - name
         */
        FindItemByID(String name) {
            super(name);
        }
        /**
         * key.
         * @return 5
         */
        public int key() {
            return 5;
        }
        /**
         * execute.
         * @param input - input
         * @param tracker - tracker
         */
        public void execute(Input input, Tracker tracker) {
            String id = input.ask("Please, enter the task's id: ");
            Item item = tracker.findById(id);
            if (item != null) {
                System.out.println(String.format("%s. %s", item.getId(), item.getName()));
            } else {
                System.out.println("The task is not found.");
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.eviction;
import static org.apache.geode.internal.JvmSizeUtils.roundUpSize;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import java.io.File;
import java.util.Map;
import java.util.Properties;
import org.junit.After;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.apache.geode.cache.AttributesFactory;
import org.apache.geode.cache.Cache;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.cache.EvictionAction;
import org.apache.geode.cache.EvictionAlgorithm;
import org.apache.geode.cache.EvictionAttributes;
import org.apache.geode.cache.PartitionAttributesFactory;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.util.ObjectSizer;
import org.apache.geode.distributed.DistributedSystem;
import org.apache.geode.internal.JvmSizeUtils;
import org.apache.geode.internal.cache.BucketRegion;
import org.apache.geode.internal.cache.CachedDeserializableFactory;
import org.apache.geode.internal.cache.PartitionedRegion;
import org.apache.geode.internal.cache.RegionMap;
import org.apache.geode.internal.cache.TestNonSizerObject;
import org.apache.geode.internal.cache.TestObjectSizerImpl;
import org.apache.geode.internal.cache.entries.AbstractLRURegionEntry;
import org.apache.geode.internal.size.Sizeable;
import org.apache.geode.logging.internal.OSProcess;
import org.apache.geode.test.dunit.LogWriterUtils;
import org.apache.geode.test.dunit.cache.CacheTestCase;
import org.apache.geode.test.junit.categories.EvictionTest;
@Category({EvictionTest.class})
@SuppressWarnings("serial")
public class EvictionObjectSizerDUnitTest extends CacheTestCase {
private static final int maxEnteries = 20;
private static final int maxSizeInMb = 20;
private static Cache cache;
private static Region region;
@After
public void tearDown() throws Exception {
  // Close the cache opened by the scenario helpers, then drop the static
  // handles so the next test method starts from a clean slate.
  if (cache == null) {
    region = null;
    return;
  }
  cache.close();
  cache = null;
  region = null;
}
/**
* Without object sizer
*/
@Test
public void testWithoutObjectSizerForHeapLRU() throws Exception {
  // No ObjectSizer configured: heap-LRU uses its built-in sizing.
  // Expected per-entry size =
  //   key (Integer, inlined into the entry => 0 bytes)
  // + value (1 MB byte[]: object header + 4-byte array length + payload,
  //   rounded up to the nearest word by roundUpSize)
  // + the eviction controller's fixed per-entry overhead.
  prepareScenario(EvictionAlgorithm.LRU_HEAP, null);
  putData("PR1", 2, 1);
  int keySize = 0;
  int valueSize =
      JvmSizeUtils.getObjectHeaderSize() + 4 /* array length */ + (1024 * 1024) /* bytes */;
  valueSize = roundUpSize(valueSize);
  int entrySize = keySize + valueSize
      + ((HeapLRUController) ((PartitionedRegion) region).getEvictionController())
          .getPerEntryOverhead();
  verifySize("PR1", 2, entrySize);
  // Two entries were put, so the controller's byte counter must be exactly
  // twice the computed per-entry size.
  assertEquals(2 * entrySize,
      ((PartitionedRegion) region).getEvictionController().getCounters().getCounter());
}
/**
* With object sizer for standard objects.Key -Integer Value ByteArray
*
*/
@Test
public void testObjectSizerForHeapLRU_StandardObjects() throws Exception {
  // A TestObjectSizerImpl is configured, but the values here are standard
  // byte arrays, so the expected per-entry size follows the same recipe as
  // the no-sizer case: inlined Integer key (0) + sized byte[] + controller
  // per-entry overhead.
  prepareScenario(EvictionAlgorithm.LRU_HEAP, new TestObjectSizerImpl());
  // First round: 1 MB values.
  putData("PR1", 2, 1);
  {
    int keySize = 0;
    int valueSize =
        JvmSizeUtils.getObjectHeaderSize() + 4 /* array length */ + (1024 * 1024) /* bytes */;
    valueSize = roundUpSize(valueSize);
    int entrySize = keySize + valueSize
        + ((HeapLRUController) ((PartitionedRegion) region).getEvictionController())
            .getPerEntryOverhead();
    verifySize("PR1", 2, entrySize);
  }
  // Second round: the same keys are overwritten with 2 MB values, so the
  // tracked entry size must grow accordingly.
  {
    putData("PR1", 2, 2);
    int keySize = 0;
    int valueSize = JvmSizeUtils.getObjectHeaderSize() + 4 /* array length */
        + (1024 * 1024 * 2) /* bytes */;
    valueSize = roundUpSize(valueSize);
    int entrySize = keySize + valueSize
        + ((HeapLRUController) ((PartitionedRegion) region).getEvictionController())
            .getPerEntryOverhead();
    verifySize("PR1", 2, entrySize);
  }
}
/**
* With object sizer for customized value object implementing ObjectSizer .Key -Integer Value
* TestNonSizerObject
*/
@Test
public void testObjectSizerForHeapLRU_CustomizedNonSizerObject() throws Exception {
prepareScenario(EvictionAlgorithm.LRU_HEAP, new TestObjectSizerImpl());
// Size of overhead= 49
// Size of Integer key= 0(inlined)
// Size of byte array 0 + size of overhead(16)
// Total Size of each entry should be= 54
putCustomizedData(1, new byte[0]);
{
int keySize = 0;
int valueSize = JvmSizeUtils.getObjectHeaderSize() + 4 /* array length */ + 0 /* bytes */;
valueSize = roundUpSize(valueSize);
int entrySize = keySize + valueSize
+ ((HeapLRUController) ((PartitionedRegion) region).getEvictionController())
.getPerEntryOverhead();
assertEquals(entrySize, getSizeOfCustomizedData(1));
}
// Size of overhead= 49
// Size of Integer key= 0(inlined)
// Size of byte array 4 + size of overhead(12)
// Total Size of each entry should be= 59
putCustomizedData(2, new byte[4]);
{
int keySize = 0;
int valueSize = JvmSizeUtils.getObjectHeaderSize() + 4 /* array length */ + 4 /* bytes */;
valueSize = roundUpSize(valueSize);
int entrySize = keySize + valueSize
+ ((HeapLRUController) ((PartitionedRegion) region).getEvictionController())
.getPerEntryOverhead();
assertEquals(entrySize, getSizeOfCustomizedData(2));
}
}
  /**
   * With an object sizer for a customized value object implementing ObjectSizer. Key: Integer,
   * value: TestObjectSizerImpl.
   */
  @Test
  public void testObjectSizerForHeapLRU_CustomizedSizerObject() throws Exception {
    prepareScenario(EvictionAlgorithm.LRU_HEAP, new TestObjectSizerImpl());
    // Key: Integer, inlined into the entry (contributes 0 bytes).
    // Value: TestObjectSizerImpl; 156 below is its hard-coded serialized size (it was 160 before
    // the package moved to org.apache.geode).
    putCustomizedData(1, new TestObjectSizerImpl());
    // expected = key(0) + serialized value(156) + two per-object overheads + per-entry overhead
    // from the region's eviction controller.
    int expected = (0 + 156 + (Sizeable.PER_OBJECT_OVERHEAD * 2)
        + ((HeapLRUController) ((PartitionedRegion) region).getEvictionController())
            .getPerEntryOverhead());
    assertEquals(expected, getSizeOfCustomizedData(1));
    // Exactly one entry was put, so the eviction counter should equal one entry size.
    assertEquals(expected,
        ((PartitionedRegion) region).getEvictionController().getCounters().getCounter());
  }
  /**
   * With object sizers for both customized key and value objects. Key: TestNonSizerObject (sized
   * by the configured sizer), value: TestObjectSizerImpl (self-sizing).
   */
  @Test
  public void testObjectSizerForHeapLRU_CustomizedSizerObjects() throws Exception {
    prepareScenario(EvictionAlgorithm.LRU_HEAP, new TestObjectSizerImpl());
    // Key: TestNonSizerObject("1"), sized as 1 byte by the configured sizer.
    // Value: TestObjectSizerImpl; 156 below is its hard-coded serialized size (it was 160 before
    // the package moved to org.apache.geode).
    putCustomizedObjects(new TestNonSizerObject("1"), new TestObjectSizerImpl());
    // expected = key(1) + serialized value(156) + two per-object overheads + per-entry overhead.
    int expected = (1 + 156 + (Sizeable.PER_OBJECT_OVERHEAD * 2)
        + ((HeapLRUController) ((PartitionedRegion) region).getEvictionController())
            .getPerEntryOverhead());
    assertEquals(expected, getSizeOfCustomizedObject(new TestNonSizerObject("1")));
    // Exactly one entry was put, so the eviction counter should equal one entry size.
    assertEquals(expected,
        ((PartitionedRegion) region).getEvictionController().getCounters().getCounter());
  }
  /** Creates the cache and an eviction-enabled partitioned region "PR1" for one test run. */
  private void prepareScenario(EvictionAlgorithm evictionAlgorithm, ObjectSizer sizer) {
    createMyCache();
    createPartitionedRegion(true, evictionAlgorithm, "PR1", 1, 1, 10000, sizer);
  }

  /** Builds a fresh cache with heap eviction triggered at 50% heap usage. */
  private void createMyCache() {
    Properties props = new Properties();
    DistributedSystem ds = getSystem(props);
    assertNotNull(ds);
    // Disconnect and reconnect, presumably to guarantee a clean DistributedSystem for this
    // test — NOTE(review): confirm the reconnect is still required.
    ds.disconnect();
    ds = getSystem(props);
    cache = CacheFactory.create(ds);
    cache.getResourceManager().setEvictionHeapPercentage(50);
  }
  /**
   * Creates a partitioned region with the requested eviction configuration and stores it in the
   * static {@code region} field.
   *
   * @param setEvictionOn whether to configure eviction attributes at all.
   * @param evictionAlgorithm LRU_HEAP, LRU_MEMORY, or (fallback) entry-count LRU.
   * @param regionName name of the region to create.
   * @param totalNoOfBuckets bucket count; exactly 4 buckets means zero redundant copies,
   *        any other count means one redundant copy.
   * @param evictionAction 1 = LOCAL_DESTROY, anything else = OVERFLOW_TO_DISK
   *        (2 additionally creates a synchronous disk store).
   * @param evictorInterval unused in this body — kept for signature compatibility with callers.
   * @param sizer object sizer handed to the heap/memory eviction attributes.
   */
  private static void createPartitionedRegion(boolean setEvictionOn,
      EvictionAlgorithm evictionAlgorithm, String regionName, int totalNoOfBuckets,
      int evictionAction, int evictorInterval, ObjectSizer sizer) {
    final AttributesFactory factory = new AttributesFactory();
    PartitionAttributesFactory partitionAttributesFactory = new PartitionAttributesFactory()
        .setRedundantCopies(totalNoOfBuckets == 4 ? 0 : 1).setTotalNumBuckets(totalNoOfBuckets);
    factory.setConcurrencyChecksEnabled(false);
    factory.setPartitionAttributes(partitionAttributesFactory.create());
    if (setEvictionOn) {
      if (evictionAlgorithm.isLRUHeap()) {
        factory.setEvictionAttributes(EvictionAttributes.createLRUHeapAttributes(sizer,
            evictionAction == 1 ? EvictionAction.LOCAL_DESTROY : EvictionAction.OVERFLOW_TO_DISK));
      } else if (evictionAlgorithm.isLRUMemory()) {
        factory.setEvictionAttributes(EvictionAttributes.createLRUMemoryAttributes(maxSizeInMb,
            sizer,
            evictionAction == 1 ? EvictionAction.LOCAL_DESTROY : EvictionAction.OVERFLOW_TO_DISK));
      } else {
        // Entry-count LRU does not take a sizer.
        factory.setEvictionAttributes(EvictionAttributes.createLRUEntryAttributes(maxEnteries,
            evictionAction == 1 ? EvictionAction.LOCAL_DESTROY : EvictionAction.OVERFLOW_TO_DISK));
      }
      if (evictionAction == 2) {
        // Overflow-to-disk needs a backing disk store; one directory per test process.
        factory.setDiskSynchronous(true);
        final File[] diskDirs = new File[1];
        diskDirs[0] =
            new File("Partitioned_Region_Eviction/" + "LogFile" + "_" + OSProcess.getId());
        diskDirs[0].mkdirs();
        factory.setDiskStoreName(cache.createDiskStoreFactory().setDiskDirs(diskDirs)
            .create("EvictionObjectSizerDUnitTest").getName());
      }
    }
    region = cache.createRegion(regionName, factory.create());
    assertNotNull(region);
    LogWriterUtils.getLogWriter().info("Partitioned Region created Successfully :" + region);
  }
/**
* returns data size in bytes
*/
private static int putData(final String regionName, final int noOfElememts,
final int sizeOfElement) {
int result = 0;
final Region pr = cache.getRegion(regionName);
for (int counter = 1; counter <= noOfElememts; counter++) {
byte[] baValue = new byte[sizeOfElement * 1024 * 1024];
int baSize = CachedDeserializableFactory.getByteSize(baValue);
result += baSize;
pr.put(counter, baValue);
}
return result;
}
private static void verifySize(String regionName, int noOfElememts, int entrySize) {
final Region pr = cache.getRegion(regionName);
for (final Map.Entry<Integer, BucketRegion> integerBucketRegionEntry : ((PartitionedRegion) pr)
.getDataStore()
.getAllLocalBuckets()) {
final Map.Entry entry = (Map.Entry) integerBucketRegionEntry;
final BucketRegion bucketRegion = (BucketRegion) entry.getValue();
if (bucketRegion == null) {
continue;
} else {
RegionMap map = bucketRegion.getRegionMap();
if (map == null || map.size() == 0) {
continue;
}
LogWriterUtils.getLogWriter().info("Checking for entry in bucket region: " + bucketRegion);
for (int counter = 1; counter <= noOfElememts; counter++) {
assertEquals(entrySize,
((AbstractLRURegionEntry) map.getEntry(counter)).getEntrySize());
}
}
}
}
  /** Puts a single value into region "PR1" under the given Integer key. */
  private void putCustomizedData(int counter, Object object) {
    final Region pr = cache.getRegion("PR1");
    pr.put(counter, object);
  }

  /** Puts a single key/value pair into region "PR1". */
  private void putCustomizedObjects(Object key, Object value) {
    final Region pr = cache.getRegion("PR1");
    pr.put(key, value);
  }
private int getSizeOfCustomizedData(int counter) {
final Region pr = cache.getRegion("PR1");
for (final Map.Entry<Integer, BucketRegion> integerBucketRegionEntry : ((PartitionedRegion) pr)
.getDataStore()
.getAllLocalBuckets()) {
final Map.Entry entry = (Map.Entry) integerBucketRegionEntry;
final BucketRegion bucketRegion = (BucketRegion) entry.getValue();
if (bucketRegion == null) {
continue;
} else {
RegionMap map = bucketRegion.getRegionMap();
return ((AbstractLRURegionEntry) map.getEntry(counter)).getEntrySize();
}
}
return 0;
}
private int getSizeOfCustomizedObject(Object object) {
final Region pr = cache.getRegion("PR1");
for (final Map.Entry<Integer, BucketRegion> integerBucketRegionEntry : ((PartitionedRegion) pr)
.getDataStore()
.getAllLocalBuckets()) {
final Map.Entry entry = (Map.Entry) integerBucketRegionEntry;
final BucketRegion bucketRegion = (BucketRegion) entry.getValue();
if (bucketRegion == null) {
continue;
} else {
RegionMap map = bucketRegion.getRegionMap();
AbstractLRURegionEntry re = (AbstractLRURegionEntry) map.getEntry(object);
if (re != null) {
return re.getEntrySize();
}
}
}
return 0;
}
}
| |
/**
* Copyright (c) 2015-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
package com.facebook.react.devsupport;
import android.content.Context;
import android.os.AsyncTask;
import android.os.Handler;
import com.facebook.common.logging.FLog;
import com.facebook.infer.annotation.Assertions;
import com.facebook.react.bridge.UiThreadUtil;
import com.facebook.react.common.ReactConstants;
import com.facebook.react.common.network.OkHttpCallUtil;
import com.facebook.react.devsupport.interfaces.PackagerStatusCallback;
import com.facebook.react.devsupport.interfaces.StackFrame;
import com.facebook.react.modules.systeminfo.AndroidInfoHelpers;
import com.facebook.react.packagerconnection.FileIoHandler;
import com.facebook.react.packagerconnection.JSPackagerClient;
import com.facebook.react.packagerconnection.NotificationOnlyHandler;
import com.facebook.react.packagerconnection.RequestHandler;
import com.facebook.react.packagerconnection.RequestOnlyHandler;
import com.facebook.react.packagerconnection.Responder;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import javax.annotation.Nullable;
import okhttp3.Call;
import okhttp3.Callback;
import okhttp3.ConnectionPool;
import okhttp3.MediaType;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.RequestBody;
import okhttp3.Response;
import okhttp3.ResponseBody;
import okio.Okio;
import okio.Sink;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
/**
* Helper class for all things about the debug server running in the engineer's host machine.
*
* One can use 'debug_http_host' shared preferences key to provide a host name for the debug server.
* If the setting is empty we support and detect two basic configuration that works well for android
* emulators connectiong to debug server running on emulator's host:
* - Android stock emulator with standard non-configurable local loopback alias: 10.0.2.2,
* - Genymotion emulator with default settings: 10.0.3.2
*/
public class DevServerHelper {
  // Extra attached to the reload broadcast to signal that JS should run through the proxy.
  public static final String RELOAD_APP_EXTRA_JS_PROXY = "jsproxy";
  private static final String RELOAD_APP_ACTION_SUFFIX = ".RELOAD_APP_ACTION";

  // Endpoint URL templates; %s placeholders are filled with the debug-server host (and, where
  // applicable, module path and dev/minify flags).
  private static final String BUNDLE_URL_FORMAT =
      "http://%s/%s.bundle?platform=android&dev=%s&minify=%s";
  private static final String RESOURCE_URL_FORMAT = "http://%s/%s";
  // Source maps live at the same path as the bundle, with ".map" instead of ".bundle".
  private static final String SOURCE_MAP_URL_FORMAT =
      BUNDLE_URL_FORMAT.replaceFirst("\\.bundle", ".map");
  private static final String LAUNCH_JS_DEVTOOLS_COMMAND_URL_FORMAT =
      "http://%s/launch-js-devtools";
  private static final String ONCHANGE_ENDPOINT_URL_FORMAT =
      "http://%s/onchange";
  private static final String WEBSOCKET_PROXY_URL_FORMAT = "ws://%s/debugger-proxy?role=client";
  private static final String PACKAGER_STATUS_URL_FORMAT = "http://%s/status";
  private static final String HEAP_CAPTURE_UPLOAD_URL_FORMAT = "http://%s/jscheapcaptureupload";
  private static final String INSPECTOR_DEVICE_URL_FORMAT = "http://%s/inspector/device?name=%s&app=%s";
  private static final String SYMBOLICATE_URL_FORMAT = "http://%s/symbolicate";
  private static final String OPEN_STACK_FRAME_URL_FORMAT = "http://%s/open-stack-frame";

  // Exact body the packager returns from /status when it is up and running.
  private static final String PACKAGER_OK_STATUS = "packager-status:running";

  private static final int LONG_POLL_KEEP_ALIVE_DURATION_MS = 2 * 60 * 1000; // 2 mins
  private static final int LONG_POLL_FAILURE_DELAY_MS = 5000;
  private static final int HTTP_CONNECT_TIMEOUT_MS = 5000;

  // Chrome DevTools protocol message telling any attached debugger to disable itself.
  private static final String DEBUGGER_MSG_DISABLE = "{ \"id\":1,\"method\":\"Debugger.disable\" }";

  /** Notified when /onchange long-polling detects that the packager's content changed. */
  public interface OnServerContentChangeListener {
    void onServerContentChanged();
  }

  /** Receives commands pushed by the packager over its WebSocket connection. */
  public interface PackagerCommandListener {
    void onPackagerReloadCommand();
    void onPackagerDevMenuCommand();
    void onCaptureHeapCommand(final Responder responder);
    void onPokeSamplingProfilerCommand(final Responder responder);
  }

  /** Receives the result of symbolicateStackTrace; null means symbolication failed. */
  public interface SymbolicationListener {
    void onSymbolicationComplete(@Nullable Iterable<StackFrame> stackFrames);
  }

  private final DevInternalSettings mSettings;
  private final OkHttpClient mClient;
  private final Handler mRestartOnChangePollingHandler;
  private final BundleDownloader mBundleDownloader;
  private final String mPackageName;

  // Long-poll state: mOnChangePollingClient is non-null only while polling is enabled.
  private boolean mOnChangePollingEnabled;
  private @Nullable JSPackagerClient mPackagerClient;
  private @Nullable InspectorPackagerConnection mInspectorPackagerConnection;
  private @Nullable OkHttpClient mOnChangePollingClient;
  private @Nullable OnServerContentChangeListener mOnServerContentChangeListener;
  /**
   * @param settings dev settings supplying the packager connection configuration.
   * @param packageName application package name, used to identify this app to the inspector.
   */
  public DevServerHelper(DevInternalSettings settings, String packageName) {
    mSettings = settings;
    // Read/write timeouts of 0 disable them: bundle downloads and long-poll responses can be slow.
    mClient = new OkHttpClient.Builder()
      .connectTimeout(HTTP_CONNECT_TIMEOUT_MS, TimeUnit.MILLISECONDS)
      .readTimeout(0, TimeUnit.MILLISECONDS)
      .writeTimeout(0, TimeUnit.MILLISECONDS)
      .build();
    mBundleDownloader = new BundleDownloader(mClient);
    mRestartOnChangePollingHandler = new Handler();
    mPackageName = packageName;
  }
  /**
   * Opens the WebSocket connection to the packager (JSPackagerClient) on a background thread and
   * registers handlers for the commands the packager can push: "reload", "devMenu",
   * "captureHeap", "pokeSamplingProfiler", plus the FileIoHandler request handlers. No-ops if a
   * connection is already open.
   *
   * @param clientId identifier sent to the packager for this client.
   * @param commandListener receives the packager commands (invoked from the client's threads).
   */
  public void openPackagerConnection(
      final String clientId, final PackagerCommandListener commandListener) {
    if (mPackagerClient != null) {
      FLog.w(ReactConstants.TAG, "Packager connection already open, nooping.");
      return;
    }
    new AsyncTask<Void, Void, Void>() {
      @Override
      protected Void doInBackground(Void... backgroundParams) {
        Map<String, RequestHandler> handlers = new HashMap<>();
        // "reload" and "devMenu" are one-way notifications; no response goes back.
        handlers.put("reload", new NotificationOnlyHandler() {
          @Override
          public void onNotification(@Nullable Object params) {
            commandListener.onPackagerReloadCommand();
          }
        });
        handlers.put("devMenu", new NotificationOnlyHandler() {
          @Override
          public void onNotification(@Nullable Object params) {
            commandListener.onPackagerDevMenuCommand();
          }
        });
        // "captureHeap" and "pokeSamplingProfiler" expect a reply through the Responder.
        handlers.put("captureHeap", new RequestOnlyHandler() {
          @Override
          public void onRequest(@Nullable Object params, Responder responder) {
            commandListener.onCaptureHeapCommand(responder);
          }
        });
        handlers.put("pokeSamplingProfiler", new RequestOnlyHandler() {
          @Override
          public void onRequest(@Nullable Object params, Responder responder) {
            commandListener.onPokeSamplingProfilerCommand(responder);
          }
        });
        handlers.putAll(new FileIoHandler().handlers());
        mPackagerClient = new JSPackagerClient(
            clientId,
            mSettings.getPackagerConnectionSettings(),
            handlers);
        mPackagerClient.init();
        return null;
      }
    }.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
  }
public void closePackagerConnection() {
new AsyncTask<Void, Void, Void>() {
@Override
protected Void doInBackground(Void... params) {
if (mPackagerClient != null) {
mPackagerClient.close();
mPackagerClient = null;
}
return null;
}
}.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
}
public void openInspectorConnection() {
if (mInspectorPackagerConnection != null) {
FLog.w(ReactConstants.TAG, "Inspector connection already open, nooping.");
return;
}
new AsyncTask<Void, Void, Void>() {
@Override
protected Void doInBackground(Void... params) {
mInspectorPackagerConnection = new InspectorPackagerConnection(getInspectorDeviceUrl(), mPackageName);
mInspectorPackagerConnection.connect();
return null;
}
}.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
}
  /** Broadcasts {@code event} to every page connected through the inspector, if connected. */
  public void sendEventToAllConnections(String event) {
    if (mInspectorPackagerConnection != null) {
      mInspectorPackagerConnection.sendEventToAllConnections(event);
    }
  }

  /** Sends a Debugger.disable message to all inspector connections, detaching any debugger. */
  public void disableDebugger() {
    if (mInspectorPackagerConnection != null) {
      mInspectorPackagerConnection.sendEventToAllConnections(DEBUGGER_MSG_DISABLE);
    }
  }
public void closeInspectorConnection() {
new AsyncTask<Void, Void, Void>() {
@Override
protected Void doInBackground(Void... params) {
if (mInspectorPackagerConnection != null) {
mInspectorPackagerConnection.closeQuietly();
mInspectorPackagerConnection = null;
}
return null;
}
}.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
}
  /**
   * Posts the given stack frames as JSON to the packager's /symbolicate endpoint and reports the
   * symbolicated frames — or null on any failure — through {@code listener}. The callback runs on
   * an OkHttp dispatcher thread, not the UI thread.
   */
  public void symbolicateStackTrace(
      Iterable<StackFrame> stackFrames,
      final SymbolicationListener listener) {
    try {
      final String symbolicateURL = createSymbolicateURL(
          mSettings.getPackagerConnectionSettings().getDebugServerHost());
      final JSONArray jsonStackFrames = new JSONArray();
      for (final StackFrame stackFrame : stackFrames) {
        jsonStackFrames.put(stackFrame.toJSON());
      }
      // Request body: {"stack": [...frames...]}.
      final Request request = new Request.Builder()
          .url(symbolicateURL)
          .post(RequestBody.create(
              MediaType.parse("application/json"),
              new JSONObject().put("stack", jsonStackFrames).toString()))
          .build();
      Call symbolicateCall = Assertions.assertNotNull(mClient.newCall(request));
      symbolicateCall.enqueue(new Callback() {
        @Override
        public void onFailure(Call call, IOException e) {
          FLog.w(
              ReactConstants.TAG,
              "Got IOException when attempting symbolicate stack trace: " + e.getMessage());
          listener.onSymbolicationComplete(null);
        }

        @Override
        public void onResponse(Call call, final Response response) throws IOException {
          try {
            // Expects {"stack": [...]} back; convert to StackFrame objects.
            listener.onSymbolicationComplete(Arrays.asList(
                StackTraceHelper.convertJsStackTrace(new JSONObject(
                    response.body().string()).getJSONArray("stack"))));
          } catch (JSONException exception) {
            // Malformed response: report failure rather than crash.
            listener.onSymbolicationComplete(null);
          }
        }
      });
    } catch (JSONException e) {
      // Building the request body failed; the listener is intentionally not called here.
      FLog.w(
          ReactConstants.TAG,
          "Got JSONException when attempting symbolicate stack trace: " + e.getMessage());
    }
  }
public void openStackFrameCall(StackFrame stackFrame) {
final String openStackFrameURL = createOpenStackFrameURL(
mSettings.getPackagerConnectionSettings().getDebugServerHost());
final Request request = new Request.Builder()
.url(openStackFrameURL)
.post(RequestBody.create(
MediaType.parse("application/json"),
stackFrame.toJSON().toString()))
.build();
Call symbolicateCall = Assertions.assertNotNull(mClient.newCall(request));
symbolicateCall.enqueue(new Callback() {
@Override
public void onFailure(Call call, IOException e) {
FLog.w(
ReactConstants.TAG,
"Got IOException when attempting to open stack frame: " + e.getMessage());
}
@Override
public void onResponse(Call call, final Response response) throws IOException {
// We don't have a listener for this.
}
});
}
  /** Intent action for reloading the JS */
  public static String getReloadAppAction(Context context) {
    return context.getPackageName() + RELOAD_APP_ACTION_SUFFIX;
  }

  /** @return WebSocket URL of the debugger proxy on the debug server. */
  public String getWebsocketProxyURL() {
    return String.format(
        Locale.US,
        WEBSOCKET_PROXY_URL_FORMAT,
        mSettings.getPackagerConnectionSettings().getDebugServerHost());
  }

  /** @return URL that JSC heap captures should be uploaded to. */
  public String getHeapCaptureUploadUrl() {
    return String.format(
        Locale.US,
        HEAP_CAPTURE_UPLOAD_URL_FORMAT,
        mSettings.getPackagerConnectionSettings().getDebugServerHost());
  }

  /** @return inspector endpoint URL identifying this device and app to the packager. */
  public String getInspectorDeviceUrl() {
    return String.format(
        Locale.US,
        INSPECTOR_DEVICE_URL_FORMAT,
        mSettings.getPackagerConnectionSettings().getInspectorServerHost(),
        AndroidInfoHelpers.getFriendlyDeviceName(),
        mPackageName);
  }

  /** @return the shared BundleDownloader backed by this helper's OkHttp client. */
  public BundleDownloader getBundleDownloader() {
    return mBundleDownloader;
  }
  /**
   * @return the host to use when connecting to the bundle server from the host itself.
   */
  private String getHostForJSProxy() {
    // Use custom port if configured. Note that host stays "localhost".
    String host = Assertions.assertNotNull(
        mSettings.getPackagerConnectionSettings().getDebugServerHost());
    int portOffset = host.lastIndexOf(':');
    if (portOffset > -1) {
      // Preserve the ":port" suffix from the configured host.
      return "localhost" + host.substring(portOffset);
    } else {
      return AndroidInfoHelpers.DEVICE_LOCALHOST;
    }
  }

  /**
   * @return whether we should enable dev mode when requesting JS bundles.
   */
  private boolean getDevMode() {
    return mSettings.isJSDevModeEnabled();
  }

  /**
   * @return whether we should request minified JS bundles.
   */
  private boolean getJSMinifyMode() {
    return mSettings.isJSMinifyEnabled();
  }

  /** Fills BUNDLE_URL_FORMAT; the booleans render as "true"/"false" query parameters. */
  private static String createBundleURL(
      String host,
      String jsModulePath,
      boolean devMode,
      boolean jsMinify) {
    return String.format(Locale.US, BUNDLE_URL_FORMAT, host, jsModulePath, devMode, jsMinify);
  }

  /** @return URL for fetching an arbitrary resource path from the debug server. */
  private static String createResourceURL(String host, String resourcePath) {
    return String.format(Locale.US, RESOURCE_URL_FORMAT, host, resourcePath);
  }

  /** @return URL of the /symbolicate endpoint on the given host. */
  private static String createSymbolicateURL(String host) {
    return String.format(Locale.US, SYMBOLICATE_URL_FORMAT, host);
  }

  /** @return URL of the /open-stack-frame endpoint on the given host. */
  private static String createOpenStackFrameURL(String host) {
    return String.format(Locale.US, OPEN_STACK_FRAME_URL_FORMAT, host);
  }
public String getDevServerBundleURL(final String jsModulePath) {
return createBundleURL(
mSettings.getPackagerConnectionSettings().getDebugServerHost(),
jsModulePath,
getDevMode(),
getJSMinifyMode());
}
public void isPackagerRunning(final PackagerStatusCallback callback) {
String statusURL = createPackagerStatusURL(
mSettings.getPackagerConnectionSettings().getDebugServerHost());
Request request = new Request.Builder()
.url(statusURL)
.build();
mClient.newCall(request).enqueue(
new Callback() {
@Override
public void onFailure(Call call, IOException e) {
FLog.w(
ReactConstants.TAG,
"The packager does not seem to be running as we got an IOException requesting " +
"its status: " + e.getMessage());
callback.onPackagerStatusFetched(false);
}
@Override
public void onResponse(Call call, Response response) throws IOException {
if (!response.isSuccessful()) {
FLog.e(
ReactConstants.TAG,
"Got non-success http code from packager when requesting status: " +
response.code());
callback.onPackagerStatusFetched(false);
return;
}
ResponseBody body = response.body();
if (body == null) {
FLog.e(
ReactConstants.TAG,
"Got null body response from packager when requesting status");
callback.onPackagerStatusFetched(false);
return;
}
if (!PACKAGER_OK_STATUS.equals(body.string())) {
FLog.e(
ReactConstants.TAG,
"Got unexpected response from packager when requesting status: " + body.string());
callback.onPackagerStatusFetched(false);
return;
}
callback.onPackagerStatusFetched(true);
}
});
}
  /** @return URL of the /status endpoint on the given host. */
  private static String createPackagerStatusURL(String host) {
    return String.format(Locale.US, PACKAGER_STATUS_URL_FORMAT, host);
  }

  /**
   * Stops /onchange long-polling: clears any scheduled retry, cancels the in-flight request (it
   * is tagged with {@code this}), and drops the polling client and listener.
   */
  public void stopPollingOnChangeEndpoint() {
    mOnChangePollingEnabled = false;
    mRestartOnChangePollingHandler.removeCallbacksAndMessages(null);
    if (mOnChangePollingClient != null) {
      OkHttpCallUtil.cancelTag(mOnChangePollingClient, this);
      mOnChangePollingClient = null;
    }
    mOnServerContentChangeListener = null;
  }
public void startPollingOnChangeEndpoint(
OnServerContentChangeListener onServerContentChangeListener) {
if (mOnChangePollingEnabled) {
// polling already enabled
return;
}
mOnChangePollingEnabled = true;
mOnServerContentChangeListener = onServerContentChangeListener;
mOnChangePollingClient = new OkHttpClient.Builder()
.connectionPool(new ConnectionPool(1, LONG_POLL_KEEP_ALIVE_DURATION_MS, TimeUnit.MINUTES))
.connectTimeout(HTTP_CONNECT_TIMEOUT_MS, TimeUnit.MILLISECONDS)
.build();
enqueueOnChangeEndpointLongPolling();
}
private void handleOnChangePollingResponse(boolean didServerContentChanged) {
if (mOnChangePollingEnabled) {
if (didServerContentChanged) {
UiThreadUtil.runOnUiThread(new Runnable() {
@Override
public void run() {
if (mOnServerContentChangeListener != null) {
mOnServerContentChangeListener.onServerContentChanged();
}
}
});
}
enqueueOnChangeEndpointLongPolling();
}
}
  /**
   * Issues one long-poll request against /onchange, tagged with {@code this} so that
   * {@link #stopPollingOnChangeEndpoint} can cancel it. On failure the next poll is scheduled
   * after a delay; on response, HTTP 205 ("Reset Content") signals changed server content.
   */
  private void enqueueOnChangeEndpointLongPolling() {
    Request request = new Request.Builder().url(createOnChangeEndpointUrl()).tag(this).build();
    Assertions.assertNotNull(mOnChangePollingClient).newCall(request).enqueue(new Callback() {
      @Override
      public void onFailure(Call call, IOException e) {
        if (mOnChangePollingEnabled) {
          // this runnable is used by onchange endpoint poller to delay subsequent requests in case
          // of a failure, so that we don't flood network queue with frequent requests in case when
          // dev server is down
          FLog.d(ReactConstants.TAG, "Error while requesting /onchange endpoint", e);
          mRestartOnChangePollingHandler.postDelayed(
              new Runnable() {
                @Override
                public void run() {
                  handleOnChangePollingResponse(false);
                }
              },
              LONG_POLL_FAILURE_DELAY_MS);
        }
      }

      @Override
      public void onResponse(Call call, Response response) throws IOException {
        // 205 means the packager's content changed since the poll started.
        handleOnChangePollingResponse(response.code() == 205);
      }
    });
  }
  /** @return URL of the /onchange long-poll endpoint on the debug server. */
  private String createOnChangeEndpointUrl() {
    return String.format(
        Locale.US,
        ONCHANGE_ENDPOINT_URL_FORMAT,
        mSettings.getPackagerConnectionSettings().getDebugServerHost());
  }

  /** @return URL of the /launch-js-devtools command endpoint on the debug server. */
  private String createLaunchJSDevtoolsCommandUrl() {
    return String.format(
        Locale.US,
        LAUNCH_JS_DEVTOOLS_COMMAND_URL_FORMAT,
        mSettings.getPackagerConnectionSettings().getDebugServerHost());
  }
public void launchJSDevtools() {
Request request = new Request.Builder()
.url(createLaunchJSDevtoolsCommandUrl())
.build();
mClient.newCall(request).enqueue(new Callback() {
@Override
public void onFailure(Call call, IOException e) {
// ignore HTTP call response, this is just to open a debugger page and there is no reason
// to report failures from here
}
@Override
public void onResponse(Call call, Response response) throws IOException {
// ignore HTTP call response - see above
}
});
}
  /** @return source-map URL for the given main module, mirroring the bundle URL parameters. */
  public String getSourceMapUrl(String mainModuleName) {
    return String.format(
        Locale.US,
        SOURCE_MAP_URL_FORMAT,
        mSettings.getPackagerConnectionSettings().getDebugServerHost(),
        mainModuleName,
        getDevMode(),
        getJSMinifyMode());
  }

  /** @return bundle URL for the given main module on the debug server. */
  public String getSourceUrl(String mainModuleName) {
    return String.format(
        Locale.US,
        BUNDLE_URL_FORMAT,
        mSettings.getPackagerConnectionSettings().getDebugServerHost(),
        mainModuleName,
        getDevMode(),
        getJSMinifyMode());
  }

  /** @return bundle URL usable by the JS proxy running on the development host. */
  public String getJSBundleURLForRemoteDebugging(String mainModuleName) {
    // The host we use when connecting to the JS bundle server from the emulator is not the
    // same as the one needed to connect to the same server from the JavaScript proxy running on the
    // host itself.
    return createBundleURL(
        getHostForJSProxy(),
        mainModuleName,
        getDevMode(),
        getJSMinifyMode());
  }
/**
* This is a debug-only utility to allow fetching a file via packager.
* It's made synchronous for simplicity, but should only be used if it's absolutely
* necessary.
* @return the file with the fetched content, or null if there's any failure.
*/
public @Nullable File downloadBundleResourceFromUrlSync(
final String resourcePath,
final File outputFile) {
final String resourceURL = createResourceURL(
mSettings.getPackagerConnectionSettings().getDebugServerHost(),
resourcePath);
final Request request = new Request.Builder()
.url(resourceURL)
.build();
try {
Response response = mClient.newCall(request).execute();
if (!response.isSuccessful()) {
return null;
}
Sink output = null;
try {
output = Okio.sink(outputFile);
Okio.buffer(response.body().source()).readAll(output);
} finally {
if (output != null) {
output.close();
}
}
return outputFile;
} catch (Exception ex) {
FLog.e(
ReactConstants.TAG,
"Failed to fetch resource synchronously - resourcePath: \"%s\", outputFile: \"%s\"",
resourcePath,
outputFile.getAbsolutePath(),
ex);
return null;
}
}
}
| |
// Copyright 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.content.app;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.graphics.SurfaceTexture;
import android.os.Bundle;
import android.os.IBinder;
import android.os.Parcelable;
import android.os.Process;
import android.os.RemoteException;
import android.view.Surface;
import org.chromium.base.BaseSwitches;
import org.chromium.base.CommandLine;
import org.chromium.base.ContextUtils;
import org.chromium.base.Log;
import org.chromium.base.annotations.CalledByNative;
import org.chromium.base.annotations.JNINamespace;
import org.chromium.base.annotations.SuppressFBWarnings;
import org.chromium.base.library_loader.LibraryLoader;
import org.chromium.base.library_loader.LibraryProcessType;
import org.chromium.base.library_loader.Linker;
import org.chromium.base.library_loader.ProcessInitException;
import org.chromium.content.browser.ChildProcessConnection;
import org.chromium.content.browser.ChildProcessLauncher;
import org.chromium.content.browser.FileDescriptorInfo;
import org.chromium.content.common.IChildProcessCallback;
import org.chromium.content.common.IChildProcessService;
import org.chromium.content.common.SurfaceWrapper;
import java.util.concurrent.Semaphore;
import java.util.concurrent.atomic.AtomicReference;
/**
* This is the base class for child services; the [Non]SandboxedProcessService0, 1.. etc
* subclasses provide the concrete service entry points, to enable the browser to connect
* to more than one distinct process (i.e. one process per service number, up to limit of N).
* The embedding application must declare these service instances in the application section
* of its AndroidManifest.xml, for example with N entries of the form:-
* <service android:name="org.chromium.content.app.[Non]SandboxedProcessServiceX"
* android:process=":[non]sandboxed_processX" />
* for X in 0...N-1 (where N is {@link ChildProcessLauncher#MAX_REGISTERED_SERVICES})
*/
@JNINamespace("content")
@SuppressWarnings("SynchronizeOnNonFinalField")
public class ChildProcessService extends Service {
    private static final String MAIN_THREAD_NAME = "ChildProcessMain";
    private static final String TAG = "cr.ChildProcessService";

    // Callback to the browser process; assigned in setupConnection().
    private IChildProcessCallback mCallback;

    // This is the native "Main" thread for the renderer / utility process.
    private Thread mMainThread;
    // Parameters received via IPC, only accessed while holding the mMainThread monitor.
    private String[] mCommandLineParams;
    private int mCpuCount;
    private long mCpuFeatures;
    // File descriptors that should be registered natively.
    private FileDescriptorInfo[] mFdInfos;
    // Linker-specific parameters for this child process service.
    private ChromiumLinkerParams mLinkerParams;

    // Holds the single service Context; also used to detect illegal child-process reuse.
    private static AtomicReference<Context> sContext = new AtomicReference<Context>(null);
    private boolean mLibraryInitialized = false;
    // Becomes true once the service is bound. Access must synchronize around mMainThread.
    private boolean mIsBound = false;
    // NOTE(review): acquired/released outside this excerpt — presumably gates the native main
    // entry so it runs at most once per process; confirm against the rest of the file.
    private final Semaphore mActivitySemaphore = new Semaphore(1);
// Return a Linker instance. If testing, the Linker needs special setup.
private Linker getLinker() {
if (Linker.areTestsEnabled()) {
// For testing, set the Linker implementation and the test runner
// class name to match those used by the parent.
assert mLinkerParams != null;
Linker.setupForTesting(
mLinkerParams.mLinkerImplementationForTesting,
mLinkerParams.mTestRunnerClassNameForTesting);
}
return Linker.getInstance();
}
    // Binder object used by clients for this service.
    private final IChildProcessService.Stub mBinder = new IChildProcessService.Stub() {
        // NOTE: Implement any IChildProcessService methods here.

        /**
         * Receives connection parameters from the browser process, hands any shared RELRO
         * sections to the Linker, and wakes the main thread (which blocks until the command
         * line has arrived). Returns this process's pid.
         */
        @Override
        public int setupConnection(Bundle args, IChildProcessCallback callback) {
            mCallback = callback;
            // Required to unparcel FileDescriptorInfo.
            args.setClassLoader(getClassLoader());
            synchronized (mMainThread) {
                // Allow the command line to be set via bind() intent or setupConnection, but
                // the FD can only be transferred here.
                if (mCommandLineParams == null) {
                    mCommandLineParams = args.getStringArray(
                            ChildProcessConnection.EXTRA_COMMAND_LINE);
                }
                // We must have received the command line by now
                assert mCommandLineParams != null;
                mCpuCount = args.getInt(ChildProcessConnection.EXTRA_CPU_COUNT);
                mCpuFeatures = args.getLong(ChildProcessConnection.EXTRA_CPU_FEATURES);
                assert mCpuCount > 0;
                Parcelable[] fdInfosAsParcelable =
                        args.getParcelableArray(ChildProcessConnection.EXTRA_FILES);
                // For why this arraycopy is necessary:
                // http://stackoverflow.com/questions/8745893/i-dont-get-why-this-classcastexception-occurs
                mFdInfos = new FileDescriptorInfo[fdInfosAsParcelable.length];
                System.arraycopy(fdInfosAsParcelable, 0, mFdInfos, 0, fdInfosAsParcelable.length);
                // Hand any shared RELRO sections to the Linker before releasing the main thread.
                Bundle sharedRelros = args.getBundle(Linker.EXTRA_LINKER_SHARED_RELROS);
                if (sharedRelros != null) {
                    getLinker().useSharedRelros(sharedRelros);
                    sharedRelros = null;
                }
                mMainThread.notifyAll();
            }
            return Process.myPid();
        }

        @Override
        public void crashIntentionallyForTesting() {
            Process.killProcess(Process.myPid());
        }
    };
    /** Returns the Context of the running ChildProcessService, or null before onCreate(). */
    /* package */ static Context getContext() {
        return sContext.get();
    }
@Override
public void onCreate() {
    Log.i(TAG, "Creating new ChildProcessService pid=%d", Process.myPid());
    if (sContext.get() != null) {
        // Each child process instance must be fresh; reuse would leave stale
        // native and Java state behind.
        throw new RuntimeException("Illegal child process reuse.");
    }
    sContext.set(this);
    super.onCreate();
    // Spin up the "main" thread for this child process. It waits (on the
    // mMainThread monitor) for the command line, linker parameters and FDs to
    // be delivered by onBind()/setupConnection(), then loads and initializes
    // the native library and enters ContentMain.
    mMainThread = new Thread(new Runnable() {
        @Override
        @SuppressFBWarnings("DM_EXIT")
        public void run() {
            try {
                // CommandLine must be initialized before everything else.
                synchronized (mMainThread) {
                    while (mCommandLineParams == null) {
                        mMainThread.wait();
                    }
                }
                CommandLine.init(mCommandLineParams);
                Linker linker = null;
                boolean requestedSharedRelro = false;
                if (Linker.isUsed()) {
                    // mLinkerParams is set by onBind(); wait until bound.
                    synchronized (mMainThread) {
                        while (!mIsBound) {
                            mMainThread.wait();
                        }
                    }
                    linker = getLinker();
                    if (mLinkerParams.mWaitForSharedRelro) {
                        requestedSharedRelro = true;
                        linker.initServiceProcess(mLinkerParams.mBaseLoadAddress);
                    } else {
                        linker.disableSharedRelros();
                    }
                }
                boolean isLoaded = false;
                if (CommandLine.getInstance().hasSwitch(
                        BaseSwitches.RENDERER_WAIT_FOR_JAVA_DEBUGGER)) {
                    android.os.Debug.waitForDebugger();
                }
                boolean loadAtFixedAddressFailed = false;
                try {
                    LibraryLoader.get(LibraryProcessType.PROCESS_CHILD)
                            .loadNow(getApplicationContext());
                    isLoaded = true;
                } catch (ProcessInitException e) {
                    if (requestedSharedRelro) {
                        // A shared-RELRO load can fail if the fixed address is
                        // unavailable; retry below without it.
                        Log.w(TAG, "Failed to load native library with shared RELRO, "
                                + "retrying without");
                        loadAtFixedAddressFailed = true;
                    } else {
                        Log.e(TAG, "Failed to load native library", e);
                    }
                }
                if (!isLoaded && requestedSharedRelro) {
                    // requestedSharedRelro implies linker was set above.
                    linker.disableSharedRelros();
                    try {
                        LibraryLoader.get(LibraryProcessType.PROCESS_CHILD)
                                .loadNow(getApplicationContext());
                        isLoaded = true;
                    } catch (ProcessInitException e) {
                        Log.e(TAG, "Failed to load native library on retry", e);
                    }
                }
                if (!isLoaded) {
                    // Nothing useful can run without the native library.
                    System.exit(-1);
                }
                LibraryLoader.get(LibraryProcessType.PROCESS_CHILD)
                        .registerRendererProcessHistogram(requestedSharedRelro,
                                loadAtFixedAddressFailed);
                LibraryLoader.get(LibraryProcessType.PROCESS_CHILD).initialize();
                synchronized (mMainThread) {
                    // Signal onDestroy() that native code may now be entered,
                    // then wait for the FDs delivered by setupConnection().
                    mLibraryInitialized = true;
                    mMainThread.notifyAll();
                    while (mFdInfos == null) {
                        mMainThread.wait();
                    }
                }
                ContextUtils.initApplicationContext(sContext.get().getApplicationContext());
                for (FileDescriptorInfo fdInfo : mFdInfos) {
                    nativeRegisterGlobalFileDescriptor(
                            fdInfo.mId, fdInfo.mFd.detachFd(), fdInfo.mOffset, fdInfo.mSize);
                }
                nativeInitChildProcess(ChildProcessService.this, mCpuCount, mCpuFeatures);
                if (mActivitySemaphore.tryAcquire()) {
                    // ContentMain.start() does not return until the child
                    // process is shutting down.
                    ContentMain.start();
                    nativeExitChildProcess();
                }
            } catch (InterruptedException e) {
                // Restore the interrupt status before the thread exits, per
                // standard InterruptedException handling.
                Thread.currentThread().interrupt();
                Log.w(TAG, "%s startup failed: %s", MAIN_THREAD_NAME, e);
            } catch (ProcessInitException e) {
                Log.w(TAG, "%s startup failed: %s", MAIN_THREAD_NAME, e);
            }
        }
    }, MAIN_THREAD_NAME);
    mMainThread.start();
}
@Override
@SuppressFBWarnings("DM_EXIT")
public void onDestroy() {
    Log.i(TAG, "Destroying ChildProcessService pid=%d", Process.myPid());
    super.onDestroy();
    // If the semaphore is still available, ContentMain.start() never ran, so
    // there is no native main thread to shut down; exit the process directly.
    if (mActivitySemaphore.tryAcquire()) {
        // TODO(crbug.com/457406): This is a bit hacky, but there is no known better solution
        // as this service will get reused (at least if not sandboxed).
        // In fact, we might really want to always exit() from onDestroy(), not just from
        // the early return here.
        System.exit(0);
        return;
    }
    synchronized (mMainThread) {
        try {
            while (!mLibraryInitialized) {
                // Avoid a potential race in calling through to native code before the library
                // has loaded.
                mMainThread.wait();
            }
        } catch (InterruptedException e) {
            // Ignore — shutdown proceeds on a best-effort basis even if this
            // wait is interrupted.
        }
    }
    // Try to shutdown the MainThread gracefully, but it might not
    // have chance to exit normally.
    nativeShutdownMainThread();
}
@Override
public IBinder onBind(Intent intent) {
    // We call stopSelf() to request that this service be stopped as soon as the client
    // unbinds. Otherwise the system may keep it around and available for a reconnect. The
    // child processes do not currently support reconnect; they must be initialized from
    // scratch every time.
    stopSelf();
    // Extract the connection parameters from the bind intent before taking
    // the lock; only the field assignments need the monitor held.
    String[] commandLine =
            intent.getStringArrayExtra(ChildProcessConnection.EXTRA_COMMAND_LINE);
    ChromiumLinkerParams linkerParams = new ChromiumLinkerParams(intent);
    synchronized (mMainThread) {
        mCommandLineParams = commandLine;
        // mLinkerParams is never used if Linker.isUsed() returns false.
        // See onCreate().
        mLinkerParams = linkerParams;
        mIsBound = true;
        mMainThread.notifyAll();
    }
    return mBinder;
}
/**
 * Called from native code to share a surface texture with another child process.
 * Through using the callback object the browser is used as a proxy to route the
 * call to the correct process.
 *
 * @param pid Process handle of the child process to share the SurfaceTexture with.
 * @param surfaceObject The Surface or SurfaceTexture to share with the other child process.
 * @param primaryID Used to route the call to the correct client instance.
 * @param secondaryID Used to route the call to the correct client instance.
 */
@SuppressWarnings("unused")
@CalledByNative
private void establishSurfaceTexturePeer(
        int pid, Object surfaceObject, int primaryID, int secondaryID) {
    if (mCallback == null) {
        Log.e(TAG, "No callback interface has been provided.");
        return;
    }
    final Surface peerSurface;
    final boolean releaseAfterUse;
    if (surfaceObject instanceof Surface) {
        // Caller-owned Surface: use it directly and do not release it here.
        peerSurface = (Surface) surfaceObject;
        releaseAfterUse = false;
    } else if (surfaceObject instanceof SurfaceTexture) {
        // Wrap the texture in a Surface we own; release it after the call.
        peerSurface = new Surface((SurfaceTexture) surfaceObject);
        releaseAfterUse = true;
    } else {
        Log.e(TAG, "Not a valid surfaceObject: %s", surfaceObject);
        return;
    }
    try {
        mCallback.establishSurfacePeer(pid, peerSurface, primaryID, secondaryID);
    } catch (RemoteException e) {
        Log.e(TAG, "Unable to call establishSurfaceTexturePeer: %s", e);
    } finally {
        if (releaseAfterUse) {
            peerSurface.release();
        }
    }
}
/** Fetches the view Surface for {@code surfaceId} from the browser via the callback. */
@SuppressWarnings("unused")
@CalledByNative
private Surface getViewSurface(int surfaceId) {
    if (mCallback == null) {
        Log.e(TAG, "No callback interface has been provided.");
        return null;
    }
    SurfaceWrapper wrapper;
    try {
        wrapper = mCallback.getViewSurface(surfaceId);
    } catch (RemoteException e) {
        Log.e(TAG, "Unable to call getViewSurface: %s", e);
        return null;
    }
    // The browser may legitimately return no wrapper for an unknown id.
    return (wrapper == null) ? null : wrapper.getSurface();
}
/**
 * Wraps {@code surfaceTexture} in a Surface and registers it with the browser
 * via the callback so it can be routed to the requesting client.
 */
@SuppressWarnings("unused")
@CalledByNative
private void createSurfaceTextureSurface(
        int surfaceTextureId, int clientId, SurfaceTexture surfaceTexture) {
    if (mCallback == null) {
        Log.e(TAG, "No callback interface has been provided.");
        return;
    }
    Surface surface = new Surface(surfaceTexture);
    try {
        mCallback.registerSurfaceTextureSurface(surfaceTextureId, clientId, surface);
    } catch (RemoteException e) {
        Log.e(TAG, "Unable to call registerSurfaceTextureSurface: %s", e);
    } finally {
        // Release our local reference in a finally block so it is not leaked
        // if the binder call throws an unexpected unchecked exception; the
        // remote side holds its own reference from the parcelled Surface.
        surface.release();
    }
}
/** Unregisters a previously registered SurfaceTexture Surface via the callback. */
@SuppressWarnings("unused")
@CalledByNative
private void destroySurfaceTextureSurface(int surfaceTextureId, int clientId) {
    if (mCallback != null) {
        try {
            mCallback.unregisterSurfaceTextureSurface(surfaceTextureId, clientId);
        } catch (RemoteException e) {
            Log.e(TAG, "Unable to call unregisterSurfaceTextureSurface: %s", e);
        }
    } else {
        Log.e(TAG, "No callback interface has been provided.");
    }
}
/**
 * Fetches the Surface backing a registered SurfaceTexture from the browser.
 * Returns {@code null} when no callback is available, when the remote call
 * fails, or when the browser has no wrapper for the given id.
 */
@SuppressWarnings("unused")
@CalledByNative
private Surface getSurfaceTextureSurface(int surfaceTextureId) {
    if (mCallback == null) {
        Log.e(TAG, "No callback interface has been provided.");
        return null;
    }
    try {
        SurfaceWrapper wrapper = mCallback.getSurfaceTextureSurface(surfaceTextureId);
        // Null-guard the wrapper for consistency with getViewSurface();
        // previously a null wrapper caused a NullPointerException here.
        return wrapper != null ? wrapper.getSurface() : null;
    } catch (RemoteException e) {
        Log.e(TAG, "Unable to call getSurfaceTextureSurface: %s", e);
        return null;
    }
}
/**
 * Helper for registering FileDescriptorInfo objects with GlobalFileDescriptors.
 * This includes the IPC channel, the crash dump signals and resource related
 * files.
 */
private static native void nativeRegisterGlobalFileDescriptor(
        int id, int fd, long offset, long size);
/**
 * The main entry point for a child process. This should be called from a new thread since
 * it will not return until the child process exits. See child_process_service.{h,cc}
 *
 * @param service The current ChildProcessService object.
 * @param cpuCount The number of CPUs, as received in setupConnection().
 * @param cpuFeatures The CPU feature mask, as received in setupConnection().
 */
private static native void nativeInitChildProcess(
        ChildProcessService service, int cpuCount, long cpuFeatures);
/**
 * Force the child process to exit.
 */
private static native void nativeExitChildProcess();
/** Asks native code to shut down the main thread; called from onDestroy(). */
private native void nativeShutdownMainThread();
}
| |
package liquibase.sqlgenerator.core;
import liquibase.change.ColumnConfig;
import liquibase.database.Database;
import liquibase.datatype.DatabaseDataType;
import liquibase.statement.NotNullConstraint;
import liquibase.statement.core.AddUniqueConstraintStatement;
import liquibase.structure.core.Schema;
import liquibase.datatype.DataTypeFactory;
import liquibase.database.core.*;
import liquibase.structure.core.Column;
import liquibase.structure.core.Table;
import liquibase.exception.ValidationErrors;
import liquibase.exception.UnexpectedLiquibaseException;
import liquibase.sql.Sql;
import liquibase.sql.UnparsedSql;
import liquibase.sqlgenerator.SqlGeneratorChain;
import liquibase.sqlgenerator.SqlGeneratorFactory;
import liquibase.statement.core.AddColumnStatement;
import liquibase.statement.core.AddForeignKeyConstraintStatement;
import liquibase.statement.AutoIncrementConstraint;
import liquibase.statement.ColumnConstraint;
import liquibase.statement.ForeignKeyConstraint;
import liquibase.util.StringUtils;
import java.util.List;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Generates {@code ALTER TABLE ... ADD} SQL for {@link AddColumnStatement}s,
 * including auto-increment, default-value, NOT NULL, primary key, unique and
 * foreign key clauses, with database-specific variations.
 */
public class AddColumnGenerator extends AbstractSqlGenerator<AddColumnStatement> {

    /**
     * Parses "table(column)" style foreign key references. Compiled once:
     * Pattern compilation is comparatively expensive and this was previously
     * recompiled on every foreign key constraint processed.
     */
    private static final Pattern REFERENCES_PATTERN =
            Pattern.compile("([\\w\\._]+)\\(([\\w_]+)\\)");

    @Override
    public ValidationErrors validate(AddColumnStatement statement, Database database, SqlGeneratorChain sqlGeneratorChain) {
        if (statement.isMultiple()) {
            ValidationErrors validationErrors = new ValidationErrors();
            AddColumnStatement firstColumn = statement.getColumns().get(0);
            for (AddColumnStatement column : statement.getColumns()) {
                validationErrors.addAll(validateSingleColumn(column, database));
                // All sub-statements of a multiple-add must target one table.
                if (firstColumn.getTableName() != null && !firstColumn.getTableName().equals(column.getTableName())) {
                    validationErrors.addError("All columns must be targeted at the same table");
                }
                if (column.isMultiple()) {
                    validationErrors.addError("Nested multiple add column statements are not supported");
                }
            }
            return validationErrors;
        } else {
            return validateSingleColumn(statement, database);
        }
    }

    /** Validates a single (non-multiple) add-column statement against database capabilities. */
    private ValidationErrors validateSingleColumn(AddColumnStatement statement, Database database) {
        ValidationErrors validationErrors = new ValidationErrors();
        validationErrors.checkRequiredField("columnName", statement.getColumnName());
        validationErrors.checkRequiredField("columnType", statement.getColumnType());
        validationErrors.checkRequiredField("tableName", statement.getTableName());
        // These databases cannot add a primary key column via ALTER TABLE.
        if (statement.isPrimaryKey() && (database instanceof H2Database
                || database instanceof AbstractDb2Database
                || database instanceof DerbyDatabase
                || database instanceof SQLiteDatabase)) {
            validationErrors.addError("Cannot add a primary key column");
        }
        // TODO HsqlDatabase autoincrement on non primary key? other databases?
        if (database instanceof MySQLDatabase && statement.isAutoIncrement() && !statement.isPrimaryKey()) {
            validationErrors.addError("Cannot add a non-primary key identity column");
        }
        // TODO is this feature valid for other databases?
        if ((statement.getAddAfterColumn() != null) && !(database instanceof MySQLDatabase)) {
            validationErrors.addError("Cannot add column on specific position");
        }
        if ((statement.getAddBeforeColumn() != null) && !((database instanceof H2Database) || (database instanceof HsqlDatabase))) {
            validationErrors.addError("Cannot add column on specific position");
        }
        if ((statement.getAddAtPosition() != null) && !(database instanceof FirebirdDatabase)) {
            validationErrors.addError("Cannot add column on specific position");
        }
        return validationErrors;
    }

    @Override
    public Sql[] generateSql(AddColumnStatement statement, Database database, SqlGeneratorChain sqlGeneratorChain) {
        if (statement.isMultiple()) {
            return generateMultipleColumns(statement.getColumns(), database);
        } else {
            return generateSingleColumn(statement, database);
        }
    }

    /**
     * Generates SQL for several columns. On MySQL they are folded into a
     * single comma-separated ALTER TABLE; elsewhere one statement per column.
     */
    private Sql[] generateMultipleColumns(List<AddColumnStatement> columns, Database database) {
        List<Sql> result = new ArrayList<Sql>();
        if (database instanceof MySQLDatabase) {
            String alterTable = generateSingleColumBaseSQL(columns.get(0), database);
            for (int i = 0; i < columns.size(); i++) {
                alterTable += generateSingleColumnSQL(columns.get(i), database);
                if (i < columns.size() - 1) {
                    alterTable += ",";
                }
            }
            result.add(new UnparsedSql(alterTable, getAffectedColumns(columns)));
            // Unique and FK constraints are still emitted as separate statements.
            for (AddColumnStatement statement : columns) {
                addUniqueConstraintStatements(statement, database, result);
                addForeignKeyStatements(statement, database, result);
            }
        } else {
            for (AddColumnStatement column : columns) {
                result.addAll(Arrays.asList(generateSingleColumn(column, database)));
            }
        }
        return result.toArray(new Sql[result.size()]);
    }

    /** Generates the ALTER TABLE plus any unique/foreign key statements for one column. */
    protected Sql[] generateSingleColumn(AddColumnStatement statement, Database database) {
        String alterTable = generateSingleColumBaseSQL(statement, database);
        alterTable += generateSingleColumnSQL(statement, database);
        List<Sql> returnSql = new ArrayList<Sql>();
        returnSql.add(new UnparsedSql(alterTable, getAffectedColumn(statement)));
        addUniqueConstraintStatements(statement, database, returnSql);
        addForeignKeyStatements(statement, database, returnSql);
        return returnSql.toArray(new Sql[returnSql.size()]);
    }

    /** Returns the "ALTER TABLE <escaped table>" prefix shared by all columns. */
    protected String generateSingleColumBaseSQL(AddColumnStatement statement, Database database) {
        return "ALTER TABLE " + database.escapeTableName(statement.getCatalogName(), statement.getSchemaName(), statement.getTableName());
    }

    /** Builds the " ADD <column> <type> ..." clause for one column. */
    protected String generateSingleColumnSQL(AddColumnStatement statement, Database database) {
        DatabaseDataType columnType = DataTypeFactory.getInstance().fromDescription(statement.getColumnType() + (statement.isAutoIncrement() ? "{autoIncrement:true}" : ""), database).toDatabaseDataType(database);
        String alterTable = " ADD " + database.escapeColumnName(statement.getCatalogName(), statement.getSchemaName(), statement.getTableName(), statement.getColumnName()) + " " + columnType;
        if (statement.isAutoIncrement() && database.supportsAutoIncrement()) {
            AutoIncrementConstraint autoIncrementConstraint = statement.getAutoIncrementConstraint();
            alterTable += " " + database.getAutoIncrementClause(autoIncrementConstraint.getStartWith(), autoIncrementConstraint.getIncrementBy(), autoIncrementConstraint.getGenerationType(), autoIncrementConstraint.getDefaultOnNull());
        }
        alterTable += getDefaultClause(statement, database);
        if (!statement.isNullable()) {
            // A named NOT NULL constraint (if present) precedes the NOT NULL keyword.
            for (ColumnConstraint constraint : statement.getConstraints()) {
                if (constraint instanceof NotNullConstraint) {
                    NotNullConstraint notNullConstraint = (NotNullConstraint) constraint;
                    if (StringUtils.isNotEmpty(notNullConstraint.getConstraintName())) {
                        alterTable += " CONSTRAINT " + database.escapeConstraintName(notNullConstraint.getConstraintName());
                        break;
                    }
                }
            }
            alterTable += " NOT NULL";
            if (database instanceof OracleDatabase) {
                alterTable += !statement.shouldValidateNullable() ? " ENABLE NOVALIDATE " : "";
            }
        } else {
            if (database instanceof SybaseDatabase || database instanceof SybaseASADatabase || database instanceof MySQLDatabase|| (database instanceof MSSQLDatabase && columnType.toString().equalsIgnoreCase("timestamp"))) {
                alterTable += " NULL";
            }
        }
        if (statement.isPrimaryKey()) {
            alterTable += " PRIMARY KEY";
            if (database instanceof OracleDatabase) {
                alterTable += !statement.shouldValidatePrimaryKey() ? " ENABLE NOVALIDATE " : "";
            }
        }
        if( database instanceof MySQLDatabase && statement.getRemarks() != null ) {
            alterTable += " COMMENT '" + statement.getRemarks() + "' ";
        }
        if (statement.getAddAfterColumn() != null && !statement.getAddAfterColumn().isEmpty()) {
            alterTable += " AFTER `" + statement.getAddAfterColumn() + "` ";
        }
        return alterTable;
    }

    /** Maps each statement to its affected {@link Column} descriptor. */
    protected Column[] getAffectedColumns(List<AddColumnStatement> columns) {
        List<Column> cols = new ArrayList<Column>();
        for (AddColumnStatement c : columns) {
            cols.add(getAffectedColumn(c));
        }
        return cols.toArray(new Column[cols.size()]);
    }

    /** Builds the {@link Column} descriptor for the column this statement adds. */
    protected Column getAffectedColumn(AddColumnStatement statement) {
        return new Column()
                .setRelation(new Table().setName(statement.getTableName()).setSchema(new Schema(statement.getCatalogName(), statement.getSchemaName())))
                .setName(statement.getColumnName());
    }

    /** Appends an ADD UNIQUE CONSTRAINT statement when the column is declared unique. */
    protected void addUniqueConstraintStatements(AddColumnStatement statement, Database database, List<Sql> returnSql) {
        if (statement.isUnique()) {
            AddUniqueConstraintStatement addConstraintStmt = new AddUniqueConstraintStatement(statement.getCatalogName(), statement.getSchemaName(), statement.getTableName(), ColumnConfig.arrayFromNames(statement.getColumnName()), statement.getUniqueStatementName());
            addConstraintStmt.setShouldValidate(statement.shouldValidateUnique());
            returnSql.addAll(Arrays.asList(SqlGeneratorFactory.getInstance().generateSql(addConstraintStmt, database)));
        }
    }

    /** Appends ADD FOREIGN KEY statements for each foreign key constraint on the column. */
    protected void addForeignKeyStatements(AddColumnStatement statement, Database database, List<Sql> returnSql) {
        for (ColumnConstraint constraint : statement.getConstraints()) {
            if (constraint instanceof ForeignKeyConstraint) {
                ForeignKeyConstraint fkConstraint = (ForeignKeyConstraint) constraint;
                String refSchemaName = null;
                String refTableName;
                String refColName;
                if (fkConstraint.getReferences() != null) {
                    // References given in "table(column)" form; parse them out.
                    Matcher referencesMatcher = REFERENCES_PATTERN.matcher(fkConstraint.getReferences());
                    if (!referencesMatcher.matches()) {
                        throw new UnexpectedLiquibaseException("Don't know how to find table and column names from " + fkConstraint.getReferences());
                    }
                    refTableName = referencesMatcher.group(1);
                    refColName = referencesMatcher.group(2);
                } else {
                    refTableName = fkConstraint.getReferencedTableName();
                    refColName = fkConstraint.getReferencedColumnNames();
                }
                if (refTableName.indexOf(".") > 0) {
                    // "schema.table" form: split once instead of twice.
                    String[] tableNameParts = refTableName.split("\\.");
                    refSchemaName = tableNameParts[0];
                    refTableName = tableNameParts[1];
                }
                AddForeignKeyConstraintStatement addForeignKeyConstraintStatement = new AddForeignKeyConstraintStatement(fkConstraint.getForeignKeyName(), statement.getCatalogName(), statement.getSchemaName(), statement.getTableName(), ColumnConfig.arrayFromNames(statement.getColumnName()), null, refSchemaName, refTableName, ColumnConfig.arrayFromNames(refColName));
                addForeignKeyConstraintStatement.setShouldValidate(fkConstraint.shouldValidateForeignKey());
                returnSql.addAll(Arrays.asList(SqlGeneratorFactory.getInstance().generateSql(addForeignKeyConstraintStatement, database)));
            }
        }
    }

    /** Builds the optional " [CONSTRAINT name] DEFAULT ..." clause for the column. */
    private String getDefaultClause(AddColumnStatement statement, Database database) {
        String clause = "";
        Object defaultValue = statement.getDefaultValue();
        if (defaultValue != null) {
            if (database instanceof OracleDatabase && defaultValue.toString().startsWith("GENERATED ALWAYS ")) {
                clause += " " + DataTypeFactory.getInstance().fromObject(defaultValue, database).objectToSql(defaultValue, database);
            } else {
                if (database instanceof MSSQLDatabase) {
                    // MSSQL default constraints are named; generate one if absent.
                    String constraintName = statement.getDefaultValueConstraintName();
                    if (constraintName == null) {
                        constraintName = ((MSSQLDatabase) database).generateDefaultConstraintName(statement.getTableName(), statement.getColumnName());
                    }
                    clause += " CONSTRAINT " + constraintName;
                }
                clause += " DEFAULT " + DataTypeFactory.getInstance().fromDescription(statement.getColumnType(), database).objectToSql(defaultValue, database);
            }
        }
        return clause;
    }
}
| |
/*
* Copyright (C) 2011 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.example.ocr;
import android.graphics.Bitmap;
import android.graphics.Rect;
import android.util.Log;
import java.io.File;
/**
* Java interface for the Tesseract OCR engine. Does not implement all available
* JNI methods, but does implement enough to be useful. Comments are adapted
* from original Tesseract source.
*
* @author alanv@google.com (Alan Viverette)
*/
public class TessBaseAPI {
    /**
     * Used by the native implementation of the class.
     */
    // NOTE(review): this holds a native pointer in a 32-bit int; presumably
    // targets 32-bit JNI only — confirm before porting to 64-bit.
    private int mNativeData;
    static {
        try{
            // Leptonica must load before Tesseract, which links against it.
            System.loadLibrary("lept");
            System.loadLibrary("tess");
        } catch (UnsatisfiedLinkError e){
            Log.d("TessBaseAPI", "TessBaseAPI UnsatisfiedLinkError.But this doesn't mean the lib was not loaded before");
        } catch(Exception e){
            Log.d("TessBaseAPI", "TessBaseAPI load jni lib failed.But this doesn't mean the lib was not loaded before");
        }
        nativeClassInit();
    }
    /** Page segmentation mode constants; see {@link #setPageSegMode(int)}. */
    public static final class PageSegMode {
        /** Orientation and script detection only. */
        public static final int PSM_OSD_ONLY = 0;
        /**
         * Automatic page segmentation with orientation and script detection.
         * (OSD)
         */
        public static final int PSM_AUTO_OSD = 1;
        /** Automatic page segmentation, but no OSD, or OCR. */
        public static final int PSM_AUTO_ONLY = 2;
        /** Fully automatic page segmentation, but no OSD. */
        public static final int PSM_AUTO = 3;
        /** Assume a single column of text of variable sizes. */
        public static final int PSM_SINGLE_COLUMN = 4;
        /** Assume a single uniform block of vertically aligned text. */
        public static final int PSM_SINGLE_BLOCK_VERT_TEXT = 5;
        /** Assume a single uniform block of text. (Default.) */
        public static final int PSM_SINGLE_BLOCK = 6;
        /** Treat the image as a single text line. */
        public static final int PSM_SINGLE_LINE = 7;
        /** Treat the image as a single word. */
        public static final int PSM_SINGLE_WORD = 8;
        /** Treat the image as a single word in a circle. */
        public static final int PSM_CIRCLE_WORD = 9;
        /** Treat the image as a single character. */
        public static final int PSM_SINGLE_CHAR = 10;
        /** Number of enum entries. */
        public static final int PSM_COUNT = 11;
    }
    /**
     * Elements of the page hierarchy, used in {@link ResultIterator} to provide
     * functions that operate on each level without having to have 5x as many
     * functions.
     * <p>
     * NOTE: At present {@link #RIL_PARA} and {@link #RIL_BLOCK} are equivalent
     * as there is no paragraph internally yet.
     */
    public static final class PageIteratorLevel {
        /** Block of text/image/separator line. */
        public static final int RIL_BLOCK = 0;
        /** Paragraph within a block. */
        public static final int RIL_PARA = 1;
        /** Line within a paragraph. */
        public static final int RIL_TEXTLINE = 2;
        /** Word within a text line. */
        public static final int RIL_WORD = 3;
        /** Symbol/character within a word. */
        public static final int RIL_SYMBOL = 4;
    }
    /** Default accuracy versus speed mode. */
    public static final int AVS_FASTEST = 0;
    /** Slowest and most accurate mode. */
    public static final int AVS_MOST_ACCURATE = 100;
    /** Whitelist of characters to recognize. */
    public static final String VAR_CHAR_WHITELIST = "tessedit_char_whitelist";
    /** Blacklist of characters to not recognize. */
    public static final String VAR_CHAR_BLACKLIST = "tessedit_char_blacklist";
    /** Accuracy versus speed setting. */
    public static final String VAR_ACCURACYVSPEED = "tessedit_accuracyvspeed";
    /**
     * Constructs an instance of TessBaseAPI.
     */
    public TessBaseAPI() {
        nativeConstruct();
    }
    /**
     * Called by the GC to clean up the native data that we set up when we
     * construct the object.
     */
    @Override
    protected void finalize() throws Throwable {
        try {
            nativeFinalize();
        } finally {
            super.finalize();
        }
    }
    /**
     * Initializes the Tesseract engine with a specified language model. Returns
     * <code>true</code> on success.
     * <p>
     * Instances are now mostly thread-safe and totally independent, but some
     * global parameters remain. Basically it is safe to use multiple
     * TessBaseAPIs in different threads in parallel, UNLESS you use SetVariable
     * on some of the Params in classify and textord. If you do, then the effect
     * will be to change it for all your instances.
     * <p>
     * The datapath must be the name of the parent directory of tessdata and
     * must end in / . Any name after the last / will be stripped. The language
     * is (usually) an ISO 639-3 string or <code>null</code> will default to
     * eng. It is entirely safe (and eventually will be efficient too) to call
     * Init multiple times on the same instance to change language, or just to
     * reset the classifier.
     * <p>
     * <b>WARNING:</b> On changing languages, all Tesseract parameters are reset
     * back to their default values. (Which may vary between languages.)
     * <p>
     * If you have a rare need to set a Variable that controls initialization
     * for a second call to Init you should explicitly call End() and then use
     * SetVariable before Init. This is only a very rare use case, since there
     * are very few uses that require any parameters to be set before Init.
     *
     * @param datapath the parent directory of tessdata ending in a forward
     *            slash
     * @param language (optional) an ISO 639-3 string representing the language
     * @return <code>true</code> on success
     */
    public boolean init(String datapath, String language) {
        if (datapath == null) {
            throw new IllegalArgumentException("Data path must not be null!");
        }
        // Normalize: the native side expects a trailing separator.
        if (!datapath.endsWith(File.separator)) {
            datapath += File.separator;
        }
        File tessdata = new File(datapath + "tessdata");
        if (!tessdata.exists() || !tessdata.isDirectory()) {
            throw new IllegalArgumentException("Data path must contain subfolder tessdata!");
        }
        return nativeInit(datapath, language);
    }
    /**
     * Frees up recognition results and any stored image data, without actually
     * freeing any recognition data that would be time-consuming to reload.
     * Afterwards, you must call SetImage or SetRectangle before doing any
     * Recognize or Get* operation.
     */
    public void clear() {
        nativeClear();
    }
    /**
     * Closes down tesseract and free up all memory. End() is equivalent to
     * destructing and reconstructing your TessBaseAPI.
     * <p>
     * Once End() has been used, none of the other API functions may be used
     * other than Init and anything declared above it in the class definition.
     */
    public void end() {
        nativeEnd();
    }
    /**
     * Set the value of an internal "variable" (of either old or new types).
     * Supply the name of the variable and the value as a string, just as you
     * would in a config file.
     * <p>
     * Example:
     * <code>setVariable(VAR_TESSEDIT_CHAR_BLACKLIST, "xyz"); to ignore x, y and z. * setVariable(VAR_BLN_NUMERICMODE, "1"); to set numeric-only mode. * </code>
     * <p>
     * setVariable() may be used before open(), but settings will revert to
     * defaults on close().
     *
     * @param var name of the variable
     * @param value value to set
     * @return false if the name lookup failed
     */
    public boolean setVariable(String var, String value) {
        return nativeSetVariable(var, value);
    }
    /**
     * Sets the page segmentation mode. This controls how much processing the
     * OCR engine will perform before recognizing text.
     *
     * @param mode the page segmentation mode to set; one of the
     *            {@link PageSegMode} constants
     */
    public void setPageSegMode(int mode) {
        nativeSetPageSegMode(mode);
    }
    /**
     * Sets debug mode. This controls how much information is displayed in the
     * log during recognition.
     *
     * @param enabled <code>true</code> to enable debugging mode
     */
    public void setDebug(boolean enabled) {
        nativeSetDebug(enabled);
    }
    /**
     * Restricts recognition to a sub-rectangle of the image. Call after
     * SetImage. Each SetRectangle clears the recognition results so multiple
     * rectangles can be recognized with the same image.
     *
     * @param rect the bounding rectangle
     */
    public void setRectangle(Rect rect) {
        setRectangle(rect.left, rect.top, rect.width(), rect.height());
    }
    /**
     * Restricts recognition to a sub-rectangle of the image. Call after
     * SetImage. Each SetRectangle clears the recognition results so multiple
     * rectangles can be recognized with the same image.
     *
     * @param left the left bound
     * @param top the top bound
     * @param width the width of the bounding box
     * @param height the height of the bounding box
     */
    public void setRectangle(int left, int top, int width, int height) {
        nativeSetRectangle(left, top, width, height);
    }
    /**
     * Provides an image for Tesseract to recognize.
     *
     * @param file absolute path to the image file
     */
    public void setImage(File file) {
        Pix image = ReadFile.readFile(file);
        if (image == null) {
            throw new RuntimeException("Failed to read image file");
        }
        nativeSetImagePix(image.getNativePix());
    }
    /**
     * Provides an image for Tesseract to recognize. Does not copy the image
     * buffer. The source image must persist until after Recognize or
     * GetUTF8Chars is called.
     *
     * @param bmp bitmap representation of the image
     */
    public void setImage(Bitmap bmp) {
        Pix image = ReadFile.readBitmap(bmp);
        if (image == null) {
            throw new RuntimeException("Failed to read bitmap");
        }
        nativeSetImagePix(image.getNativePix());
    }
    /**
     * Provides a Leptonica pix format image for Tesseract to recognize. Clones
     * the pix object. The source image may be destroyed immediately after
     * SetImage is called, but its contents may not be modified.
     *
     * @param image Leptonica pix representation of the image
     */
    public void setImage(Pix image) {
        nativeSetImagePix(image.getNativePix());
    }
    /**
     * Provides an image for Tesseract to recognize. Copies the image buffer.
     * The source image may be destroyed immediately after SetImage is called.
     * SetImage clears all recognition results, and sets the rectangle to the
     * full image, so it may be followed immediately by a GetUTF8Text, and it
     * will automatically perform recognition.
     *
     * @param imagedata byte representation of the image
     * @param width image width
     * @param height image height
     * @param bpp bytes per pixel
     * @param bpl bytes per line
     */
    public void setImage(byte[] imagedata, int width, int height, int bpp, int bpl) {
        nativeSetImageBytes(imagedata, width, height, bpp, bpl);
    }
    /**
     * The recognized text is returned as a String which is coded as UTF8.
     *
     * @return the recognized text
     */
    public String getUTF8Text() {
        // Trim because the text will have extra line breaks at the end
        String text = nativeGetUTF8Text();
        return text.trim();
    }
    /**
     * Returns the detected page regions as a Pixa, or <code>null</code> if the
     * native call yields no result.
     */
    public Pixa getRegions() {
        int pixa = nativeGetRegions();
        // 0 is the native "no result" sentinel.
        if (pixa == 0) {
            return null;
        }
        return new Pixa(pixa, 0, 0);
    }
    /**
     * Returns the detected words as a Pixa, or <code>null</code> if the native
     * call yields no result.
     */
    public Pixa getWords() {
        int pixa = nativeGetWords();
        // 0 is the native "no result" sentinel.
        if (pixa == 0) {
            return null;
        }
        return new Pixa(pixa, 0, 0);
    }
    /**
     * Returns the mean confidence of text recognition.
     *
     * @return the mean confidence
     */
    public int meanConfidence() {
        return nativeMeanConfidence();
    }
    /**
     * Returns all word confidences (between 0 and 100) in an array. The number
     * of confidences should correspond to the number of space-delimited words
     * in GetUTF8Text().
     *
     * @return an array of word confidences (between 0 and 100) for each
     *         space-delimited word returned by GetUTF8Text()
     */
    public int[] wordConfidences() {
        int[] conf = nativeWordConfidences();
        // We shouldn't return null confidences
        if (conf == null) {
            conf = new int[0];
        }
        return conf;
    }
    /**
     * Returns an iterator over the recognition results, or <code>null</code>
     * if none is available.
     */
    public ResultIterator getResultIterator() {
        int nativeResultIterator = nativeGetResultIterator();
        if (nativeResultIterator == 0) {
            return null;
        }
        return new ResultIterator(nativeResultIterator);
    }
    // ******************
    // * Native methods *
    // ******************
    /**
     * Initializes static native data. Must be called on object load.
     */
    private static native void nativeClassInit();
    /**
     * Initializes native data. Must be called on object construction.
     */
    private native void nativeConstruct();
    /**
     * Finalizes native data. Must be called on object destruction.
     */
    private native void nativeFinalize();
    private native boolean nativeInit(String datapath, String language);
    private native void nativeClear();
    private native void nativeEnd();
    private native void nativeSetImageBytes(
            byte[] imagedata, int width, int height, int bpp, int bpl);
    private native void nativeSetImagePix(int nativePix);
    private native void nativeSetRectangle(int left, int top, int width, int height);
    private native String nativeGetUTF8Text();
    private native int nativeGetRegions();
    private native int nativeGetWords();
    private native int nativeMeanConfidence();
    private native int[] nativeWordConfidences();
    private native boolean nativeSetVariable(String var, String value);
    private native void nativeSetDebug(boolean debug);
    private native void nativeSetPageSegMode(int mode);
    private native int nativeGetResultIterator();
}
| |
/**
* Java Modular Image Synthesis Toolkit (JMIST)
* Copyright (C) 2018 Bradley W. Kimmel
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
package ca.eandb.jmist.framework.accel;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import ca.eandb.jmist.framework.IntersectionRecorder;
import ca.eandb.jmist.framework.NearestIntersectionRecorder;
import ca.eandb.jmist.framework.SceneElement;
import ca.eandb.jmist.framework.scene.SceneElementDecorator;
import ca.eandb.jmist.math.Box3;
import ca.eandb.jmist.math.Interval;
import ca.eandb.jmist.math.MathUtil;
import ca.eandb.jmist.math.Ray3;
import ca.eandb.jmist.util.ArrayUtil;
import ca.eandb.util.UnexpectedException;
/**
* A decorator <code>SceneElement</code> that applies a bounded interval
* hierarchy (BIH) to the primitives of the underlying <code>SceneElement</code>.
*
* This is an implementation of the algorithm described in the following paper:
*
* <blockquote>
* C. Wachter, A. Keller,
* <a href="http://ainc.de/Research/BIH.pdf">Instant ray tracing: The bounded interval hierarchy</a>,
* In <em>Proceedings of the Eurographics Symposium on Rendering</em>
* pp. 139-149, 2006.
* </blockquote>
*
* @author Brad Kimmel
*/
public final class BoundingIntervalHierarchy extends SceneElementDecorator {

  /** Serialization version ID. */
  private static final long serialVersionUID = -5882424225852208674L;

  /**
   * Permutation of the primitive indices of the underlying scene element.
   * Each leaf node references a contiguous range of this array.
   */
  private transient int[] items;

  /** Packed binary storage for the nodes of the hierarchy. */
  private transient NodeBuffer buffer;

  /** Offset of the root node within {@link #buffer}. */
  private transient int root;

  /** Bounding box of the decorated scene element. */
  private transient Box3 boundingBox;

  /** Whether the hierarchy has been built (or restored from a stream). */
  private transient boolean ready = false;

  /** Maximum number of primitives stored in a single leaf node. */
  private final int maxItemsPerLeaf = 2;

  /**
   * Minimum extent of a node's bounding box; nodes whose longest side is
   * smaller than this are not subdivided further.
   */
  private final double tolerance = MathUtil.EPSILON;

  /**
   * Creates a new <code>BoundingIntervalHierarchy</code>.  The hierarchy is
   * built lazily on the first intersection query.
   *
   * @param inner The <code>SceneElement</code> to apply the bounding interval
   *     hierarchy to.
   */
  public BoundingIntervalHierarchy(SceneElement inner) {
    super(inner);
  }

  /**
   * Creates a new <code>BoundingIntervalHierarchy</code>, restoring the
   * hierarchy from the specified file if it exists, or building the
   * hierarchy and writing it to that file otherwise.
   *
   * @param inner The <code>SceneElement</code> to apply the bounding interval
   *     hierarchy to.
   * @param filename The name of the file containing the pre-generated bounding
   *     interval hierarchy.
   * @throws IOException If the specified file could not be read or written.
   */
  public BoundingIntervalHierarchy(SceneElement inner, String filename) throws IOException {
    super(inner);
    File file = new File(filename);
    if (file.isFile()) {
      FileInputStream fs = new FileInputStream(file);
      try {
        restore(fs);
      } finally {
        fs.close();
      }
    } else {
      FileOutputStream fs = new FileOutputStream(file);
      try {
        save(fs);
        fs.flush();
      } finally {
        fs.close();
      }
    }
  }

  /**
   * Writes the hierarchy to the provided stream, building it first if
   * necessary.  The format is the one read back by {@link #restore}: the
   * item permutation, root offset and bounding box via Java serialization,
   * followed by the node buffer length and its raw bytes.
   *
   * @param out The <code>OutputStream</code> to write to.
   * @throws IOException If writing to the stream fails.
   */
  public void save(OutputStream out) throws IOException {
    ensureReady();
    ObjectOutputStream oos = new ObjectOutputStream(out);
    oos.writeObject(items);
    oos.writeInt(root);
    oos.writeObject(boundingBox);
    oos.writeInt(buffer.next);
    byte[] buf = buffer.buf.array();
    // Only the used prefix of the backing array is written.
    oos.write(buf, 0, buffer.next);
    oos.flush();
    System.out.printf("Wrote %d bytes from backing array of size %d.", buffer.next, buf.length);
    System.out.println();
  }

  /**
   * Restores a hierarchy previously written by {@link #save}.
   *
   * @param in The <code>InputStream</code> to read from.
   * @throws IOException If the stream cannot be read or is truncated.
   */
  public void restore(InputStream in) throws IOException {
    ObjectInputStream ois = new ObjectInputStream(in);
    try {
      items = (int[]) ois.readObject();
      root = ois.readInt();
      boundingBox = (Box3) ois.readObject();
      buffer = new NodeBuffer();
      int size = ois.readInt();
      byte[] buf = new byte[size];
      int pos = 0;
      // read() may return fewer bytes than requested; loop until the whole
      // node buffer has been read.
      while (pos < size) {
        int read = ois.read(buf, pos, size - pos);
        if (read <= 0) {
          throw new IOException(String.format("Failed to read node buffer, only read %d of %d bytes (read=%d).", pos, size, read));
        }
        pos += read;
      }
      buffer.buf = ByteBuffer.wrap(buf);
      buffer.next = size;
      ready = true;
    } catch (ClassNotFoundException e) {
      throw new UnexpectedException(e);
    }
    System.out.println("Restored BIH from file.");
  }

  /** Prints the entire tree to standard output (debugging aid). */
  public void dump() {
    dump(root, 0);
  }

  /**
   * Recursively prints the subtree rooted at the given node.
   *
   * @param node The offset of the subtree root within {@link #buffer}.
   * @param depth The current depth, used for indentation.
   */
  private void dump(int node, int depth) {
    int type = buffer.getType(node);
    if (type == NodeBuffer.TYPE_LEAF) {
      int start = buffer.getStart(node);
      int end = buffer.getEnd(node);
      indent(depth);
      System.out.printf("LEAF(%d,%d):", start, end);
      for (int i = start; i < end; i++) {
        System.out.printf(" %d", items[i]);
      }
      System.out.println();
    } else {
      // For internal nodes the type field encodes the split axis (0..2).
      int axis = 'x' + type;
      double left = buffer.getLeftPlane(node);
      double right = buffer.getRightPlane(node);
      indent(depth);
      System.out.printf("INTERNAL: AXIS=%c, CLIP=(%f, %f)\n", axis, left, right);
      int leftChild = buffer.getLeftChild(node);
      if (leftChild >= 0) {
        indent(depth);
        System.out.println("{L");
        dump(leftChild, depth + 1);
        indent(depth);
        System.out.println("}");
      }
      int rightChild = buffer.getRightChild(node);
      if (rightChild >= 0) {
        indent(depth);
        System.out.println("{R");
        dump(rightChild, depth + 1);
        indent(depth);
        System.out.println("}");
      }
    }
  }

  /** Prints two spaces per level of depth. */
  private void indent(int depth) {
    for (int i = 0; i < depth; i++) {
      System.out.print("  ");
    }
  }

  /**
   * Builds the hierarchy if it has not yet been built.
   * NOTE(review): the unsynchronized read of {@code ready} here is paired
   * with a synchronized re-check in {@link #build()}; confirm this
   * double-check is acceptable for the intended threading model.
   */
  private void ensureReady() {
    if (!ready) {
      build();
    }
  }

  /** Builds the hierarchy over all primitives of the decorated element. */
  private synchronized void build() {
    if (!ready) {
      buffer = new NodeBuffer();
      items = ArrayUtil.range(0, super.getNumPrimitives());
      boundingBox = boundingBox();
      Bound bound = new Bound(boundingBox);
      Clip clip = new Clip();
      root = buffer.allocateInternal();
      build(root, bound, 0, items.length, clip);
      ready = true;
    }
  }

  /**
   * Recursively builds the subtree for the primitives in
   * {@code items[start..end)}.
   *
   * @param offset The offset of the (already allocated) node to write.
   * @param bound The bounding box of the node; temporarily narrowed while
   *     recursing and restored before returning.
   * @param start The start (inclusive) of the item range.
   * @param end The end (exclusive) of the item range.
   * @param clip Scratch object receiving the clip planes from {@link #split}.
   */
  private void build(int offset, Bound bound, int start, int end, Clip clip) {
    assert(end > start);
    double lenx = bound.maxx - bound.minx;
    double leny = bound.maxy - bound.miny;
    double lenz = bound.maxz - bound.minz;
    double maxlen = Math.max(Math.max(lenx, leny), lenz);
    // Split perpendicular to the longest axis, at the spatial midpoint.
    int axis;
    double plane;
    if (lenx > leny && lenx > lenz) {
      axis = 0;
      plane = 0.5 * (bound.minx + bound.maxx);
    } else if (leny > lenz) {
      axis = 1;
      plane = 0.5 * (bound.miny + bound.maxy);
    } else {
      axis = 2;
      plane = 0.5 * (bound.minz + bound.maxz);
    }
    int split = split(axis, plane, start, end, clip);
    int left = split - start;
    int right = end - split;
    // A child is internal if it has enough items (and the node is not
    // degenerately small), a leaf if it has any items at all, and absent
    // otherwise.
    int leftChild = (left > maxItemsPerLeaf && maxlen >= tolerance) ? buffer.allocateInternal() : (left > 0) ? buffer.allocateLeaf() : -1;
    int rightChild = (right > maxItemsPerLeaf && maxlen >= tolerance) ? buffer.allocateInternal() : (right > 0) ? buffer.allocateLeaf() : -1;
    assert(offset >= 0);
    int firstChild = (left > 0) ? leftChild : rightChild;
    // The node must be written before recursing: the shared Clip object is
    // reused (and overwritten) by the recursive calls.
    buffer.writeInternal(offset, axis, clip, firstChild);
    if (left > maxItemsPerLeaf && maxlen >= tolerance) {
      double temp = bound.setMax(axis, plane);
      build(leftChild, bound, start, split, clip);
      bound.setMax(axis, temp);
    } else if (left > 0) {
      assert(leftChild >= 0);
      buffer.writeLeaf(leftChild, start, split);
    }
    if (right > maxItemsPerLeaf && maxlen >= tolerance) {
      double temp = bound.setMin(axis, plane);
      build(rightChild, bound, split, end, clip);
      bound.setMin(axis, temp);
    } else if (right > 0) {
      assert(rightChild >= 0);
      buffer.writeLeaf(rightChild, split, end);
    }
  }

  /** Mutable axis-aligned box used while building the tree. */
  private static class Bound {
    public double minx;
    public double maxx;
    public double miny;
    public double maxy;
    public double minz;
    public double maxz;

    public Bound(Box3 box) {
      minx = box.minimumX();
      miny = box.minimumY();
      minz = box.minimumZ();
      maxx = box.maximumX();
      maxy = box.maximumY();
      maxz = box.maximumZ();
    }

    /**
     * Sets the minimum along the given axis and returns the previous value
     * (so callers can restore it after recursing).
     */
    public double setMin(int axis, double value) {
      double temp;
      switch (axis) {
      case 0:
        temp = minx;
        minx = value;
        return temp;
      case 1:
        temp = miny;
        miny = value;
        return temp;
      case 2:
        temp = minz;
        minz = value;
        return temp;
      }
      throw new IllegalArgumentException();
    }

    /**
     * Sets the maximum along the given axis and returns the previous value
     * (so callers can restore it after recursing).
     */
    public double setMax(int axis, double value) {
      double temp;
      switch (axis) {
      case 0:
        temp = maxx;
        maxx = value;
        return temp;
      case 1:
        temp = maxy;
        maxy = value;
        return temp;
      case 2:
        temp = maxz;
        maxz = value;
        return temp;
      }
      throw new IllegalArgumentException();
    }
  }

  /**
   * Pair of clip planes for an internal node: {@code left} is the maximum
   * extent of the left subtree, {@code right} the minimum extent of the
   * right subtree, along the split axis.
   */
  private static class Clip {
    public double left;
    public double right;

    public Clip() {
      reset();
    }

    /** Resets to "no items on either side". */
    public void reset() {
      left = Double.NEGATIVE_INFINITY;
      right = Double.POSITIVE_INFINITY;
    }
  }

  /**
   * Partitions {@code items[start..end)} about the given plane along the
   * given axis (items whose bounding-box midpoint is below the plane go
   * left) and records the resulting clip planes.
   *
   * @return the index of the first item of the right partition.
   */
  private int split(int axis, double plane, int start, int end, Clip clip) {
    double min, max, mid;
    int split = start;
    clip.reset();
    for (int i = start; i < end; i++) {
      Box3 bound = getBoundingBox(items[i]);
      min = bound.minimum(axis);
      max = bound.maximum(axis);
      mid = 0.5 * (min + max);
      if (mid < plane) {
        // clip.left tracks the largest extent of any left-side item.
        if (max > clip.left) {
          clip.left = max;
        }
        if (i > split) {
          ArrayUtil.swap(items, split, i);
        }
        split++;
      } else {
        // clip.right tracks the smallest extent of any right-side item.
        if (min < clip.right) {
          clip.right = min;
        }
      }
    }
    return split;
  }

  /**
   * Packed storage for tree nodes.  An internal node is 12 bytes: an int
   * whose low two bits hold the split axis and whose remaining bits hold
   * the offset of the first child, followed by the left and right clip
   * planes as floats.  A leaf is 8 bytes: an int holding
   * {@code (start << 2) | TYPE_LEAF} followed by the end index.  Node
   * sizes are multiples of 4, so offsets always have their low two bits
   * clear and can carry the type/axis tag.
   */
  private static final class NodeBuffer {

    private static final int SIZE_INTERNAL = 12;
    private static final int SIZE_LEAF = 8;

    /** Type tag distinguishing leaves from internal nodes (axes 0..2). */
    public static final int TYPE_LEAF = 3;

    private ByteBuffer buf = ByteBuffer.allocate(16384);
    private int next = 0;

    public void writeInternal(int offset, int axis, Clip clip, int firstChild) {
      assert((firstChild & 0x3) == 0);
      buf.position(offset);
      buf.putInt(firstChild | axis);
      // The clip planes are stored as floats; nudge them outward so the
      // stored values remain conservative bounds after rounding.
      float left = (float) clip.left;
      float right = (float) clip.right;
      if (left < clip.left) {
        left = Math.nextUp(left);
      }
      if (right > clip.right) {
        right = Math.nextDown(right);
      }
      buf.putFloat(left);
      buf.putFloat(right);
    }

    public void writeLeaf(int offset, int start, int end) {
      buf.position(offset);
      buf.putInt((start << 2) | 3);
      buf.putInt(end);
    }

    public int allocateInternal() {
      return allocate(SIZE_INTERNAL);
    }

    public int allocateLeaf() {
      return allocate(SIZE_LEAF);
    }

    /**
     * Reserves {@code size} bytes and returns their offset, growing the
     * backing buffer as needed.
     */
    private int allocate(int size) {
      int result = next;
      next += size;
      if (next > buf.capacity()) {
        // Grow until the new capacity is sufficient.  (A single doubling
        // is not guaranteed to be enough in general.)
        int capacity = buf.capacity();
        while (next > capacity) {
          capacity *= 2;
        }
        ByteBuffer newBuf = ByteBuffer.allocate(capacity);
        buf.clear();
        newBuf.put(buf);
        buf = newBuf;
      }
      return result;
    }

    /** Returns the start index of a leaf node's item range. */
    public int getStart(int offset) {
      return buf.getInt(offset) >> 2;
    }

    /** Returns the end index (exclusive) of a leaf node's item range. */
    public int getEnd(int offset) {
      return buf.getInt(offset + 4);
    }

    /** Returns {@link #TYPE_LEAF} for leaves, or the split axis (0..2). */
    public int getType(int offset) {
      return buf.getInt(offset) & 0x3;
    }

    public boolean isLeaf(int offset) {
      return getType(offset) == TYPE_LEAF;
    }

    /** Returns the offset of the node immediately following this one. */
    public int getNext(int offset) {
      if (isLeaf(offset)) {
        return offset + SIZE_LEAF;
      } else {
        return offset + SIZE_INTERNAL;
      }
    }

    /**
     * Returns the left child's offset, or -1 if absent.  An infinite left
     * clip plane means no items fell on the left side.
     */
    public int getLeftChild(int offset) {
      return Double.isInfinite(getLeftPlane(offset)) ? -1 : getFirstChild(offset);
    }

    /**
     * Returns the right child's offset, or -1 if absent.  If there is no
     * left child, the first child is the right child; otherwise the right
     * child follows the left child.
     */
    public int getRightChild(int offset) {
      if (Double.isInfinite(getRightPlane(offset))) {
        return -1;
      } else if (Double.isInfinite(getLeftPlane(offset))) {
        return getFirstChild(offset);
      } else {
        return getNext(getFirstChild(offset));
      }
    }

    public int getFirstChild(int offset) {
      return buf.getInt(offset) & ~0x3;
    }

    public double getLeftPlane(int offset) {
      return (double) buf.getFloat(offset + 4);
    }

    public double getRightPlane(int offset) {
      return (double) buf.getFloat(offset + 8);
    }
  }

  @Override
  public void intersect(Ray3 ray, IntersectionRecorder recorder) {
    ensureReady();
    Interval I = boundingBox.intersect(ray).intersect(recorder.interval());
    if (!I.isEmpty()) {
      intersectNode(root, I.minimum(), I.maximum(), ray, recorder);
    }
  }

  /**
   * Recursively intersects the ray with the subtree rooted at the given
   * node, visiting children in front-to-back order so the far interval can
   * be tightened from recorded hits.
   *
   * @param node The offset of the subtree root within {@link #buffer}.
   * @param near The near end of the ray interval to consider.
   * @param far The far end of the ray interval to consider.
   * @param ray The ray to intersect.
   * @param recorder The recorder receiving intersections.
   */
  private void intersectNode(int node, double near, double far, Ray3 ray,
      IntersectionRecorder recorder) {
    if (far < near) {
      return;
    }
    int type = buffer.getType(node);
    if (type == NodeBuffer.TYPE_LEAF) {
      int start = buffer.getStart(node);
      int end = buffer.getEnd(node);
      for (int i = start; i < end; i++) {
        super.intersect(items[i], ray, recorder);
      }
    } else {
      // For internal nodes the type field is the split axis.
      double p = ray.origin().get(type);
      double v = ray.direction().get(type);
      double lp = buffer.getLeftPlane(node);
      double rp = buffer.getRightPlane(node);
      boolean aligned = Math.abs(v) < MathUtil.SMALL_EPSILON;
      double ld = aligned ? Double.NEGATIVE_INFINITY : (lp - p) / v;
      double rd = aligned ? Double.POSITIVE_INFINITY : (rp - p) / v;
      if (v > MathUtil.SMALL_EPSILON) { // left to right
        if (near < ld) {
          int child = buffer.getLeftChild(node);
          if (child >= 0) {
            intersectNode(child, near, Math.min(ld, far), ray, recorder);
            far = Math.min(far, recorder.interval().maximum());
          }
        }
        if (rd < far) {
          int child = buffer.getRightChild(node);
          if (child >= 0) {
            intersectNode(child, Math.max(near, rd), far, ray, recorder);
          }
        }
      } else { // aligned or right to left
        if (near < rd) {
          int child = buffer.getRightChild(node);
          if (child >= 0) {
            intersectNode(child, near, Math.min(rd, far), ray, recorder);
            far = Math.min(far, recorder.interval().maximum());
          }
        }
        if (ld < far) {
          int child = buffer.getLeftChild(node);
          if (child >= 0) {
            intersectNode(child, Math.max(near, ld), far, ray, recorder);
          }
        }
      }
    }
  }

  @Override
  public boolean visibility(Ray3 ray) {
    // The ray is unobstructed iff no intersection is recorded within its
    // limit.
    NearestIntersectionRecorder recorder = new NearestIntersectionRecorder(new Interval(0.0, ray.limit()));
    intersect(ray, recorder);
    return recorder.isEmpty();
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.api.functions;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeutils.base.IntSerializer;
import org.apache.flink.api.java.typeutils.TypeExtractor;
import org.apache.flink.api.java.typeutils.ValueTypeInfo;
import org.apache.flink.core.memory.DataInputView;
import org.apache.flink.core.memory.DataOutputView;
import org.apache.flink.streaming.api.functions.source.FromElementsFunction;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.streaming.api.operators.StreamSource;
import org.apache.flink.streaming.runtime.tasks.OperatorStateHandles;
import org.apache.flink.streaming.util.AbstractStreamOperatorTestHarness;
import org.apache.flink.types.Value;
import org.apache.flink.util.ExceptionUtils;
import org.junit.Test;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static org.junit.Assert.*;
/**
* Tests for the {@link org.apache.flink.streaming.api.functions.source.FromElementsFunction}.
*/
public class FromElementsFunctionTest {

	/**
	 * Verifies that a simple sequence of strings is emitted in order.
	 *
	 * <p>Test methods declare {@code throws Exception} instead of wrapping the
	 * body in a catch-and-fail block; this preserves the full stack trace of an
	 * unexpected failure and avoids {@code fail(null)} for exceptions without a
	 * message.
	 */
	@Test
	public void testStrings() throws Exception {
		String[] data = { "Oh", "boy", "what", "a", "show", "!"};

		FromElementsFunction<String> source = new FromElementsFunction<String>(
				BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()), data);

		List<String> result = new ArrayList<String>();
		source.run(new ListSourceContext<String>(result));

		assertEquals(Arrays.asList(data), result);
	}

	/**
	 * Verifies that elements of a type that is not Java-serializable are still
	 * emitted correctly (the source relies on the Flink serializer instead).
	 */
	@Test
	public void testNonJavaSerializableType() throws Exception {
		MyPojo[] data = { new MyPojo(1, 2), new MyPojo(3, 4), new MyPojo(5, 6) };

		FromElementsFunction<MyPojo> source = new FromElementsFunction<MyPojo>(
				TypeExtractor.getForClass(MyPojo.class).createSerializer(new ExecutionConfig()), data);

		List<MyPojo> result = new ArrayList<MyPojo>();
		source.run(new ListSourceContext<MyPojo>(result));

		assertEquals(Arrays.asList(data), result);
	}

	/**
	 * Verifies that a failure to serialize an element surfaces as an
	 * {@link IOException} at construction time.
	 */
	@Test
	public void testSerializationError() throws Exception {
		TypeInformation<SerializationErrorType> info =
				new ValueTypeInfo<SerializationErrorType>(SerializationErrorType.class);

		try {
			new FromElementsFunction<SerializationErrorType>(
					info.createSerializer(new ExecutionConfig()), new SerializationErrorType());
			fail("should fail with an exception");
		}
		catch (IOException e) {
			// Expected: the element type throws from write().
			assertTrue(ExceptionUtils.stringifyException(e).contains("test exception"));
		}
	}

	/**
	 * Verifies that deserializing more data than was written surfaces as an
	 * {@link IOException} when the source runs.
	 */
	@Test
	public void testDeSerializationError() throws Exception {
		TypeInformation<DeserializeTooMuchType> info =
				new ValueTypeInfo<DeserializeTooMuchType>(DeserializeTooMuchType.class);

		FromElementsFunction<DeserializeTooMuchType> source = new FromElementsFunction<DeserializeTooMuchType>(
				info.createSerializer(new ExecutionConfig()), new DeserializeTooMuchType());

		try {
			source.run(new ListSourceContext<DeserializeTooMuchType>(new ArrayList<DeserializeTooMuchType>()));
			fail("should fail with an exception");
		}
		catch (IOException e) {
			// Expected: read() consumes more bytes than write() produced.
			assertTrue(ExceptionUtils.stringifyException(e).contains("user-defined serialization"));
		}
	}

	/**
	 * Runs the source asynchronously, snapshots its state mid-run, then runs a
	 * fresh copy restored from that snapshot and checks that the combined
	 * output equals the full input sequence exactly once.
	 */
	@Test
	public void testCheckpointAndRestore() throws Exception {
		final int NUM_ELEMENTS = 10000;

		List<Integer> data = new ArrayList<Integer>(NUM_ELEMENTS);
		List<Integer> result = new ArrayList<Integer>(NUM_ELEMENTS);

		for (int i = 0; i < NUM_ELEMENTS; i++) {
			data.add(i);
		}

		final FromElementsFunction<Integer> source = new FromElementsFunction<>(IntSerializer.INSTANCE, data);
		StreamSource<Integer, FromElementsFunction<Integer>> src = new StreamSource<>(source);
		AbstractStreamOperatorTestHarness<Integer> testHarness =
			new AbstractStreamOperatorTestHarness<>(src, 1, 1, 0);
		testHarness.open();

		// Throttle emission (2ms per element) so the checkpoint below is taken
		// while the source is still running.
		final SourceFunction.SourceContext<Integer> ctx = new ListSourceContext<Integer>(result, 2L);

		final Throwable[] error = new Throwable[1];

		// Run the source asynchronously.
		Thread runner = new Thread() {
			@Override
			public void run() {
				try {
					source.run(ctx);
				}
				catch (Throwable t) {
					error[0] = t;
				}
			}
		};
		runner.start();

		// Wait for a bit so some elements have been emitted.
		Thread.sleep(1000);

		// Make a checkpoint; the snapshot and the copy of the emitted elements
		// must happen atomically under the checkpoint lock.
		List<Integer> checkpointData = new ArrayList<>(NUM_ELEMENTS);
		OperatorStateHandles handles = null;
		synchronized (ctx.getCheckpointLock()) {
			handles = testHarness.snapshot(566, System.currentTimeMillis());
			checkpointData.addAll(result);
		}

		// Cancel the source and wait for it to terminate.
		source.cancel();
		runner.join();

		// Check for errors from the asynchronous run.
		if (error[0] != null) {
			System.err.println("Error in asynchronous source runner");
			error[0].printStackTrace();
			fail("Error in asynchronous source runner");
		}

		// Restore a fresh copy of the source from the snapshot and let it
		// emit the remaining elements.
		final FromElementsFunction<Integer> sourceCopy = new FromElementsFunction<>(IntSerializer.INSTANCE, data);
		StreamSource<Integer, FromElementsFunction<Integer>> srcCopy = new StreamSource<>(sourceCopy);
		AbstractStreamOperatorTestHarness<Integer> testHarnessCopy =
			new AbstractStreamOperatorTestHarness<>(srcCopy, 1, 1, 0);
		testHarnessCopy.setup();
		testHarnessCopy.initializeState(handles);
		testHarnessCopy.open();

		// Recovery run: appends the remaining elements to the checkpointed prefix.
		SourceFunction.SourceContext<Integer> newCtx = new ListSourceContext<>(checkpointData);
		sourceCopy.run(newCtx);

		assertEquals(data, checkpointData);
	}

	// ------------------------------------------------------------------------
	//  Test Types
	// ------------------------------------------------------------------------

	/** Simple POJO that is not Java-serializable. */
	public static class MyPojo {

		public long val1;
		public int val2;

		public MyPojo() {}

		public MyPojo(long val1, int val2) {
			this.val1 = val1;
			this.val2 = val2;
		}

		@Override
		public int hashCode() {
			// Consistent with equals(): equal objects share val2, hence the hash.
			return this.val2;
		}

		@Override
		public boolean equals(Object obj) {
			if (obj instanceof MyPojo) {
				MyPojo that = (MyPojo) obj;
				return this.val1 == that.val1 && this.val2 == that.val2;
			}
			else {
				return false;
			}
		}
	}

	/** A {@link Value} type that fails on both serialization and deserialization. */
	public static class SerializationErrorType implements Value {

		private static final long serialVersionUID = -6037206294939421807L;

		@Override
		public void write(DataOutputView out) throws IOException {
			throw new IOException("test exception");
		}

		@Override
		public void read(DataInputView in) throws IOException {
			throw new IOException("test exception");
		}
	}

	/** A {@link Value} type that reads more bytes than it writes. */
	public static class DeserializeTooMuchType implements Value {

		private static final long serialVersionUID = -6037206294939421807L;

		@Override
		public void write(DataOutputView out) throws IOException {
			out.writeInt(42);
		}

		@Override
		public void read(DataInputView in) throws IOException {
			// Deliberately reads 8 bytes although only 4 were written.
			in.readLong();
		}
	}
}
| |
/*
*
* Copyright 2010-2016 OrientDB LTD (http://orientdb.com)
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orientechnologies.orient.server.network.protocol.http.multipart;
import com.orientechnologies.orient.core.db.document.ODatabaseDocument;
import com.orientechnologies.orient.server.network.protocol.http.OHttpRequest;
import com.orientechnologies.orient.server.network.protocol.http.OHttpResponse;
import com.orientechnologies.orient.server.network.protocol.http.OHttpUtils;
import com.orientechnologies.orient.server.network.protocol.http.command.OServerCommandAuthenticatedDbAbstract;
import java.io.IOException;
import java.util.HashMap;
import java.util.LinkedHashMap;
/** @author Luca Molino (molino.luca--at--gmail.com) */
public abstract class OHttpMultipartRequestCommand<B, F>
    extends OServerCommandAuthenticatedDbAbstract {

  /** Current position in the multipart parsing state machine. */
  private STATUS parseStatus = STATUS.STATUS_EXPECTED_BOUNDARY;

  /** States of the multipart parsing state machine. */
  protected enum STATUS {
    STATUS_EXPECTED_BOUNDARY,
    STATUS_EXPECTED_BOUNDARY_CRLF,
    STATUS_EXPECTED_PART_HEADERS,
    STATUS_EXPECTED_PART_CONTENT,
    STATUS_EXPECTED_END_REQUEST
  }

  /**
   * Parses a multipart request body, dispatching each part to either
   * {@link #parseFileContent} (when a filename header is present) or
   * {@link #parseBaseContent}.
   *
   * @param iRequest The HTTP request whose multipart stream is consumed.
   * @param iResponse The HTTP response used to report malformed input.
   * @param standardContentParser Parser for non-file parts.
   * @param fileContentParser Parser for file parts.
   * @param database The database the parsers operate against.
   * @return the headers of the last parsed part.
   * @throws Exception If reading the stream or parsing a part fails.
   */
  public HashMap<String, String> parse(
      final OHttpRequest iRequest,
      final OHttpResponse iResponse,
      final OHttpMultipartContentParser<B> standardContentParser,
      final OHttpMultipartContentParser<F> fileContentParser,
      final ODatabaseDocument database)
      throws Exception {
    char currChar;
    boolean endRequest = false;
    final OHttpMultipartContentInputStream contentIn =
        new OHttpMultipartContentInputStream(iRequest.getMultipartStream(), iRequest.getBoundary());
    final HashMap<String, String> headers = new LinkedHashMap<String, String>();
    int in;
    // NOTE(review): the `> 0` condition stops on EOF (-1) but also on a NUL
    // byte (0); confirm a NUL can never appear in the boundary/header states
    // this loop reads.
    while (!endRequest && (in = iRequest.getMultipartStream().read()) > 0) {
      currChar = (char) in;
      switch (parseStatus) {
        case STATUS_EXPECTED_BOUNDARY:
          {
            readBoundary(iRequest, iResponse, currChar);
            parseStatus = STATUS.STATUS_EXPECTED_BOUNDARY_CRLF;
            break;
          }
        case STATUS_EXPECTED_BOUNDARY_CRLF:
          {
            endRequest = readBoundaryCrLf(iRequest, iResponse, currChar, endRequest);
            parseStatus = STATUS.STATUS_EXPECTED_PART_HEADERS;
            break;
          }
        case STATUS_EXPECTED_PART_HEADERS:
          {
            parsePartHeaders(iRequest, iResponse, currChar, endRequest, headers);
            parseStatus = STATUS.STATUS_EXPECTED_PART_CONTENT;
            break;
          }
        case STATUS_EXPECTED_PART_CONTENT:
          {
            // Push the already-consumed byte back so the content stream sees it.
            iRequest.getMultipartStream().setSkipInput(in);
            contentIn.reset();
            if (headers.get(OHttpUtils.MULTIPART_CONTENT_FILENAME) != null) {
              parseFileContent(iRequest, fileContentParser, headers, contentIn, database);
            } else {
              parseBaseContent(iRequest, standardContentParser, headers, contentIn, database);
            }
            break;
          }
        case STATUS_EXPECTED_END_REQUEST:
          {
            iRequest.getMultipartStream().setSkipInput(in);
            endRequest = OHttpMultipartHelper.isEndRequest(iRequest);
            if (!endRequest) {
              parseStatus = STATUS.STATUS_EXPECTED_BOUNDARY_CRLF;
            } else {
              parseStatus = STATUS.STATUS_EXPECTED_BOUNDARY;
            }
            break;
          }
      }
    }
    // Reset the state machine so the command instance can parse another request.
    parseStatus = STATUS.STATUS_EXPECTED_BOUNDARY;
    return headers;
  }

  /**
   * Consumes the CR/LF (or closing "--") that follows a boundary.
   *
   * @return {@code true} if the request has ended (either the closing "--"
   *     was seen or the input was malformed and an error response was sent).
   */
  protected boolean readBoundaryCrLf(
      final OHttpRequest iRequest, final OHttpResponse iResponse, char currChar, boolean endRequest)
      throws IOException {
    int in;
    if (currChar == '\r') {
      in = iRequest.getMultipartStream().read();
      currChar = (char) in;
      if (currChar == '\n') {
        return false;
      }
    } else if (currChar == '-') {
      // "--" after a boundary marks the end of the multipart body.
      in = iRequest.getMultipartStream().read();
      currChar = (char) in;
      if (currChar == '-') {
        endRequest = true;
      } else {
        iResponse.send(
            OHttpUtils.STATUS_INVALIDMETHOD_CODE,
            "Wrong request: Expected -",
            OHttpUtils.CONTENT_TEXT_PLAIN,
            "Wrong request: Expected -",
            null);
        endRequest = true;
      }
    } else {
      iResponse.send(
          OHttpUtils.STATUS_INVALIDMETHOD_CODE,
          "Wrong request: Expected CR/LF",
          OHttpUtils.CONTENT_TEXT_PLAIN,
          "Wrong request: Expected CR/LF",
          null);
      endRequest = true;
    }
    return endRequest;
  }

  /**
   * Consumes a boundary line: the leading "--" followed by the boundary
   * string declared in the request.  Sends an error response on mismatch.
   */
  protected void readBoundary(
      final OHttpRequest iRequest, final OHttpResponse iResponse, char currChar)
      throws IOException {
    int in;
    int boundaryCursor = 0;
    for (int i = 0; i < 2; i++) {
      if (currChar != '-') {
        iResponse.send(
            OHttpUtils.STATUS_INVALIDMETHOD_CODE,
            "Wrong request: Expected boundary",
            OHttpUtils.CONTENT_TEXT_PLAIN,
            "Wrong request: Expected boundary",
            null);
        return;
      }
      in = iRequest.getMultipartStream().read();
      currChar = (char) in;
    }
    while (boundaryCursor < iRequest.getBoundary().length()) {
      if (currChar != iRequest.getBoundary().charAt(boundaryCursor)) {
        iResponse.send(
            OHttpUtils.STATUS_INVALIDMETHOD_CODE,
            "Wrong request: Expected boundary",
            OHttpUtils.CONTENT_TEXT_PLAIN,
            "Wrong request: Expected boundary",
            null);
      }
      boundaryCursor++;
      if (boundaryCursor < iRequest.getBoundary().length()) {
        in = iRequest.getMultipartStream().read();
        currChar = (char) in;
      }
    }
  }

  /**
   * Reads the headers of a part, up to and including the blank line
   * (CRLF CRLF) that separates headers from content.  Recognized headers
   * are parsed into {@code headers} via {@link #parseHeader}.
   */
  protected void parsePartHeaders(
      final OHttpRequest iRequest,
      final OHttpResponse iResponse,
      char currChar,
      boolean endRequest,
      final HashMap<String, String> headers)
      throws IOException {
    int in;
    StringBuilder headerName = new StringBuilder();
    boolean endOfHeaders = false;
    while (!endOfHeaders) {
      headerName.append(currChar);
      if (OHttpMultipartHelper.isMultipartPartHeader(headerName)) {
        currChar = parseHeader(iRequest, iResponse, headers, headerName.toString());
        headerName.setLength(0);
      }
      if (currChar == '\r') {
        // Look for CRLF CRLF, which terminates the header section.
        in = iRequest.getMultipartStream().read();
        currChar = (char) in;
        if (currChar == '\n') {
          in = iRequest.getMultipartStream().read();
          currChar = (char) in;
          if (currChar == '\r') {
            in = iRequest.getMultipartStream().read();
            currChar = (char) in;
            if (currChar == '\n') {
              endOfHeaders = true;
            }
          }
        }
      } else {
        in = iRequest.getMultipartStream().read();
        currChar = (char) in;
      }
    }
  }

  /**
   * Parses the value of a single header (terminated by ';' or CR), strips
   * enclosing double quotes, and stores it under {@code headerName}.
   *
   * @return the last character consumed from the stream.
   */
  protected char parseHeader(
      final OHttpRequest iRequest,
      final OHttpResponse iResponse,
      HashMap<String, String> headers,
      final String headerName)
      throws IOException {
    final StringBuilder header = new StringBuilder();
    boolean endOfHeader = false;
    int in;
    char currChar;
    in = iRequest.getMultipartStream().read();
    currChar = (char) in;
    if (currChar == ':') {
      in = iRequest.getMultipartStream().read();
      currChar = (char) in;
      if (currChar != ' ') {
        iResponse.send(
            OHttpUtils.STATUS_INVALIDMETHOD_CODE,
            "Wrong request part header: Expected ' ' (header: " + headerName + ")",
            OHttpUtils.CONTENT_TEXT_PLAIN,
            "Wrong request part header: Expected ' ' (header: " + headerName + ")",
            null);
      }
    } else if (currChar != '=') {
      iResponse.send(
          OHttpUtils.STATUS_INVALIDMETHOD_CODE,
          "Wrong request part header: Expected ':' (header: " + headerName + ")",
          OHttpUtils.CONTENT_TEXT_PLAIN,
          "Wrong request part header: Expected ':' (header: " + headerName + ")",
          null);
    }
    while (!endOfHeader) {
      in = iRequest.getMultipartStream().read();
      currChar = (char) in;
      if (currChar == ';') {
        stripEnclosingQuotes(header);
        headers.put(headerName, header.toString());
        in = iRequest.getMultipartStream().read();
        return (char) in;
      } else if (currChar == '\r') {
        stripEnclosingQuotes(header);
        headers.put(headerName, header.toString());
        return currChar;
      }
      header.append(currChar);
    }
    return currChar;
  }

  /**
   * Removes a leading and/or trailing double quote from {@code header}, if
   * present.  Length-guarded so that an empty header value does not raise
   * {@code StringIndexOutOfBoundsException} (the original unguarded
   * {@code charAt(0)} would).
   */
  private static void stripEnclosingQuotes(final StringBuilder header) {
    if (header.length() > 0 && header.charAt(0) == '"') {
      header.deleteCharAt(0);
    }
    if (header.length() > 0 && header.charAt(header.length() - 1) == '"') {
      header.deleteCharAt(header.length() - 1);
    }
  }

  /**
   * Parses a non-file part and forwards the result to
   * {@link #processBaseContent}.
   */
  protected void parseBaseContent(
      final OHttpRequest iRequest,
      final OHttpMultipartContentParser<B> contentParser,
      final HashMap<String, String> headers,
      final OHttpMultipartContentInputStream in,
      ODatabaseDocument database)
      throws Exception {
    B result = contentParser.parse(iRequest, headers, in, database);
    parseStatus = STATUS.STATUS_EXPECTED_END_REQUEST;
    processBaseContent(iRequest, result, headers);
  }

  /**
   * Parses a file part and forwards the result to
   * {@link #processFileContent}.
   */
  protected void parseFileContent(
      final OHttpRequest iRequest,
      final OHttpMultipartContentParser<F> contentParser,
      final HashMap<String, String> headers,
      final OHttpMultipartContentInputStream in,
      ODatabaseDocument database)
      throws Exception {
    F result = contentParser.parse(iRequest, headers, in, database);
    parseStatus = STATUS.STATUS_EXPECTED_END_REQUEST;
    processFileContent(iRequest, result, headers);
  }

  /** Handles the parsed content of a non-file part. */
  protected abstract void processBaseContent(
      final OHttpRequest iRequest, B iContentResult, HashMap<String, String> headers)
      throws Exception;

  /** Handles the parsed content of a file part. */
  protected abstract void processFileContent(
      final OHttpRequest iRequest, F iContentResult, HashMap<String, String> headers)
      throws Exception;

  // NOTE(review): "Paramenter" is a typo, but the names are part of the
  // public contract implemented by subclasses and cannot be changed here.
  protected abstract String getFileParamenterName();

  protected abstract String getDocumentParamenterName();
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.