diff
stringlengths 262
553k
| is_single_chunk
bool 2
classes | is_single_function
bool 1
class | buggy_function
stringlengths 20
391k
| fixed_function
stringlengths 0
392k
|
|---|---|---|---|---|
diff --git a/plugins/org.eclipse.xtext.ui.core/src/org/eclipse/xtext/ui/core/editor/model/XtextDocumentProvider.java b/plugins/org.eclipse.xtext.ui.core/src/org/eclipse/xtext/ui/core/editor/model/XtextDocumentProvider.java
index c36d49e4b..1f9d2297b 100644
--- a/plugins/org.eclipse.xtext.ui.core/src/org/eclipse/xtext/ui/core/editor/model/XtextDocumentProvider.java
+++ b/plugins/org.eclipse.xtext.ui.core/src/org/eclipse/xtext/ui/core/editor/model/XtextDocumentProvider.java
@@ -1,177 +1,177 @@
/*******************************************************************************
* Copyright (c) 2008 itemis AG (http://www.itemis.eu) and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
*******************************************************************************/
package org.eclipse.xtext.ui.core.editor.model;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.log4j.Logger;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.resources.IResourceChangeEvent;
import org.eclipse.core.resources.IResourceChangeListener;
import org.eclipse.core.resources.IResourceDelta;
import org.eclipse.core.resources.IResourceDeltaVisitor;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.Job;
import org.eclipse.emf.ecore.resource.Resource;
import org.eclipse.emf.ecore.resource.ResourceSet;
import org.eclipse.jface.text.IDocument;
import org.eclipse.ui.IEditorInput;
import org.eclipse.ui.editors.text.FileDocumentProvider;
import org.eclipse.xtext.resource.XtextResource;
import com.google.inject.Inject;
import com.google.inject.Provider;
/**
* @author Peter Friese - Initial contribution and API
* @author Sven Efftinge
*/
public class XtextDocumentProvider extends FileDocumentProvider {
private static final Logger log = Logger.getLogger(XtextDocumentProvider.class);
@Inject
private Provider<XtextDocument> document;
/**
* @author Sven Efftinge - Initial contribution and API
*
* updates referenced EMF Resources on IResourceChangeEvent
*/
private final class ReferencedResourcesUpdater implements IResourceChangeListener {
private final XtextDocument document;
private ReferencedResourcesUpdater(XtextDocument document) {
this.document = document;
}
public void resourceChanged(final IResourceChangeEvent event) {
final ResourceDeltaVisitor visitor = new ResourceDeltaVisitor(document);
try {
event.getDelta().accept(visitor);
} catch (CoreException e) {
log.error(e.getMessage(), e);
}
if (!visitor.deltas.isEmpty()) {
new Job("updating resourceset"){
@Override
protected IStatus run(IProgressMonitor monitor) {
document.modify(new UnitOfWork<Object>() {
public Object exec(XtextResource arg) throws Exception {
for (IResourceDelta delta : visitor.deltas) {
IResource res = delta.getResource();
String string = res.getFullPath().lastSegment();
ResourceSet set = arg.getResourceSet();
for(int i = 0; i < set.getResources().size(); ) {
final Resource emfResource = set.getResources().get(i);
if (emfResource!=null && string.equals(emfResource.getURI().lastSegment())) {
switch (delta.getKind()) {
case IResourceDelta.REMOVED:
// UNLOAD
document.modify(new UnitOfWork<Void>() {
public Void exec(XtextResource arg) throws Exception {
emfResource.unload();
return null;
}
});
if (emfResource.getResourceSet() != null)
set.getResources().remove(emfResource);
break;
case IResourceDelta.CHANGED:
// RELOAD
document.modify(new UnitOfWork<Void>() {
public Void exec(XtextResource arg) throws Exception {
emfResource.unload();
try {
emfResource.load(null);
} catch (IOException e) {
log.error(e.getMessage(), e);
}
return null;
}
});
break;
}
}
- if (set.getResources().get(i) == emfResource)
+ if (set.getResources().size() > i && set.getResources().get(i) == emfResource)
i++;
}
}
arg.reparse(document.get());
return null;
}
});
return Status.OK_STATUS;
}}.schedule();
}
}
}
/**
* @author Sven Efftinge - Initial contribution and API
* collects all interesting IResourceDeltas (those which are referenced by the managed EMF Resource)
*/
private final class ResourceDeltaVisitor implements IResourceDeltaVisitor {
private final XtextDocument document;
private ResourceDeltaVisitor(XtextDocument document) {
this.document = document;
}
public final List<IResourceDelta> deltas = new ArrayList<IResourceDelta>();
public boolean visit(IResourceDelta delta) throws CoreException {
IResource res = delta.getResource();
int kind = delta.getKind();
int flags = delta.getFlags();
if ((kind == IResourceDelta.REMOVED ||
(kind == IResourceDelta.CHANGED && ((IResourceDelta.CONTENT & flags) != 0)))
&& document.isReferenced(res)) {
deltas.add(delta);
}
return true;
}
}
private final List<IResourceChangeListener> resourceChangeListener = new ArrayList<IResourceChangeListener>();
@Override
protected IDocument createEmptyDocument() {
XtextDocument xtextDocument = document.get();
ReferencedResourcesUpdater listener = new ReferencedResourcesUpdater(xtextDocument);
resourceChangeListener.add(listener);
ResourcesPlugin.getWorkspace().addResourceChangeListener(listener,
IResourceChangeEvent.POST_CHANGE);
return xtextDocument;
}
@Override
protected void setupDocument(Object element, IDocument document) {
((XtextDocument) document).setInput((IEditorInput) element);
}
@Override
protected void disconnected() {
super.disconnected();
for (IResourceChangeListener listener : resourceChangeListener) {
ResourcesPlugin.getWorkspace().removeResourceChangeListener(listener);
}
resourceChangeListener.clear();
}
}
| true
| true
|
public void resourceChanged(final IResourceChangeEvent event) {
final ResourceDeltaVisitor visitor = new ResourceDeltaVisitor(document);
try {
event.getDelta().accept(visitor);
} catch (CoreException e) {
log.error(e.getMessage(), e);
}
if (!visitor.deltas.isEmpty()) {
new Job("updating resourceset"){
@Override
protected IStatus run(IProgressMonitor monitor) {
document.modify(new UnitOfWork<Object>() {
public Object exec(XtextResource arg) throws Exception {
for (IResourceDelta delta : visitor.deltas) {
IResource res = delta.getResource();
String string = res.getFullPath().lastSegment();
ResourceSet set = arg.getResourceSet();
for(int i = 0; i < set.getResources().size(); ) {
final Resource emfResource = set.getResources().get(i);
if (emfResource!=null && string.equals(emfResource.getURI().lastSegment())) {
switch (delta.getKind()) {
case IResourceDelta.REMOVED:
// UNLOAD
document.modify(new UnitOfWork<Void>() {
public Void exec(XtextResource arg) throws Exception {
emfResource.unload();
return null;
}
});
if (emfResource.getResourceSet() != null)
set.getResources().remove(emfResource);
break;
case IResourceDelta.CHANGED:
// RELOAD
document.modify(new UnitOfWork<Void>() {
public Void exec(XtextResource arg) throws Exception {
emfResource.unload();
try {
emfResource.load(null);
} catch (IOException e) {
log.error(e.getMessage(), e);
}
return null;
}
});
break;
}
}
if (set.getResources().get(i) == emfResource)
i++;
}
}
arg.reparse(document.get());
return null;
}
});
return Status.OK_STATUS;
}}.schedule();
}
}
|
public void resourceChanged(final IResourceChangeEvent event) {
final ResourceDeltaVisitor visitor = new ResourceDeltaVisitor(document);
try {
event.getDelta().accept(visitor);
} catch (CoreException e) {
log.error(e.getMessage(), e);
}
if (!visitor.deltas.isEmpty()) {
new Job("updating resourceset"){
@Override
protected IStatus run(IProgressMonitor monitor) {
document.modify(new UnitOfWork<Object>() {
public Object exec(XtextResource arg) throws Exception {
for (IResourceDelta delta : visitor.deltas) {
IResource res = delta.getResource();
String string = res.getFullPath().lastSegment();
ResourceSet set = arg.getResourceSet();
for(int i = 0; i < set.getResources().size(); ) {
final Resource emfResource = set.getResources().get(i);
if (emfResource!=null && string.equals(emfResource.getURI().lastSegment())) {
switch (delta.getKind()) {
case IResourceDelta.REMOVED:
// UNLOAD
document.modify(new UnitOfWork<Void>() {
public Void exec(XtextResource arg) throws Exception {
emfResource.unload();
return null;
}
});
if (emfResource.getResourceSet() != null)
set.getResources().remove(emfResource);
break;
case IResourceDelta.CHANGED:
// RELOAD
document.modify(new UnitOfWork<Void>() {
public Void exec(XtextResource arg) throws Exception {
emfResource.unload();
try {
emfResource.load(null);
} catch (IOException e) {
log.error(e.getMessage(), e);
}
return null;
}
});
break;
}
}
if (set.getResources().size() > i && set.getResources().get(i) == emfResource)
i++;
}
}
arg.reparse(document.get());
return null;
}
});
return Status.OK_STATUS;
}}.schedule();
}
}
|
diff --git a/iva.java b/iva.java
index 44c49b9..e562357 100644
--- a/iva.java
+++ b/iva.java
@@ -1,718 +1,720 @@
import java.awt.*;
import java.awt.event.*;
import java.awt.image.*;
import java.io.*;
import javax.imageio.*;
import java.math.*;
/**
* This class demonstrates how to load an Image from an external file
*/
public class iva {
private BufferedImage img = null;
private int width, height;
private double numberOfPixels;
private double mean = 0, std = 0;
private int[][] pixelScore;
private int[][] verticalLines, horizontalLines;
private int numvlines, numhlines;
private int jump = 5, window = 5;
private boolean[][] gridVerticalNoise,
gridHorizontalNoise,
gridDiagonalNoise,
gridGeneralNoise,
gridColorNoise,
gridCenterSurroundNoise;
// some common color code
private final int WHITE = 0xFFFFFF, BLACK = 0, GREY = 0x808080, BLUE = 0x0000FF, YELLOW = 0xFFFF00, GREEN = 0x008000;
private final int v0 = 100, v1 = 50, v2 = 25, v3 = 10, threshold = v0;
// factor
private final double upper = 1.25, lower = 0.75;
public iva(String imgPath){
try {
img = ImageIO.read(new File(imgPath));
getProperties();
} catch (IOException e){
e.printStackTrace();
}
}
public iva(BufferedImage img){
this.img = img;
getProperties();
}
public iva(iva x){
this.img = x.getBufferedImage();
this.height = x.getHeight();
this.width = x.getWidth();
this.numberOfPixels = height * width;
this.mean = x.getMean();
this.std = x.getStandardDeviation();
}
public void colorImageComplete(){
for (int i = 1; i < width - 3; i++){
for (int j = 0; j < height - 3; j++){
if (gridDiagonalNoise[i][j]) setColor(i - 1, j, YELLOW);
else if (gridHorizontalNoise[i][j]) setColor(i - 1, j, BLUE);
else if (gridVerticalNoise[i][j]) setColor(i - 1, j, GREEN);
else setColor(i - 1, j, BLACK);
}
}
output("colorcomplete.png");
}
public void computeMean(){
for(int i = 0; i < width; i++)
for (int j = 0; j < height; j++)
mean += (double)getAlphalessRGB(i, j) / numberOfPixels;
}
// computeMean() must be called before computing standard deviation
public void computeStandardDeviation(){
for(int i = 0; i < width; i++)
for (int j = 0; j < height; j++)
std += Math.pow((double)getAlphalessRGB(i, j) - mean, 2) / numberOfPixels;
std = Math.sqrt(std);
}
public int getAlphalessRGB(int x, int y){
return img.getRGB(x, y) & 0xFFFFFF;
}
public BufferedImage getBufferedImage(){
return this.img;
}
public int getHeight(){
return height;
}
public double getMean(){
return mean;
}
public int getRGB(int x, int y){
return getAlphalessRGB(x, y);
}
public double getStandardDeviation(){
return std;
}
public void getProperties(){
width = img.getWidth();
height = img.getHeight();
numberOfPixels = width * height;
pixelScore = new int[width][height];
gridVerticalNoise = new boolean[width][height];
gridHorizontalNoise = new boolean[width][height];
gridDiagonalNoise = new boolean[width][height];
gridGeneralNoise = new boolean[width][height];
gridColorNoise = new boolean[width][height];
gridCenterSurroundNoise = new boolean[width][height];
verticalLines = new int[1000][4];
horizontalLines = new int[1000][4];
}
public int getWidth(){
return width;
}
public void gradientSmoother(){
// assume mean and std is computed
int color;
for(int i = 0; i < width; i++){
for (int j = 0; j < height; j++){
color = getAlphalessRGB(i, j);
if ((double)color > mean + (double)upper * std){
setColor(i, j, WHITE);
//System.out.println("color:\t" + color + ", compared:\t" + (mean + (double)WHITE * std) + ", WHITE");
}else if ((double)color < mean - (double)lower * std){
setColor(i, j, BLACK);
//System.out.println("color:\t" + color + ", compared:\t" + (mean - (double)BLACK * std) + ", black");
}else{
setColor(i, j, GREY);
//System.out.println("color:\t" + color + ", compared:\t" + (mean + (double)black * std) + "," + (mean - (double)black * std) + " GREY");
}
}
}
output("gradientsmoothed.png");
}
public boolean isInHeightRange(int val){
if (val >= height || val < 0)
return false;
return true;
}
public boolean isInWidthRange(int val){
if (val >= width || val < 0)
return false;
return true;
}
public void lineComplete(){
for (int i = 0; i < width; i++){
for(int j = 0; j < height; j++){
if (getRGB(i, j) == WHITE){
int m ,n,v;
pixelScore[i][j] += v0;
for(int k = 1; k < 4; k++) {
if (k == 1) v = v1;
else if (k == 2) v = v2;
else v = v3;
for(m = i - k; m <= i + k; m++) {
if(m >= 0 && m < width && j - k >= 0)
pixelScore[m][j-k] += v;
if(m >= 0 && m < width && j + k < height)
pixelScore[m][j+k] += v;
}
for(n = j - k + 1; n < j + k; n++) {
if(i - k >= 0 && n >= 0 && n < height)
pixelScore[i-k][n] += v;
if(i + k < width && n >= 0 && n < height)
pixelScore[i+k][n] += v;
}
}
}
}
}
for (int i = 0; i < width; i++){
for (int j = 0; j < height; j++){
if (pixelScore[i][j] > threshold)
setColor(i, j, WHITE);
else
setColor(i, j, BLACK);
}
}
output("after_linecomplete.png");
}
//! line Draw complete need some clarification !!!!!
public void lineDrawComplete(){
int color;
for (int i = 0; i < height; i++){
for (int j = 0; j < width; j++){
color = getRGB(j, i);
if (color == GREEN){
// the pixel's color is green
int topx = j, topy = i, bottomx = j, bottomy = i;
int k = i,l = j, m, n;
for(n = 0; n <= window; n++) {
// find approrpiate m (vertically above the current pixel
// and within the jump range but still in green color)
for(m = 1; m <= jump && isInWidthRange(l + n) && isInHeightRange(k - m) && !(getRGB(l + n, k - m) == GREEN); m++);
if (m <= jump && k >= 0 && isInWidthRange(l + n) && isInHeightRange(k - m)) {
topx = l + n;
topy = k - m;
k = k - m;
l = l + n;
n = -1;
continue;
}
// similarly check horizontally to the left of current pixel
for(m = 1; m <= jump && (l - n) < width && (k - m) >= 0 && !(getRGB(l - n, k - m) == GREEN); m++);
if (m <= jump && k >= 0) {
topx = l - n;
topy = k - m;
k -= m;
l -= n;
n = -1;
continue;
}
}
// reset k, l index
k = i;
l = j;
for(n = 0; n <= window; n++) {
// find approrpiate m (vertically above the current pixel
// and within the jump range but still in green color)
- for(m = 1; m <= jump && !(getRGB(l + n, k + m) == GREEN); m++) ;
+ for(m = 1; m <= jump && isInHeightRange(k + m) && isInWidthRange(l + n) && !(getRGB(l + n, k + m) == GREEN); m++) ;
if (m <= jump && k < height) {
bottomx = l + n;
bottomy = k + m;
k += m;
l += n;
n = -1;
continue;
}
// similarly check horizontally to the left of current pixel
- for(m = 1; m <= jump && !(getRGB(l - n, k + m) == GREEN); m++) ;
+ for(m = 1; m <= jump && isInHeightRange(k + m) && isInWidthRange(l - n) && !(getRGB(l - n, k + m) == GREEN); m++) ;
- if (m <= jump && k < height) {
+ if (m <= jump && k < height && isInHeightRange(k + m) && isInWidthRange(l - n)) {
bottomx = l - n;
bottomy = k + m;
k += m;
l -= n;
n = -1;
continue;
}
}
if (i - topy > threshold || bottomy - i > threshold) {
for(k = i; k >= topy; k--)
for(l = j - window;l <= j + window; l++)
setColor(l, k, BLACK);
for(k = i;k <= bottomy; k++)
for(l = j - window;l <= j + window; l++)
setColor(l, k, BLACK);
// set vertical line vector
verticalLines[numvlines][0] = topx;
verticalLines[numvlines][1] = topy;
verticalLines[numvlines][2] = bottomx;
verticalLines[numvlines][3] = bottomy;
numvlines++;
}
}else if (color == BLUE){
int leftx = j, lefty = i, rightx = j, righty = i;
int k = i, l = j, m, n;
for(n = 0; n <= window; n++) {
- for(m = 1; m <= jump && !(getRGB(l - m, k + n) == BLUE); m++);
+ for(m = 1; m <= jump && isInHeightRange(k + n) && isInWidthRange(l - m) && !(getRGB(l - m, k + n) == BLUE); m++);
- if (m <= jump && l >= 0) {
+ if (m <= jump && l >= 0 && isInHeightRange(k + n) && isInWidthRange(l - m)) {
leftx = l - m;
lefty = k + n;
k += n;
l -= m;
n = -1;
continue;
}
for(m = 1; m <= jump && isInWidthRange(l - m) && isInHeightRange(k - n) && !(getRGB(l - m, k - n) == BLUE); m++) ;
if (m <= jump && l >= 0 && isInWidthRange(l - m) && isInHeightRange(k - n)) {
leftx = l - m;
lefty = k - n;
k -= n;
l -= m;
n = -1;
continue;
}
}
for(n = 0; n <= window; n++) {
- for(m = 1; m <= jump && !(getRGB(l + m, k + n) == BLUE); m++);
+ for(m = 1; m <= jump && isInHeightRange(k + n) && isInWidthRange(l + m) && !(getRGB(l + m, k + n) == BLUE); m++);
- if (m <= jump && l < width) {
+ if (m <= jump && l < width && isInHeightRange(k + n) && isInWidthRange(l + m)) {
rightx = l + m;
righty = k + n;
k += n;
l += m;
n = -1;
continue;
}
- for(m = 1; m <= jump && !(getRGB(l + m, k - n) == BLUE); m++);
+ for(m = 1; m <= jump && isInHeightRange(k - n) && isInWidthRange(l + m) && !(getRGB(l + m, k - n) == BLUE); m++);
- if (m <= jump && l < width) {
+ if (m <= jump && l < width && isInHeightRange(k - n) && isInWidthRange(l + m)) {
rightx = l + m;
righty = k - n;
k -= n;
l += m;
n = -1;
continue;
}
}
if (j - leftx > threshold || rightx - j > threshold) {
for(k = i - window; k <= i + window; k++)
for(l = j;l >= leftx; l--)
- setColor(l, k, BLACK);
+ if (isInWidthRange(l) && isInHeightRange(k))
+ setColor(l, k, BLACK);
for(k = i - window; k <= i + window; k++)
for(l = j; l <= rightx; l++)
- setColor(l, k, BLACK);
+ if (isInWidthRange(l) && isInHeightRange(k))
+ setColor(l, k, BLACK);
horizontalLines[numhlines][0] = leftx;
horizontalLines[numhlines][1] = lefty;
horizontalLines[numhlines][2] = rightx;
horizontalLines[numhlines][3] = righty;
numhlines++;
}
}
}
}
// first attempt to simulate line drawing
// we need a unified method to
// 1. convert BufferedImage object into Graphics2D object
Graphics2D g2d = img.createGraphics();
// 2. draw the line with specific color from specific starting point to end point
// set draw color
g2d.setColor(Color.GREEN);
int index = 0;
for (index = 0; index < numvlines; index++){
g2d.drawLine(verticalLines[index][0], verticalLines[index][1], verticalLines[index][2], verticalLines[index][3]);
}
g2d.setColor(Color.BLUE);
for (index = 0; index < numhlines; index++){
g2d.drawLine(verticalLines[index][0], verticalLines[index][1], verticalLines[index][2], verticalLines[index][3]);
}
// 3. release resource
g2d.dispose();
output("after_drawlinecomplete.png");
}
public void lineThinningComplete(){
// implementation goes here
}
public boolean noiseCenterSurround(int i, int j){
double surroundColorSum, surroundColorAvrg;
int pixelValue;
// Out of bounds, do not calculate
if (i + 2 >= width || j + 2 >= height)
return true;
// Analysis 3X3 space
// * * *
// * *
// * * *
surroundColorSum = getRGB(i, j) + getRGB(i, j + 1) + getRGB(i, j + 2) + getRGB(i + 1, j) +
getRGB(i + 1, j + 2) + getRGB(i + 2, j) + getRGB(i + 2, j + 1) + getRGB(i + 2, j + 2);
surroundColorAvrg = surroundColorSum / 8;
if ((double)getRGB(i + 1, j + 1) > surroundColorAvrg)
return true;
return false;
}
public boolean noiseColor(int i, int j){
int colors[] = new int[16], count[] = new int[16];
int ttlcolor, theColor;
int k, l, m, aPixel;
// Initalize arrays
for(k = 0; k < 16; k++){
colors[k] = -1;
count[k] = 0;
}
// Analysis 3X3 space
for(k = 0; k < 3 && (k + i) < width; k++){
for(l = 0; l < 3 && (l + j) < height; l++){
aPixel = getRGB(i + k, j + l);
for(m = 0; m < 16 && colors[m] != -1 && colors[m] != aPixel; m++);
if (m < 16){
if (colors[m] == -1) colors[m] = aPixel;
count[m]++;
}
}
}
// Copy into next Image
for(ttlcolor = 0; ttlcolor < 16 && colors[ttlcolor] != -1; ttlcolor++);
if (ttlcolor !=1)
return true;
return false;
}
public boolean noiseDiagonal(int i, int j){
double diagSumR, diagAvrgR, diagSumL, diagAvrgL,
remainderSumR, remainderAvrgR, remainderSumL, remainderAvrgL;
// Out of bounds, do not calculate
if (i + 2 >= width || j + 2 >= height)
return true;
// Analysis 3X3 space
// diagSumR diagSumL
// - - * * - -
// - * - - * -
// * - - - - *
diagSumR = getRGB(i + 2, j) + getRGB(i + 1, j + 1) + getRGB(i, j + 2);
diagAvrgR = diagSumR / 3;
diagSumL = getRGB(i, j) + getRGB(i + 1, j + 1) + getRGB(i + 2, j + 2);
diagAvrgL = diagSumL / 3;
remainderSumR = getRGB(i + 1, j) + getRGB(i + 2, j + 1) + getRGB(i + 1, j + 2) + getRGB(i, j + 1) + diagSumL - getRGB(i + 1, j + 1);
remainderAvrgR = remainderSumR / 6;
remainderSumL = remainderSumR + diagSumR - diagSumL;
remainderAvrgL = remainderSumL / 6;
// Copy into next Image
if (diagAvrgR > remainderAvrgR || diagAvrgL > remainderAvrgL)
return true;
return false;
}
public boolean noiseGeneral(int i, int j){
int points = 0;
// Out of bounds, do not calculate
if (i + 2 >= width || j + 2 >= height)
return true;
// Analysis 3X3 space
if (getRGB(i, j) > getRGB(i + 1, j)) points++;
if (getRGB(i + 2, j) > getRGB(i + 1, j)) points++;
if (getRGB(i + 1, j + 1) > getRGB(i, j + 1)) points++;
if (getRGB(i + 1, j + 1) > getRGB(i + 2, j + 1)) points++;
if (getRGB(i, j + 2) > getRGB(i + 1, j + 2)) points++;
if (getRGB(i + 2, j + 2) > getRGB(i + 1, j + 2)) points++;
// Copy into next Image
if (points > 3)
return true;
return false;
}
public boolean noiseHorizontal(int i, int j){
double horizontalSumT, horizontalSumB, horizontalAvrgT, horizontalAvrgB,
remainderSumT, remainderSumB, remainderAvrgT, remainderAvrgB;
// Out of bounds, do not calculate
if (i + 2 >= width || j + 2 >= height)
return true;
// Analysis 3X3 space
// vertical Top *, Bottom #, middle -
// * * *
// - - -
// # # #
horizontalSumT = getRGB(i, j) + getRGB(i + 1, j) + getRGB(i + 2, j);
horizontalAvrgT = horizontalSumT / 3;
horizontalSumB = getRGB(i, j + 2) + getRGB(i + 1, j + 2) + getRGB(i + 2, j + 2);
horizontalAvrgB = horizontalSumB / 3;
remainderSumT = getRGB(i, j + 1) + getRGB(i + 1, j + 1) + getRGB(i + 2, j + 1) + horizontalSumB;
remainderAvrgT = remainderSumT / 6;
// ???
remainderSumB = remainderSumT - horizontalSumB + horizontalSumT;
remainderAvrgB = remainderSumB / 6;
if (horizontalAvrgT > remainderAvrgT)
return true;
return false;
}
// return a boolean flag whether there exists noise within 3 x 3 section of pixels
public boolean noiseVertical(int i, int j){
// Out of bounds, do not calculate
if (width <= i + 2 || height <= j + 2)
return true;
double verticalSumR = 0, verticalSumL = 0, verticalAvrgR, verticalAvrgL,
remainderSumR = 0, remainderSumL = 0, remainderAvrgR, remainderAvrgL;
// Analysis 3X3 space
// vertical Left *, Right #, middle -
// * - #
// * - #
// * - #
verticalSumR += getRGB(i + 2, j) + getRGB(i + 2, j + 1) + getRGB(i + 2, j + 2);
verticalAvrgR = verticalSumR / 3;
verticalSumL += getRGB(i, j) + getRGB(i, j + 1) + getRGB(i, j + 2);
verticalAvrgL = verticalSumL / 3;
remainderSumR = verticalSumL + getRGB(i + 1, j) + getRGB(i + 1, j + 1) + getRGB(i + 1, j + 2);
// ??? remainderAvrgR is never used
remainderAvrgR = remainderSumR / 6;
remainderSumL = remainderSumR - verticalSumL + verticalSumR ;
remainderAvrgL = remainderSumL / 6;
if (verticalAvrgL > remainderAvrgL)
return true;
return false;
}
public void noiseReduction(){
noiseReduction(false);
}
public void noiseReduction(boolean flag){
for (int i = 0; i < width; i++){
for (int j = 0; j < height; j++){
if (noiseVertical(i, j))
gridVerticalNoise[i][j] = true;
if (noiseHorizontal(i, j))
gridHorizontalNoise[i][j] = true;
if (noiseDiagonal(i, j))
gridDiagonalNoise[i][j] = true;
if (noiseGeneral(i, j))
gridGeneralNoise[i][j] = true;
if (noiseCenterSurround(i, j))
gridCenterSurroundNoise[i][j] = true;
if (noiseColor(i, j))
gridColorNoise[i][j] = true;
}
}
String o;
if (flag)
o = "post_";
else
o = "";
noiseSuppression(gridVerticalNoise);
output(o + "after_vertical.png");
noiseSuppression(gridHorizontalNoise);
output(o + "after_horizontal.png");
noiseSuppression(gridDiagonalNoise);
output(o + "after_diagonal.png");
//noiseSuppression(gridGeneralNoise);
//output("after_general.png");
noiseSuppression(gridCenterSurroundNoise);
output(o + "after_centersurround.png");
noiseSuppression(gridColorNoise);
output(o + "after_color.png");
}
protected void noiseSuppression(boolean[][] grid){
for (int j = 0; j < height - 3; j++){
for (int i = 1; i <= width - 3; i++){
if ( ! grid[i][j])
setColor(i - 1, j, BLACK);
else
setColor(i - 1, j, WHITE);
}
}
}
public void output(){
outputImage("o_test.png", "PNG");
}
public void output(String fileName){
outputImage(fileName, "PNG");
}
public void outputImage(String fileName, String type){
try {
File f = new File("./o/" + fileName);
ImageIO.write(img, type, f);
} catch (IOException e){
e.printStackTrace();
}
}
public void preComputation(){
// pre computation steps
computeMean();
computeStandardDeviation();
}
public void setColor(int x, int y, int color){
img.setRGB(x, y, 0xFF000000 | color);
}
public void setImage(BufferedImage img){
this.img = img;
getProperties();
}
}
| false
| true
|
public void lineDrawComplete(){
int color;
for (int i = 0; i < height; i++){
for (int j = 0; j < width; j++){
color = getRGB(j, i);
if (color == GREEN){
// the pixel's color is green
int topx = j, topy = i, bottomx = j, bottomy = i;
int k = i,l = j, m, n;
for(n = 0; n <= window; n++) {
// find approrpiate m (vertically above the current pixel
// and within the jump range but still in green color)
for(m = 1; m <= jump && isInWidthRange(l + n) && isInHeightRange(k - m) && !(getRGB(l + n, k - m) == GREEN); m++);
if (m <= jump && k >= 0 && isInWidthRange(l + n) && isInHeightRange(k - m)) {
topx = l + n;
topy = k - m;
k = k - m;
l = l + n;
n = -1;
continue;
}
// similarly check horizontally to the left of current pixel
for(m = 1; m <= jump && (l - n) < width && (k - m) >= 0 && !(getRGB(l - n, k - m) == GREEN); m++);
if (m <= jump && k >= 0) {
topx = l - n;
topy = k - m;
k -= m;
l -= n;
n = -1;
continue;
}
}
// reset k, l index
k = i;
l = j;
for(n = 0; n <= window; n++) {
// find approrpiate m (vertically above the current pixel
// and within the jump range but still in green color)
for(m = 1; m <= jump && !(getRGB(l + n, k + m) == GREEN); m++) ;
if (m <= jump && k < height) {
bottomx = l + n;
bottomy = k + m;
k += m;
l += n;
n = -1;
continue;
}
// similarly check horizontally to the left of current pixel
for(m = 1; m <= jump && !(getRGB(l - n, k + m) == GREEN); m++) ;
if (m <= jump && k < height) {
bottomx = l - n;
bottomy = k + m;
k += m;
l -= n;
n = -1;
continue;
}
}
if (i - topy > threshold || bottomy - i > threshold) {
for(k = i; k >= topy; k--)
for(l = j - window;l <= j + window; l++)
setColor(l, k, BLACK);
for(k = i;k <= bottomy; k++)
for(l = j - window;l <= j + window; l++)
setColor(l, k, BLACK);
// set vertical line vector
verticalLines[numvlines][0] = topx;
verticalLines[numvlines][1] = topy;
verticalLines[numvlines][2] = bottomx;
verticalLines[numvlines][3] = bottomy;
numvlines++;
}
}else if (color == BLUE){
int leftx = j, lefty = i, rightx = j, righty = i;
int k = i, l = j, m, n;
for(n = 0; n <= window; n++) {
for(m = 1; m <= jump && !(getRGB(l - m, k + n) == BLUE); m++);
if (m <= jump && l >= 0) {
leftx = l - m;
lefty = k + n;
k += n;
l -= m;
n = -1;
continue;
}
for(m = 1; m <= jump && isInWidthRange(l - m) && isInHeightRange(k - n) && !(getRGB(l - m, k - n) == BLUE); m++) ;
if (m <= jump && l >= 0 && isInWidthRange(l - m) && isInHeightRange(k - n)) {
leftx = l - m;
lefty = k - n;
k -= n;
l -= m;
n = -1;
continue;
}
}
for(n = 0; n <= window; n++) {
for(m = 1; m <= jump && !(getRGB(l + m, k + n) == BLUE); m++);
if (m <= jump && l < width) {
rightx = l + m;
righty = k + n;
k += n;
l += m;
n = -1;
continue;
}
for(m = 1; m <= jump && !(getRGB(l + m, k - n) == BLUE); m++);
if (m <= jump && l < width) {
rightx = l + m;
righty = k - n;
k -= n;
l += m;
n = -1;
continue;
}
}
if (j - leftx > threshold || rightx - j > threshold) {
for(k = i - window; k <= i + window; k++)
for(l = j;l >= leftx; l--)
setColor(l, k, BLACK);
for(k = i - window; k <= i + window; k++)
for(l = j; l <= rightx; l++)
setColor(l, k, BLACK);
horizontalLines[numhlines][0] = leftx;
horizontalLines[numhlines][1] = lefty;
horizontalLines[numhlines][2] = rightx;
horizontalLines[numhlines][3] = righty;
numhlines++;
}
}
}
}
// first attempt to simulate line drawing
// we need a unified method to
// 1. convert BufferedImage object into Graphics2D object
Graphics2D g2d = img.createGraphics();
// 2. draw the line with specific color from specific starting point to end point
// set draw color
g2d.setColor(Color.GREEN);
int index = 0;
for (index = 0; index < numvlines; index++){
g2d.drawLine(verticalLines[index][0], verticalLines[index][1], verticalLines[index][2], verticalLines[index][3]);
}
g2d.setColor(Color.BLUE);
for (index = 0; index < numhlines; index++){
g2d.drawLine(verticalLines[index][0], verticalLines[index][1], verticalLines[index][2], verticalLines[index][3]);
}
// 3. release resource
g2d.dispose();
output("after_drawlinecomplete.png");
}
|
public void lineDrawComplete(){
int color;
for (int i = 0; i < height; i++){
for (int j = 0; j < width; j++){
color = getRGB(j, i);
if (color == GREEN){
// the pixel's color is green
int topx = j, topy = i, bottomx = j, bottomy = i;
int k = i,l = j, m, n;
for(n = 0; n <= window; n++) {
// find approrpiate m (vertically above the current pixel
// and within the jump range but still in green color)
for(m = 1; m <= jump && isInWidthRange(l + n) && isInHeightRange(k - m) && !(getRGB(l + n, k - m) == GREEN); m++);
if (m <= jump && k >= 0 && isInWidthRange(l + n) && isInHeightRange(k - m)) {
topx = l + n;
topy = k - m;
k = k - m;
l = l + n;
n = -1;
continue;
}
// similarly check horizontally to the left of current pixel
for(m = 1; m <= jump && (l - n) < width && (k - m) >= 0 && !(getRGB(l - n, k - m) == GREEN); m++);
if (m <= jump && k >= 0) {
topx = l - n;
topy = k - m;
k -= m;
l -= n;
n = -1;
continue;
}
}
// reset k, l index
k = i;
l = j;
for(n = 0; n <= window; n++) {
// find approrpiate m (vertically above the current pixel
// and within the jump range but still in green color)
for(m = 1; m <= jump && isInHeightRange(k + m) && isInWidthRange(l + n) && !(getRGB(l + n, k + m) == GREEN); m++) ;
if (m <= jump && k < height) {
bottomx = l + n;
bottomy = k + m;
k += m;
l += n;
n = -1;
continue;
}
// similarly check horizontally to the left of current pixel
for(m = 1; m <= jump && isInHeightRange(k + m) && isInWidthRange(l - n) && !(getRGB(l - n, k + m) == GREEN); m++) ;
if (m <= jump && k < height && isInHeightRange(k + m) && isInWidthRange(l - n)) {
bottomx = l - n;
bottomy = k + m;
k += m;
l -= n;
n = -1;
continue;
}
}
if (i - topy > threshold || bottomy - i > threshold) {
for(k = i; k >= topy; k--)
for(l = j - window;l <= j + window; l++)
setColor(l, k, BLACK);
for(k = i;k <= bottomy; k++)
for(l = j - window;l <= j + window; l++)
setColor(l, k, BLACK);
// set vertical line vector
verticalLines[numvlines][0] = topx;
verticalLines[numvlines][1] = topy;
verticalLines[numvlines][2] = bottomx;
verticalLines[numvlines][3] = bottomy;
numvlines++;
}
}else if (color == BLUE){
int leftx = j, lefty = i, rightx = j, righty = i;
int k = i, l = j, m, n;
for(n = 0; n <= window; n++) {
for(m = 1; m <= jump && isInHeightRange(k + n) && isInWidthRange(l - m) && !(getRGB(l - m, k + n) == BLUE); m++);
if (m <= jump && l >= 0 && isInHeightRange(k + n) && isInWidthRange(l - m)) {
leftx = l - m;
lefty = k + n;
k += n;
l -= m;
n = -1;
continue;
}
for(m = 1; m <= jump && isInWidthRange(l - m) && isInHeightRange(k - n) && !(getRGB(l - m, k - n) == BLUE); m++) ;
if (m <= jump && l >= 0 && isInWidthRange(l - m) && isInHeightRange(k - n)) {
leftx = l - m;
lefty = k - n;
k -= n;
l -= m;
n = -1;
continue;
}
}
for(n = 0; n <= window; n++) {
for(m = 1; m <= jump && isInHeightRange(k + n) && isInWidthRange(l + m) && !(getRGB(l + m, k + n) == BLUE); m++);
if (m <= jump && l < width && isInHeightRange(k + n) && isInWidthRange(l + m)) {
rightx = l + m;
righty = k + n;
k += n;
l += m;
n = -1;
continue;
}
for(m = 1; m <= jump && isInHeightRange(k - n) && isInWidthRange(l + m) && !(getRGB(l + m, k - n) == BLUE); m++);
if (m <= jump && l < width && isInHeightRange(k - n) && isInWidthRange(l + m)) {
rightx = l + m;
righty = k - n;
k -= n;
l += m;
n = -1;
continue;
}
}
if (j - leftx > threshold || rightx - j > threshold) {
for(k = i - window; k <= i + window; k++)
for(l = j;l >= leftx; l--)
if (isInWidthRange(l) && isInHeightRange(k))
setColor(l, k, BLACK);
for(k = i - window; k <= i + window; k++)
for(l = j; l <= rightx; l++)
if (isInWidthRange(l) && isInHeightRange(k))
setColor(l, k, BLACK);
horizontalLines[numhlines][0] = leftx;
horizontalLines[numhlines][1] = lefty;
horizontalLines[numhlines][2] = rightx;
horizontalLines[numhlines][3] = righty;
numhlines++;
}
}
}
}
// first attempt to simulate line drawing
// we need a unified method to
// 1. convert BufferedImage object into Graphics2D object
Graphics2D g2d = img.createGraphics();
// 2. draw the line with specific color from specific starting point to end point
// set draw color
g2d.setColor(Color.GREEN);
int index = 0;
for (index = 0; index < numvlines; index++){
g2d.drawLine(verticalLines[index][0], verticalLines[index][1], verticalLines[index][2], verticalLines[index][3]);
}
g2d.setColor(Color.BLUE);
for (index = 0; index < numhlines; index++){
g2d.drawLine(verticalLines[index][0], verticalLines[index][1], verticalLines[index][2], verticalLines[index][3]);
}
// 3. release resource
g2d.dispose();
output("after_drawlinecomplete.png");
}
|
diff --git a/Plugins/org.opendarts.core.stats/src/main/java/org/opendarts/core/stats/model/impl/AverageStatsEntry.java b/Plugins/org.opendarts.core.stats/src/main/java/org/opendarts/core/stats/model/impl/AverageStatsEntry.java
index df61ad4..a05ac22 100644
--- a/Plugins/org.opendarts.core.stats/src/main/java/org/opendarts/core/stats/model/impl/AverageStatsEntry.java
+++ b/Plugins/org.opendarts.core.stats/src/main/java/org/opendarts/core/stats/model/impl/AverageStatsEntry.java
@@ -1,165 +1,165 @@
/*
*
*/
package org.opendarts.core.stats.model.impl;
import java.util.Comparator;
import org.opendarts.core.model.dart.IDartsThrow;
import org.opendarts.core.model.game.IGame;
import org.opendarts.core.model.game.IGameEntry;
import org.opendarts.core.model.player.IPlayer;
/**
* The Class MaxStatsEntry.
*
* @param <T> the generic type
*/
public abstract class AverageStatsEntry extends AbstractStatsEntry<AvgEntry> {
/**
* Instantiates a new best stats entry.
*
* @param comparator the comparator
*/
public AverageStatsEntry(String key) {
super(key);
}
/* (non-Javadoc)
* @see org.opendarts.core.stats.model.IStatsEntry#getComparator()
*/
@Override
public Comparator<AvgEntry> getComparator() {
return new Comparator<AvgEntry>() {
@Override
public int compare(AvgEntry o1, AvgEntry o2) {
int result;
if (o1==null && o2!=null) {
result = -1;
} else if (o2!=null && o1==null) {
result = 1;
} else if (o1==null && o2==null) {
result = 0;
} else {
double diff = o1.getAvg() - o2.getAvg();
- if (diff < 0.005) {
+ if (Math.abs(diff) < 0.005) {
result = 0;
} else if (diff>0) {
result = 1;
} else {
result = -1;
}
}
return result;
}
};
}
/* (non-Javadoc)
* @see org.opendarts.prototype.internal.model.stats.AbstractStatsEntry#handleDartsThrow(org.opendarts.prototype.model.game.IGame, org.opendarts.prototype.model.player.IPlayer, org.opendarts.prototype.model.game.IGameEntry, org.opendarts.prototype.model.dart.IDartsThrow)
*/
@Override
public boolean handleDartsThrow(IGame game, IPlayer player,
IGameEntry gameEntry, IDartsThrow dartsThrow) {
return this.addNewInput(
this.getEntryIncr(game, player, gameEntry, dartsThrow),
this.getEntryValue(game, player, gameEntry, dartsThrow));
}
/**
* Gets the entry incr.
*
* @param game the game
* @param player the player
* @param gameEntry the game entry
* @param dartsThrow the darts throw
* @return the entry incr
*/
protected Number getEntryIncr(IGame game, IPlayer player,
IGameEntry gameEntry, IDartsThrow dartsThrow) {
return 1D;
}
/**
* Gets the entry value.
*
* @param game the game
* @param player the player
* @param gameEntry the game entry
* @param dartsThrow the darts throw
* @return the entry value
*/
protected abstract Number getEntryValue(IGame game, IPlayer player,
IGameEntry gameEntry, IDartsThrow dartsThrow);
/**
* Adds the new input.
*
* @param input the input
* @return true, if successful
*/
protected boolean addNewInput(Number incr, Number input) {
if (input != null) {
StatsValue<AvgEntry> value = (StatsValue<AvgEntry>) this.getValue();
if (value == null) {
// new value
value = new StatsValue<AvgEntry>();
this.setValue(value);
AvgEntry entry = new AvgEntry();
entry.addValue(incr, input);
value.setValue(entry);
} else {
AvgEntry entry = value.getValue();
entry.addValue(incr, input);
}
}
return true;
}
/* (non-Javadoc)
* @see org.opendarts.prototype.internal.model.stats.AbstractStatsEntry#getInput(org.opendarts.prototype.model.game.IGame, org.opendarts.prototype.model.player.IPlayer, org.opendarts.prototype.model.game.IGameEntry, org.opendarts.prototype.model.dart.IDartsThrow)
*/
@Override
protected AvgEntry getInput(IGame game, IPlayer player,
IGameEntry gameEntry, IDartsThrow dartsThrow) {
// not called
return null;
}
/* (non-Javadoc)
* @see org.opendarts.prototype.internal.model.stats.AbstractStatsEntry#getUndoInput(org.opendarts.prototype.model.game.IGame, org.opendarts.prototype.model.player.IPlayer, org.opendarts.prototype.model.game.IGameEntry, org.opendarts.prototype.model.dart.IDartsThrow)
*/
@Override
protected AvgEntry getUndoInput(IGame game, IPlayer player,
IGameEntry gameEntry, IDartsThrow dartsThrow) {
return this.getInput(game, player, gameEntry, dartsThrow);
}
/* (non-Javadoc)
* @see org.opendarts.prototype.internal.model.stats.AbstractStatsEntry#undoDartsThrow(org.opendarts.prototype.model.game.IGame, org.opendarts.prototype.model.player.IPlayer, org.opendarts.prototype.model.game.IGameEntry, org.opendarts.prototype.model.dart.IDartsThrow)
*/
@Override
public boolean undoDartsThrow(IGame game, IPlayer player,
IGameEntry gameEntry, IDartsThrow dartsThrow) {
return this.removeNewInput(
this.getEntryIncr(game, player, gameEntry, dartsThrow),
this.getEntryValue(game, player, gameEntry, dartsThrow));
}
/* (non-Javadoc)
* @see org.opendarts.prototype.internal.model.stats.AbstractStatsEntry#undoNewInput(java.lang.Object)
*/
protected boolean removeNewInput(Number incr, Number input) {
boolean result = false;
if (input != null) {
StatsValue<AvgEntry> value = (StatsValue<AvgEntry>) this.getValue();
if (value != null) {
AvgEntry entry = value.getValue();
entry.removeValue(incr, input);
result = true;
}
}
return result;
}
}
| true
| true
|
public Comparator<AvgEntry> getComparator() {
return new Comparator<AvgEntry>() {
@Override
public int compare(AvgEntry o1, AvgEntry o2) {
int result;
if (o1==null && o2!=null) {
result = -1;
} else if (o2!=null && o1==null) {
result = 1;
} else if (o1==null && o2==null) {
result = 0;
} else {
double diff = o1.getAvg() - o2.getAvg();
if (diff < 0.005) {
result = 0;
} else if (diff>0) {
result = 1;
} else {
result = -1;
}
}
return result;
}
};
}
|
public Comparator<AvgEntry> getComparator() {
return new Comparator<AvgEntry>() {
@Override
public int compare(AvgEntry o1, AvgEntry o2) {
int result;
if (o1==null && o2!=null) {
result = -1;
} else if (o2!=null && o1==null) {
result = 1;
} else if (o1==null && o2==null) {
result = 0;
} else {
double diff = o1.getAvg() - o2.getAvg();
if (Math.abs(diff) < 0.005) {
result = 0;
} else if (diff>0) {
result = 1;
} else {
result = -1;
}
}
return result;
}
};
}
|
diff --git a/org.spearce.jgit/src/org/spearce/jgit/lib/Repository.java b/org.spearce.jgit/src/org/spearce/jgit/lib/Repository.java
index b27c23df..730a2672 100644
--- a/org.spearce.jgit/src/org/spearce/jgit/lib/Repository.java
+++ b/org.spearce.jgit/src/org/spearce/jgit/lib/Repository.java
@@ -1,1153 +1,1153 @@
/*
* Copyright (C) 2007, Dave Watson <dwatson@mimvista.com>
* Copyright (C) 2008, Robin Rosenberg <robin.rosenberg@dewire.com>
* Copyright (C) 2008, Shawn O. Pearce <spearce@spearce.org>
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or
* without modification, are permitted provided that the following
* conditions are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* - Neither the name of the Git Development Community nor the
* names of its contributors may be used to endorse or promote
* products derived from this software without specific prior
* written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.spearce.jgit.lib;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FilenameFilter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Vector;
import org.spearce.jgit.errors.IncorrectObjectTypeException;
import org.spearce.jgit.errors.RevisionSyntaxException;
import org.spearce.jgit.stgit.StGitPatch;
import org.spearce.jgit.util.FS;
/**
* Represents a Git repository. A repository holds all objects and refs used for
* managing source code (could by any type of file, but source code is what
* SCM's are typically used for).
*
* In Git terms all data is stored in GIT_DIR, typically a directory called
* .git. A work tree is maintained unless the repository is a bare repository.
* Typically the .git directory is located at the root of the work dir.
*
* <ul>
* <li>GIT_DIR
* <ul>
* <li>objects/ - objects</li>
* <li>refs/ - tags and heads</li>
* <li>config - configuration</li>
* <li>info/ - more configurations</li>
* </ul>
* </li>
* </ul>
*
* This implementation only handles a subtly undocumented subset of git features.
*
*/
public class Repository {
private final File gitDir;
private final File[] objectsDirs;
private final RepositoryConfig config;
private final RefDatabase refs;
private PackFile[] packs;
private GitIndex index;
private List<RepositoryListener> listeners = new Vector<RepositoryListener>(); // thread safe
static private List<RepositoryListener> allListeners = new Vector<RepositoryListener>(); // thread safe
/**
* Construct a representation of a Git repository.
*
* @param d
* GIT_DIR (the location of the repository metadata).
* @throws IOException
* the repository appears to already exist but cannot be
* accessed.
*/
public Repository(final File d) throws IOException {
gitDir = d.getAbsoluteFile();
try {
objectsDirs = readObjectsDirs(FS.resolve(gitDir, "objects"),
new ArrayList<File>()).toArray(new File[0]);
} catch (IOException e) {
IOException ex = new IOException("Cannot find all object dirs for " + gitDir);
ex.initCause(e);
throw ex;
}
refs = new RefDatabase(this);
packs = new PackFile[0];
config = new RepositoryConfig(this);
final boolean isExisting = objectsDirs[0].exists();
if (isExisting) {
getConfig().load();
final String repositoryFormatVersion = getConfig().getString(
"core", null, "repositoryFormatVersion");
if (!"0".equals(repositoryFormatVersion)) {
throw new IOException("Unknown repository format \""
+ repositoryFormatVersion + "\"; expected \"0\".");
}
} else {
getConfig().create();
}
if (isExisting)
scanForPacks();
}
private Collection<File> readObjectsDirs(File objectsDir, Collection<File> ret) throws IOException {
ret.add(objectsDir);
final File altFile = FS.resolve(objectsDir, "info/alternates");
if (altFile.exists()) {
BufferedReader ar = new BufferedReader(new FileReader(altFile));
for (String alt=ar.readLine(); alt!=null; alt=ar.readLine()) {
readObjectsDirs(FS.resolve(objectsDir, alt), ret);
}
ar.close();
}
return ret;
}
/**
* Create a new Git repository initializing the necessary files and
* directories.
*
* @throws IOException
*/
public void create() throws IOException {
if (gitDir.exists()) {
throw new IllegalStateException("Repository already exists: "
+ gitDir);
}
gitDir.mkdirs();
refs.create();
objectsDirs[0].mkdirs();
new File(objectsDirs[0], "pack").mkdir();
new File(objectsDirs[0], "info").mkdir();
new File(gitDir, "branches").mkdir();
new File(gitDir, "remotes").mkdir();
final String master = Constants.R_HEADS + Constants.MASTER;
refs.link(Constants.HEAD, master);
getConfig().create();
getConfig().save();
}
/**
* @return GIT_DIR
*/
public File getDirectory() {
return gitDir;
}
/**
* @return the directory containing the objects owned by this repository.
*/
public File getObjectsDirectory() {
return objectsDirs[0];
}
/**
* @return the configuration of this repository
*/
public RepositoryConfig getConfig() {
return config;
}
/**
* Construct a filename where the loose object having a specified SHA-1
* should be stored. If the object is stored in a shared repository the path
* to the alternative repo will be returned. If the object is not yet store
* a usable path in this repo will be returned. It is assumed that callers
* will look for objects in a pack first.
*
* @param objectId
* @return suggested file name
*/
public File toFile(final AnyObjectId objectId) {
final String n = objectId.name();
String d=n.substring(0, 2);
String f=n.substring(2);
for (int i=0; i<objectsDirs.length; ++i) {
File ret = new File(new File(objectsDirs[i], d), f);
if (ret.exists())
return ret;
}
return new File(new File(objectsDirs[0], d), f);
}
/**
* @param objectId
* @return true if the specified object is stored in this repo or any of the
* known shared repositories.
*/
public boolean hasObject(final AnyObjectId objectId) {
int k = packs.length;
if (k > 0) {
do {
if (packs[--k].hasObject(objectId))
return true;
} while (k > 0);
}
return toFile(objectId).isFile();
}
/**
* @param id
* SHA-1 of an object.
*
* @return a {@link ObjectLoader} for accessing the data of the named
* object, or null if the object does not exist.
* @throws IOException
*/
public ObjectLoader openObject(final AnyObjectId id)
throws IOException {
return openObject(new WindowCursor(),id);
}
/**
* @param curs
* temporary working space associated with the calling thread.
* @param id
* SHA-1 of an object.
*
* @return a {@link ObjectLoader} for accessing the data of the named
* object, or null if the object does not exist.
* @throws IOException
*/
public ObjectLoader openObject(final WindowCursor curs, final AnyObjectId id)
throws IOException {
int k = packs.length;
if (k > 0) {
do {
try {
final ObjectLoader ol = packs[--k].get(curs, id);
if (ol != null)
return ol;
} catch (IOException ioe) {
// This shouldn't happen unless the pack was corrupted
// after we opened it or the VM runs out of memory. This is
// a know problem with memory mapped I/O in java and have
// been noticed with JDK < 1.6. Tell the gc that now is a good
// time to collect and try once more.
try {
curs.release();
System.gc();
final ObjectLoader ol = packs[k].get(curs, id);
if (ol != null)
return ol;
} catch (IOException ioe2) {
ioe2.printStackTrace();
ioe.printStackTrace();
// Still fails.. that's BAD, maybe the pack has
// been corrupted after all, or the gc didn't manage
// to release enough previously mmaped areas.
}
}
} while (k > 0);
}
try {
return new UnpackedObjectLoader(this, id.toObjectId());
} catch (FileNotFoundException fnfe) {
return null;
}
}
/**
* Open object in all packs containing specified object.
*
* @param objectId
* id of object to search for
* @param curs
* temporary working space associated with the calling thread.
* @return collection of loaders for this object, from all packs containing
* this object
* @throws IOException
*/
public Collection<PackedObjectLoader> openObjectInAllPacks(
final AnyObjectId objectId, final WindowCursor curs)
throws IOException {
Collection<PackedObjectLoader> result = new LinkedList<PackedObjectLoader>();
openObjectInAllPacks(objectId, result, curs);
return result;
}
/**
* Open object in all packs containing specified object.
*
* @param objectId
* id of object to search for
* @param resultLoaders
* result collection of loaders for this object, filled with
* loaders from all packs containing specified object
* @param curs
* temporary working space associated with the calling thread.
* @throws IOException
*/
void openObjectInAllPacks(final AnyObjectId objectId,
final Collection<PackedObjectLoader> resultLoaders,
final WindowCursor curs) throws IOException {
for (PackFile pack : packs) {
final PackedObjectLoader loader = pack.get(curs, objectId);
if (loader != null)
resultLoaders.add(loader);
}
}
/**
* @param id
* SHA'1 of a blob
* @return an {@link ObjectLoader} for accessing the data of a named blob
* @throws IOException
*/
public ObjectLoader openBlob(final ObjectId id) throws IOException {
return openObject(id);
}
/**
* @param id
* SHA'1 of a tree
* @return an {@link ObjectLoader} for accessing the data of a named tree
* @throws IOException
*/
public ObjectLoader openTree(final ObjectId id) throws IOException {
return openObject(id);
}
/**
* Access a Commit object using a symbolic reference. This reference may
* be a SHA-1 or ref in combination with a number of symbols translating
* from one ref or SHA1-1 to another, such as HEAD^ etc.
*
* @param revstr a reference to a git commit object
* @return a Commit named by the specified string
* @throws IOException for I/O error or unexpected object type.
*
* @see #resolve(String)
*/
public Commit mapCommit(final String revstr) throws IOException {
final ObjectId id = resolve(revstr);
return id != null ? mapCommit(id) : null;
}
/**
* Access any type of Git object by id and
*
* @param id
* SHA-1 of object to read
* @param refName optional, only relevant for simple tags
* @return The Git object if found or null
* @throws IOException
*/
public Object mapObject(final ObjectId id, final String refName) throws IOException {
final ObjectLoader or = openObject(id);
if (or == null)
return null;
final byte[] raw = or.getBytes();
if (or.getType() == Constants.OBJ_TREE)
return makeTree(id, raw);
if (or.getType() == Constants.OBJ_COMMIT)
return makeCommit(id, raw);
if (or.getType() == Constants.OBJ_TAG)
return makeTag(id, refName, raw);
if (or.getType() == Constants.OBJ_BLOB)
return raw;
throw new IncorrectObjectTypeException(id,
"COMMIT nor TREE nor BLOB nor TAG");
}
/**
* Access a Commit by SHA'1 id.
* @param id
* @return Commit or null
* @throws IOException for I/O error or unexpected object type.
*/
public Commit mapCommit(final ObjectId id) throws IOException {
final ObjectLoader or = openObject(id);
if (or == null)
return null;
final byte[] raw = or.getBytes();
if (Constants.OBJ_COMMIT == or.getType())
return new Commit(this, id, raw);
throw new IncorrectObjectTypeException(id, Constants.TYPE_COMMIT);
}
private Commit makeCommit(final ObjectId id, final byte[] raw) {
Commit ret = new Commit(this, id, raw);
return ret;
}
/**
* Access a Tree object using a symbolic reference. This reference may
* be a SHA-1 or ref in combination with a number of symbols translating
* from one ref or SHA1-1 to another, such as HEAD^{tree} etc.
*
* @param revstr a reference to a git commit object
* @return a Tree named by the specified string
* @throws IOException
*
* @see #resolve(String)
*/
public Tree mapTree(final String revstr) throws IOException {
final ObjectId id = resolve(revstr);
return id != null ? mapTree(id) : null;
}
/**
* Access a Tree by SHA'1 id.
* @param id
* @return Tree or null
* @throws IOException for I/O error or unexpected object type.
*/
public Tree mapTree(final ObjectId id) throws IOException {
final ObjectLoader or = openObject(id);
if (or == null)
return null;
final byte[] raw = or.getBytes();
if (Constants.OBJ_TREE == or.getType()) {
return new Tree(this, id, raw);
}
if (Constants.OBJ_COMMIT == or.getType())
return mapTree(ObjectId.fromString(raw, 5));
throw new IncorrectObjectTypeException(id, Constants.TYPE_TREE);
}
private Tree makeTree(final ObjectId id, final byte[] raw) throws IOException {
Tree ret = new Tree(this, id, raw);
return ret;
}
private Tag makeTag(final ObjectId id, final String refName, final byte[] raw) {
Tag ret = new Tag(this, id, refName, raw);
return ret;
}
/**
* Access a tag by symbolic name.
*
* @param revstr
* @return a Tag or null
* @throws IOException on I/O error or unexpected type
*/
public Tag mapTag(String revstr) throws IOException {
final ObjectId id = resolve(revstr);
return id != null ? mapTag(revstr, id) : null;
}
/**
* Access a Tag by SHA'1 id
* @param refName
* @param id
* @return Commit or null
* @throws IOException for I/O error or unexpected object type.
*/
public Tag mapTag(final String refName, final ObjectId id) throws IOException {
final ObjectLoader or = openObject(id);
if (or == null)
return null;
final byte[] raw = or.getBytes();
if (Constants.OBJ_TAG == or.getType())
return new Tag(this, id, refName, raw);
return new Tag(this, id, refName, null);
}
/**
* Create a command to update, create or delete a ref in this repository.
*
* @param ref
* name of the ref the caller wants to modify.
* @return an update command. The caller must finish populating this command
* and then invoke one of the update methods to actually make a
* change.
* @throws IOException
* a symbolic ref was passed in and could not be resolved back
* to the base ref, as the symbolic ref could not be read.
*/
public RefUpdate updateRef(final String ref) throws IOException {
return refs.newUpdate(ref);
}
/**
* Parse a git revision string and return an object id.
*
* Currently supported is combinations of these.
* <ul>
* <li>SHA-1 - a SHA-1</li>
* <li>refs/... - a ref name</li>
* <li>ref^n - nth parent reference</li>
* <li>ref~n - distance via parent reference</li>
* <li>ref@{n} - nth version of ref</li>
* <li>ref^{tree} - tree references by ref</li>
* <li>ref^{commit} - commit references by ref</li>
* </ul>
*
* Not supported is
* <ul>
* <li>timestamps in reflogs, ref@{full or relative timestamp}</li>
* <li>abbreviated SHA-1's</li>
* </ul>
*
* @param revstr A git object references expression
* @return an ObjectId or null if revstr can't be resolved to any ObjectId
* @throws IOException on serious errors
*/
public ObjectId resolve(final String revstr) throws IOException {
char[] rev = revstr.toCharArray();
Object ref = null;
ObjectId refId = null;
for (int i = 0; i < rev.length; ++i) {
switch (rev[i]) {
case '^':
if (refId == null) {
String refstr = new String(rev,0,i);
refId = resolveSimple(refstr);
if (refId == null)
return null;
}
if (i + 1 < rev.length) {
switch (rev[i + 1]) {
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
int j;
ref = mapObject(refId, null);
if (!(ref instanceof Commit))
throw new IncorrectObjectTypeException(refId, Constants.TYPE_COMMIT);
for (j=i+1; j<rev.length; ++j) {
if (!Character.isDigit(rev[j]))
break;
}
String parentnum = new String(rev, i+1, j-i-1);
int pnum;
try {
pnum = Integer.parseInt(parentnum);
} catch (NumberFormatException e) {
throw new RevisionSyntaxException(
"Invalid commit parent number",
revstr);
}
if (pnum != 0) {
final ObjectId parents[] = ((Commit) ref)
.getParentIds();
if (pnum > parents.length)
refId = null;
else
refId = parents[pnum - 1];
}
i = j - 1;
break;
case '{':
int k;
String item = null;
for (k=i+2; k<rev.length; ++k) {
if (rev[k] == '}') {
item = new String(rev, i+2, k-i-2);
break;
}
}
i = k;
if (item != null)
if (item.equals("tree")) {
ref = mapObject(refId, null);
while (ref instanceof Tag) {
Tag t = (Tag)ref;
refId = t.getObjId();
ref = mapObject(refId, null);
}
if (ref instanceof Treeish)
refId = ((Treeish)ref).getTreeId();
else
throw new IncorrectObjectTypeException(refId, Constants.TYPE_TREE);
}
else if (item.equals("commit")) {
ref = mapObject(refId, null);
while (ref instanceof Tag) {
Tag t = (Tag)ref;
refId = t.getObjId();
ref = mapObject(refId, null);
}
if (!(ref instanceof Commit))
throw new IncorrectObjectTypeException(refId, Constants.TYPE_COMMIT);
}
else if (item.equals("blob")) {
ref = mapObject(refId, null);
while (ref instanceof Tag) {
Tag t = (Tag)ref;
refId = t.getObjId();
ref = mapObject(refId, null);
}
if (!(ref instanceof byte[]))
- throw new IncorrectObjectTypeException(refId, Constants.TYPE_COMMIT);
+ throw new IncorrectObjectTypeException(refId, Constants.TYPE_BLOB);
}
else if (item.equals("")) {
ref = mapObject(refId, null);
if (ref instanceof Tag)
refId = ((Tag)ref).getObjId();
else {
// self
}
}
else
throw new RevisionSyntaxException(revstr);
else
throw new RevisionSyntaxException(revstr);
break;
default:
ref = mapObject(refId, null);
if (ref instanceof Commit) {
final ObjectId parents[] = ((Commit) ref)
.getParentIds();
if (parents.length == 0)
refId = null;
else
refId = parents[0];
} else
throw new IncorrectObjectTypeException(refId, Constants.TYPE_COMMIT);
}
} else {
ref = mapObject(refId, null);
if (ref instanceof Commit) {
final ObjectId parents[] = ((Commit) ref)
.getParentIds();
if (parents.length == 0)
refId = null;
else
refId = parents[0];
} else
throw new IncorrectObjectTypeException(refId, Constants.TYPE_COMMIT);
}
break;
case '~':
if (ref == null) {
String refstr = new String(rev,0,i);
refId = resolveSimple(refstr);
ref = mapCommit(refId);
}
int l;
for (l = i + 1; l < rev.length; ++l) {
if (!Character.isDigit(rev[l]))
break;
}
String distnum = new String(rev, i+1, l-i-1);
int dist;
try {
dist = Integer.parseInt(distnum);
} catch (NumberFormatException e) {
throw new RevisionSyntaxException(
"Invalid ancestry length", revstr);
}
while (dist >= 0) {
final ObjectId[] parents = ((Commit) ref).getParentIds();
if (parents.length == 0) {
refId = null;
break;
}
refId = parents[0];
ref = mapCommit(refId);
--dist;
}
i = l - 1;
break;
case '@':
int m;
String time = null;
for (m=i+2; m<rev.length; ++m) {
if (rev[m] == '}') {
time = new String(rev, i+2, m-i-2);
break;
}
}
if (time != null)
throw new RevisionSyntaxException("reflogs not yet supported by revision parser yet", revstr);
i = m - 1;
break;
default:
if (refId != null)
throw new RevisionSyntaxException(revstr);
}
}
if (refId == null)
refId = resolveSimple(revstr);
return refId;
}
private ObjectId resolveSimple(final String revstr) throws IOException {
if (ObjectId.isId(revstr))
return ObjectId.fromString(revstr);
final Ref r = refs.readRef(revstr);
return r != null ? r.getObjectId() : null;
}
/**
* Close all resources used by this repository
*/
public void close() {
closePacks();
}
void closePacks() {
for (int k = packs.length - 1; k >= 0; k--) {
packs[k].close();
}
packs = new PackFile[0];
}
/**
* Add a single existing pack to the list of available pack files.
*
* @param pack
* path of the pack file to open.
* @param idx
* path of the corresponding index file.
* @throws IOException
* index file could not be opened, read, or is not recognized as
* a Git pack file index.
*/
public void openPack(final File pack, final File idx) throws IOException {
final String p = pack.getName();
final String i = idx.getName();
if (p.length() != 50 || !p.startsWith("pack-") || !p.endsWith(".pack"))
throw new IllegalArgumentException("Not a valid pack " + pack);
if (i.length() != 49 || !i.startsWith("pack-") || !i.endsWith(".idx"))
throw new IllegalArgumentException("Not a valid pack " + idx);
if (!p.substring(0,45).equals(i.substring(0,45)))
throw new IllegalArgumentException("Pack " + pack
+ "does not match index " + idx);
final PackFile[] cur = packs;
final PackFile[] arr = new PackFile[cur.length + 1];
System.arraycopy(cur, 0, arr, 1, cur.length);
arr[0] = new PackFile(this, idx, pack);
packs = arr;
}
/**
* Scan the object dirs, including alternates for packs
* to use.
*/
public void scanForPacks() {
final ArrayList<PackFile> p = new ArrayList<PackFile>();
for (int i=0; i<objectsDirs.length; ++i)
scanForPacks(new File(objectsDirs[i], "pack"), p);
final PackFile[] arr = new PackFile[p.size()];
p.toArray(arr);
packs = arr;
}
private void scanForPacks(final File packDir, Collection<PackFile> packList) {
final String[] idxList = packDir.list(new FilenameFilter() {
public boolean accept(final File baseDir, final String n) {
// Must match "pack-[0-9a-f]{40}.idx" to be an index.
return n.length() == 49 && n.endsWith(".idx")
&& n.startsWith("pack-");
}
});
if (idxList != null) {
for (final String indexName : idxList) {
final String n = indexName.substring(0, indexName.length() - 4);
final File idxFile = new File(packDir, n + ".idx");
final File packFile = new File(packDir, n + ".pack");
if (!packFile.isFile()) {
// Sometimes C Git's http fetch transport leaves a
// .idx file behind and does not download the .pack.
// We have to skip over such useless indexes.
//
continue;
}
try {
packList.add(new PackFile(this, idxFile, packFile));
} catch (IOException ioe) {
// Whoops. That's not a pack!
//
ioe.printStackTrace();
}
}
}
}
/**
* Writes a symref (e.g. HEAD) to disk
*
* @param name symref name
* @param target pointed to ref
* @throws IOException
*/
public void writeSymref(final String name, final String target)
throws IOException {
refs.link(name, target);
}
public String toString() {
return "Repository[" + getDirectory() + "]";
}
/**
* @return name of topmost Stacked Git patch.
* @throws IOException
*/
public String getPatch() throws IOException {
final File ptr = new File(getDirectory(),"patches/"+getBranch()+"/applied");
final BufferedReader br = new BufferedReader(new FileReader(ptr));
String last=null;
try {
String line;
while ((line=br.readLine())!=null) {
last = line;
}
} finally {
br.close();
}
return last;
}
/**
* @return name of current branch
* @throws IOException
*/
public String getFullBranch() throws IOException {
final File ptr = new File(getDirectory(),"HEAD");
final BufferedReader br = new BufferedReader(new FileReader(ptr));
String ref;
try {
ref = br.readLine();
} finally {
br.close();
}
if (ref.startsWith("ref: "))
ref = ref.substring(5);
return ref;
}
/**
* @return name of current branch.
* @throws IOException
*/
public String getBranch() throws IOException {
try {
final File ptr = new File(getDirectory(), Constants.HEAD);
final BufferedReader br = new BufferedReader(new FileReader(ptr));
String ref;
try {
ref = br.readLine();
} finally {
br.close();
}
if (ref.startsWith("ref: "))
ref = ref.substring(5);
if (ref.startsWith("refs/heads/"))
ref = ref.substring(11);
return ref;
} catch (FileNotFoundException e) {
final File ptr = new File(getDirectory(),"head-name");
final BufferedReader br = new BufferedReader(new FileReader(ptr));
String ref;
try {
ref = br.readLine();
} finally {
br.close();
}
return ref;
}
}
/**
* @return all known refs (heads, tags, remotes).
*/
public Map<String, Ref> getAllRefs() {
return refs.getAllRefs();
}
/**
* @return all tags; key is short tag name ("v1.0") and value of the entry
* contains the ref with the full tag name ("refs/tags/v1.0").
*/
public Map<String, Ref> getTags() {
return refs.getTags();
}
/**
* @return true if HEAD points to a StGit patch.
*/
public boolean isStGitMode() {
try {
File file = new File(getDirectory(), "HEAD");
BufferedReader reader = new BufferedReader(new FileReader(file));
String string = reader.readLine();
if (!string.startsWith("ref: refs/heads/"))
return false;
String branch = string.substring("ref: refs/heads/".length());
File currentPatches = new File(new File(new File(getDirectory(),
"patches"), branch), "applied");
if (!currentPatches.exists())
return false;
if (currentPatches.length() == 0)
return false;
return true;
} catch (IOException e) {
e.printStackTrace();
return false;
}
}
/**
* @return applied patches in a map indexed on current commit id
* @throws IOException
*/
public Map<ObjectId,StGitPatch> getAppliedPatches() throws IOException {
Map<ObjectId,StGitPatch> ret = new HashMap<ObjectId,StGitPatch>();
if (isStGitMode()) {
File patchDir = new File(new File(getDirectory(),"patches"),getBranch());
BufferedReader apr = new BufferedReader(new FileReader(new File(patchDir,"applied")));
for (String patchName=apr.readLine(); patchName!=null; patchName=apr.readLine()) {
File topFile = new File(new File(new File(patchDir,"patches"), patchName), "top");
BufferedReader tfr = new BufferedReader(new FileReader(topFile));
String objectId = tfr.readLine();
ObjectId id = ObjectId.fromString(objectId);
ret.put(id, new StGitPatch(patchName, id));
tfr.close();
}
apr.close();
}
return ret;
}
/** Clean up stale caches */
public void refreshFromDisk() {
refs.clearCache();
}
/**
* @return a representation of the index associated with this repo
* @throws IOException
*/
public GitIndex getIndex() throws IOException {
if (index == null) {
index = new GitIndex(this);
index.read();
} else {
index.rereadIfNecessary();
}
return index;
}
static byte[] gitInternalSlash(byte[] bytes) {
if (File.separatorChar == '/')
return bytes;
for (int i=0; i<bytes.length; ++i)
if (bytes[i] == File.separatorChar)
bytes[i] = '/';
return bytes;
}
/**
* @return an important state
*/
public RepositoryState getRepositoryState() {
if (new File(getWorkDir(), ".dotest").exists())
return RepositoryState.REBASING;
if (new File(gitDir,".dotest-merge").exists())
return RepositoryState.REBASING_INTERACTIVE;
if (new File(gitDir,"MERGE_HEAD").exists())
return RepositoryState.MERGING;
if (new File(gitDir,"BISECT_LOG").exists())
return RepositoryState.BISECTING;
return RepositoryState.SAFE;
}
/**
* Check validity of a ref name. It must not contain character that has
* a special meaning in a Git object reference expression. Some other
* dangerous characters are also excluded.
*
* @param refName
*
* @return true if refName is a valid ref name
*/
public static boolean isValidRefName(final String refName) {
final int len = refName.length();
if (len == 0)
return false;
char p = '\0';
for (int i=0; i<len; ++i) {
char c = refName.charAt(i);
if (c <= ' ')
return false;
switch(c) {
case '.':
if (i == 0)
return false;
if (p == '/')
return false;
if (p == '.')
return false;
break;
case '/':
if (i == 0)
return false;
if (i == len -1)
return false;
break;
case '~': case '^': case ':':
case '?': case '[':
return false;
case '*':
return false;
}
p = c;
}
return true;
}
/**
* Strip work dir and return normalized repository path
*
* @param wd Work dir
* @param f File whose path shall be stripped of its workdir
* @return normalized repository relative path
*/
public static String stripWorkDir(File wd, File f) {
String relName = f.getPath().substring(wd.getPath().length() + 1);
relName = relName.replace(File.separatorChar, '/');
return relName;
}
/**
* @return the workdir file, i.e. where the files are checked out
*/
public File getWorkDir() {
return getDirectory().getParentFile();
}
/**
* Register a {@link RepositoryListener} which will be notified
* when ref changes are detected.
*
* @param l
*/
public void addRepositoryChangedListener(final RepositoryListener l) {
listeners.add(l);
}
/**
* Remove a registered {@link RepositoryListener}
* @param l
*/
public void removeRepositoryChangedListener(final RepositoryListener l) {
listeners.remove(l);
}
/**
* Register a global {@link RepositoryListener} which will be notified
* when a ref changes in any repository are detected.
*
* @param l
*/
public static void addAnyRepositoryChangedListener(final RepositoryListener l) {
allListeners.add(l);
}
/**
* Remove a globally registered {@link RepositoryListener}
* @param l
*/
public static void removeAnyRepositoryChangedListener(final RepositoryListener l) {
allListeners.remove(l);
}
void fireRefsMaybeChanged() {
if (refs.lastRefModification != refs.lastNotifiedRefModification) {
refs.lastNotifiedRefModification = refs.lastRefModification;
final RefsChangedEvent event = new RefsChangedEvent(this);
List<RepositoryListener> all;
synchronized (listeners) {
all = new ArrayList<RepositoryListener>(listeners);
}
synchronized (allListeners) {
all.addAll(allListeners);
}
for (final RepositoryListener l : all) {
l.refsChanged(event);
}
}
}
void fireIndexChanged() {
final IndexChangedEvent event = new IndexChangedEvent(this);
List<RepositoryListener> all;
synchronized (listeners) {
all = new ArrayList<RepositoryListener>(listeners);
}
synchronized (allListeners) {
all.addAll(allListeners);
}
for (final RepositoryListener l : all) {
l.indexChanged(event);
}
}
/**
* Force a scan for changed refs.
*
* @throws IOException
*/
public void scanForRepoChanges() throws IOException {
getAllRefs(); // This will look for changes to refs
getIndex(); // This will detect changes in the index
}
}
| true
| true
|
public ObjectId resolve(final String revstr) throws IOException {
char[] rev = revstr.toCharArray();
Object ref = null;
ObjectId refId = null;
for (int i = 0; i < rev.length; ++i) {
switch (rev[i]) {
case '^':
if (refId == null) {
String refstr = new String(rev,0,i);
refId = resolveSimple(refstr);
if (refId == null)
return null;
}
if (i + 1 < rev.length) {
switch (rev[i + 1]) {
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
int j;
ref = mapObject(refId, null);
if (!(ref instanceof Commit))
throw new IncorrectObjectTypeException(refId, Constants.TYPE_COMMIT);
for (j=i+1; j<rev.length; ++j) {
if (!Character.isDigit(rev[j]))
break;
}
String parentnum = new String(rev, i+1, j-i-1);
int pnum;
try {
pnum = Integer.parseInt(parentnum);
} catch (NumberFormatException e) {
throw new RevisionSyntaxException(
"Invalid commit parent number",
revstr);
}
if (pnum != 0) {
final ObjectId parents[] = ((Commit) ref)
.getParentIds();
if (pnum > parents.length)
refId = null;
else
refId = parents[pnum - 1];
}
i = j - 1;
break;
case '{':
int k;
String item = null;
for (k=i+2; k<rev.length; ++k) {
if (rev[k] == '}') {
item = new String(rev, i+2, k-i-2);
break;
}
}
i = k;
if (item != null)
if (item.equals("tree")) {
ref = mapObject(refId, null);
while (ref instanceof Tag) {
Tag t = (Tag)ref;
refId = t.getObjId();
ref = mapObject(refId, null);
}
if (ref instanceof Treeish)
refId = ((Treeish)ref).getTreeId();
else
throw new IncorrectObjectTypeException(refId, Constants.TYPE_TREE);
}
else if (item.equals("commit")) {
ref = mapObject(refId, null);
while (ref instanceof Tag) {
Tag t = (Tag)ref;
refId = t.getObjId();
ref = mapObject(refId, null);
}
if (!(ref instanceof Commit))
throw new IncorrectObjectTypeException(refId, Constants.TYPE_COMMIT);
}
else if (item.equals("blob")) {
ref = mapObject(refId, null);
while (ref instanceof Tag) {
Tag t = (Tag)ref;
refId = t.getObjId();
ref = mapObject(refId, null);
}
if (!(ref instanceof byte[]))
throw new IncorrectObjectTypeException(refId, Constants.TYPE_COMMIT);
}
else if (item.equals("")) {
ref = mapObject(refId, null);
if (ref instanceof Tag)
refId = ((Tag)ref).getObjId();
else {
// self
}
}
else
throw new RevisionSyntaxException(revstr);
else
throw new RevisionSyntaxException(revstr);
break;
default:
ref = mapObject(refId, null);
if (ref instanceof Commit) {
final ObjectId parents[] = ((Commit) ref)
.getParentIds();
if (parents.length == 0)
refId = null;
else
refId = parents[0];
} else
throw new IncorrectObjectTypeException(refId, Constants.TYPE_COMMIT);
}
} else {
ref = mapObject(refId, null);
if (ref instanceof Commit) {
final ObjectId parents[] = ((Commit) ref)
.getParentIds();
if (parents.length == 0)
refId = null;
else
refId = parents[0];
} else
throw new IncorrectObjectTypeException(refId, Constants.TYPE_COMMIT);
}
break;
case '~':
if (ref == null) {
String refstr = new String(rev,0,i);
refId = resolveSimple(refstr);
ref = mapCommit(refId);
}
int l;
for (l = i + 1; l < rev.length; ++l) {
if (!Character.isDigit(rev[l]))
break;
}
String distnum = new String(rev, i+1, l-i-1);
int dist;
try {
dist = Integer.parseInt(distnum);
} catch (NumberFormatException e) {
throw new RevisionSyntaxException(
"Invalid ancestry length", revstr);
}
while (dist >= 0) {
final ObjectId[] parents = ((Commit) ref).getParentIds();
if (parents.length == 0) {
refId = null;
break;
}
refId = parents[0];
ref = mapCommit(refId);
--dist;
}
i = l - 1;
break;
case '@':
int m;
String time = null;
for (m=i+2; m<rev.length; ++m) {
if (rev[m] == '}') {
time = new String(rev, i+2, m-i-2);
break;
}
}
if (time != null)
throw new RevisionSyntaxException("reflogs not yet supported by revision parser yet", revstr);
i = m - 1;
break;
default:
if (refId != null)
throw new RevisionSyntaxException(revstr);
}
}
if (refId == null)
refId = resolveSimple(revstr);
return refId;
}
|
public ObjectId resolve(final String revstr) throws IOException {
char[] rev = revstr.toCharArray();
Object ref = null;
ObjectId refId = null;
for (int i = 0; i < rev.length; ++i) {
switch (rev[i]) {
case '^':
if (refId == null) {
String refstr = new String(rev,0,i);
refId = resolveSimple(refstr);
if (refId == null)
return null;
}
if (i + 1 < rev.length) {
switch (rev[i + 1]) {
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
int j;
ref = mapObject(refId, null);
if (!(ref instanceof Commit))
throw new IncorrectObjectTypeException(refId, Constants.TYPE_COMMIT);
for (j=i+1; j<rev.length; ++j) {
if (!Character.isDigit(rev[j]))
break;
}
String parentnum = new String(rev, i+1, j-i-1);
int pnum;
try {
pnum = Integer.parseInt(parentnum);
} catch (NumberFormatException e) {
throw new RevisionSyntaxException(
"Invalid commit parent number",
revstr);
}
if (pnum != 0) {
final ObjectId parents[] = ((Commit) ref)
.getParentIds();
if (pnum > parents.length)
refId = null;
else
refId = parents[pnum - 1];
}
i = j - 1;
break;
case '{':
int k;
String item = null;
for (k=i+2; k<rev.length; ++k) {
if (rev[k] == '}') {
item = new String(rev, i+2, k-i-2);
break;
}
}
i = k;
if (item != null)
if (item.equals("tree")) {
ref = mapObject(refId, null);
while (ref instanceof Tag) {
Tag t = (Tag)ref;
refId = t.getObjId();
ref = mapObject(refId, null);
}
if (ref instanceof Treeish)
refId = ((Treeish)ref).getTreeId();
else
throw new IncorrectObjectTypeException(refId, Constants.TYPE_TREE);
}
else if (item.equals("commit")) {
ref = mapObject(refId, null);
while (ref instanceof Tag) {
Tag t = (Tag)ref;
refId = t.getObjId();
ref = mapObject(refId, null);
}
if (!(ref instanceof Commit))
throw new IncorrectObjectTypeException(refId, Constants.TYPE_COMMIT);
}
else if (item.equals("blob")) {
ref = mapObject(refId, null);
while (ref instanceof Tag) {
Tag t = (Tag)ref;
refId = t.getObjId();
ref = mapObject(refId, null);
}
if (!(ref instanceof byte[]))
throw new IncorrectObjectTypeException(refId, Constants.TYPE_BLOB);
}
else if (item.equals("")) {
ref = mapObject(refId, null);
if (ref instanceof Tag)
refId = ((Tag)ref).getObjId();
else {
// self
}
}
else
throw new RevisionSyntaxException(revstr);
else
throw new RevisionSyntaxException(revstr);
break;
default:
ref = mapObject(refId, null);
if (ref instanceof Commit) {
final ObjectId parents[] = ((Commit) ref)
.getParentIds();
if (parents.length == 0)
refId = null;
else
refId = parents[0];
} else
throw new IncorrectObjectTypeException(refId, Constants.TYPE_COMMIT);
}
} else {
ref = mapObject(refId, null);
if (ref instanceof Commit) {
final ObjectId parents[] = ((Commit) ref)
.getParentIds();
if (parents.length == 0)
refId = null;
else
refId = parents[0];
} else
throw new IncorrectObjectTypeException(refId, Constants.TYPE_COMMIT);
}
break;
case '~':
if (ref == null) {
String refstr = new String(rev,0,i);
refId = resolveSimple(refstr);
ref = mapCommit(refId);
}
int l;
for (l = i + 1; l < rev.length; ++l) {
if (!Character.isDigit(rev[l]))
break;
}
String distnum = new String(rev, i+1, l-i-1);
int dist;
try {
dist = Integer.parseInt(distnum);
} catch (NumberFormatException e) {
throw new RevisionSyntaxException(
"Invalid ancestry length", revstr);
}
while (dist >= 0) {
final ObjectId[] parents = ((Commit) ref).getParentIds();
if (parents.length == 0) {
refId = null;
break;
}
refId = parents[0];
ref = mapCommit(refId);
--dist;
}
i = l - 1;
break;
case '@':
int m;
String time = null;
for (m=i+2; m<rev.length; ++m) {
if (rev[m] == '}') {
time = new String(rev, i+2, m-i-2);
break;
}
}
if (time != null)
throw new RevisionSyntaxException("reflogs not yet supported by revision parser yet", revstr);
i = m - 1;
break;
default:
if (refId != null)
throw new RevisionSyntaxException(revstr);
}
}
if (refId == null)
refId = resolveSimple(revstr);
return refId;
}
|
diff --git a/src/com/dozersoftware/snap/PositionReporter.java b/src/com/dozersoftware/snap/PositionReporter.java
index 775301d..e386cea 100644
--- a/src/com/dozersoftware/snap/PositionReporter.java
+++ b/src/com/dozersoftware/snap/PositionReporter.java
@@ -1,41 +1,41 @@
/*
* Copyright 2010 Dozer Software, LLC
* This software is licensed under the Simplified BSD License.
* See license.txt for details.
*/
package com.dozersoftware.snap;
import org.jboss.soa.esb.actions.AbstractActionLifecycle;
import org.jboss.soa.esb.client.ServiceInvoker;
import org.jboss.soa.esb.helpers.ConfigTree;
import org.jboss.soa.esb.listeners.message.MessageDeliverException;
import org.jboss.soa.esb.message.Message;
public class PositionReporter extends AbstractActionLifecycle {
private ConfigTree _config;
public PositionReporter(ConfigTree config) {
this._config = config;
}
public Message process(Message message) {
try {
- new ServiceInvoker("Norm", "NormProcessor").deliverAsync(message);
+ new ServiceInvoker("NormOut", "NormProcessor").deliverAsync(message);
} catch (MessageDeliverException e) {
e.printStackTrace();
}
//System.out.println("Kicking a PositionReport!");
return message;
}
public void exceptionHandler(Message message, Throwable exception) {
System.out.println("!ERROR!");
System.out.println(exception.getMessage());
System.out.println("For Message: ");
System.out.println(message.getBody().get());
}
}
| true
| true
|
public Message process(Message message) {
try {
new ServiceInvoker("Norm", "NormProcessor").deliverAsync(message);
} catch (MessageDeliverException e) {
e.printStackTrace();
}
//System.out.println("Kicking a PositionReport!");
return message;
}
|
public Message process(Message message) {
try {
new ServiceInvoker("NormOut", "NormProcessor").deliverAsync(message);
} catch (MessageDeliverException e) {
e.printStackTrace();
}
//System.out.println("Kicking a PositionReport!");
return message;
}
|
diff --git a/plugin/src/main/java/org/exoplatform/crowdin/mojo/UpdateSourcesMojo.java b/plugin/src/main/java/org/exoplatform/crowdin/mojo/UpdateSourcesMojo.java
index b96693b..e486541 100644
--- a/plugin/src/main/java/org/exoplatform/crowdin/mojo/UpdateSourcesMojo.java
+++ b/plugin/src/main/java/org/exoplatform/crowdin/mojo/UpdateSourcesMojo.java
@@ -1,411 +1,411 @@
/*
* Copyright (C) 2003-2013 eXo Platform SAS.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 3 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.exoplatform.crowdin.mojo;
import static org.twdata.maven.mojoexecutor.MojoExecutor.element;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;
import java.util.Properties;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.Mojo;
import org.exoplatform.crowdin.model.CrowdinFile.Type;
import org.exoplatform.crowdin.model.CrowdinFileFactory;
import org.exoplatform.crowdin.model.CrowdinTranslation;
import org.exoplatform.crowdin.model.SourcesRepository;
import org.exoplatform.crowdin.utils.IOSResouceBundleFileUtils;
import org.exoplatform.crowdin.utils.PropsToXML;
import org.exoplatform.crowdin.utils.XMLResourceBundleUtils;
/**
* Update projects sources from crowdin translations
*/
@Mojo(name = "update-sources")
public class UpdateSourcesMojo extends AbstractCrowdinMojo {
@Override
public void crowdInMojoExecute() throws MojoExecutionException, MojoFailureException {
List<String> languagesToProcess = new ArrayList<String>();
if (getLanguages().contains("all")) {
languagesToProcess = getLanguagesListFromCrowdInArchive(crowdInArchive);
} else {
languagesToProcess = getLanguages();
}
for (String language : languagesToProcess) {
getLog().info("Updates for locale " + language);
applyTranslations(getWorkingDir(), crowdInArchive.getPath(), language);
for (SourcesRepository repository : getSourcesRepositories()) {
try {
File localVersionRepository = new File(getWorkingDir(), repository.getLocalDirectory());
getLog().info("Extract/Apply/Commit/Push changes on " + repository.getName() + " (branch: " + repository.getBranch() + ")");
// Create a patch with local changes
getLog().info("Create patch(s) for " + repository.getLocalDirectory() + "...");
File patchFile = new File(getProject().getBuild().getDirectory(), repository.getLocalDirectory() + "-" + language + ".patch");
// create patch all files when activate new language or properties
if (isActivate()){
getLog().info("Activation new language/properties ");
execGit(localVersionRepository, "add .");
execGit(localVersionRepository, "diff --ignore-all-space HEAD > " + patchFile.getAbsolutePath());
getLog().info("Create patch file at: "+ patchFile.getAbsolutePath());
}
// create patch only tracked files
else{
execGit(localVersionRepository, "diff --ignore-all-space > " + patchFile.getAbsolutePath());
}
getLog().info("Done.");
// Reset our local copy
getLog().info("Reset repository " + repository.getLocalDirectory() + "...");
execGit(localVersionRepository, "reset --hard HEAD");
execGit(localVersionRepository, "clean -fd");
getLog().info("Done.");
BufferedReader br = new BufferedReader(new FileReader(patchFile));
if (br.readLine() == null) {
getLog().info("No change for locale " + language + " from crowdin extract done on " + getCrowdinDownloadDate());
} else {
// Apply the patch
getLog().info("Apply patch(s) for " + repository.getLocalDirectory() + "...");
execGit(localVersionRepository, "apply --ignore-whitespace " + patchFile.getAbsolutePath(), element("successCode", "0"), element("successCode", "1"));
// commit all untracked and tracked files
if (isActivate()){
execGit(localVersionRepository, "add .");
}
getLog().info("Done.");
getLog().info("Commit changes for " + repository.getLocalDirectory() + "...");
execGit(localVersionRepository, "commit -a -m '" + language + " injection on " + getCrowdinDownloadDate() + "'", element("successCode", "0"), element("successCode", "1"));
getLog().info("Done.");
// Push it
if (!isDryRun()) {
getLog().info("Pushing changes for " + repository.getLocalDirectory() + "...");
execGit(localVersionRepository, "push origin " + repository.getBranch());
getLog().info("Done.");
}
}
} catch (Exception e) {
throw new MojoExecutionException("Error while updating project " + repository.getName(), e);
}
}
}
}
private List<String> getLanguagesListFromCrowdInArchive(File zip) {
List<String> languagesToProcess = new ArrayList<String>();
// Let's extract the list of languages from crowdIn archive
try {
ZipInputStream zipinputstream = new ZipInputStream(new FileInputStream(zip));
ZipEntry zipentry = zipinputstream.getNextEntry();
while (zipentry != null) {
// for each entry to be extracted
if (zipentry.isDirectory()) {
zipentry = zipinputstream.getNextEntry();
continue;
}
String zipentryName = zipentry.getName();
zipentryName = CrowdinFileFactory.encodeMinusCharacterInPath(zipentryName, false);
zipentryName = zipentryName.replace('/', File.separatorChar);
zipentryName = zipentryName.replace('\\', File.separatorChar);
String[] path = zipentryName.split(File.separator);
if (!languagesToProcess.contains(path[0]))
languagesToProcess.add(path[0]);
zipentry = zipinputstream.getNextEntry();
}// while
zipinputstream.close();
} catch (Exception e) {
getLog().error("Update aborted !", e);
}
return languagesToProcess;
}
private void applyTranslations(File _destFolder, String _zipFile, String locale) {
try {
byte[] buf = new byte[1024];
ZipInputStream zipinputstream = null;
ZipEntry zipentry;
zipinputstream = new ZipInputStream(new FileInputStream(_zipFile));
zipentry = zipinputstream.getNextEntry();
while (zipentry != null) {
// for each entry to be extracted
if (zipentry.isDirectory()) {
zipentry = zipinputstream.getNextEntry();
continue;
}
String zipentryName = zipentry.getName();
getLog().debug("Processing : " + zipentryName);
zipentryName = CrowdinFileFactory.encodeMinusCharacterInPath(zipentryName, false);
zipentryName = zipentryName.replace('/', File.separatorChar);
zipentryName = zipentryName.replace('\\', File.separatorChar);
String[] path = zipentryName.split(File.separator);
String lang = path[0];
String crowdinProj = path[1];
String proj = path[2];
String fileName = "";
// process only the languages specified
if (!(lang.equalsIgnoreCase(locale))) {
zipentry = zipinputstream.getNextEntry();
continue;
}
try {
String cp = crowdinProj + File.separator + proj;
Properties currentProj = getProperties().get(proj);
// ignore projects that is not managed by the plugin
if (currentProj == null) {
zipentry = zipinputstream.getNextEntry();
continue;
}
String key = zipentryName.substring(zipentryName.indexOf(cp) + cp.length() + 1);
String value = currentProj.getProperty(key);
if (value == null) {
zipentry = zipinputstream.getNextEntry();
continue;
}
/**
* if android, don't save to default master folder but save to
* "values-language" folder (for example) res/values/strings.xml > res/values-fr/strings.xml
*/
if (zipentryName.contains("android")) {
if (!locale.contains("en")) {
String localizable = CrowdinTranslation.encodeAndroidLocale(locale);
value = value.replace("res/values/", "res/values-" + localizable + "/");
}
}
/**
* if iOS, don't save to default master folder but save to
* "language.proj" folder (for example)
* /Resources/en.lproj/Localizable.string > /Resources/fr.lproj/Localizable.string
*/
else if (zipentryName.contains("ios")) {
if (!locale.contains("en")) {
String localizable = CrowdinTranslation.encodeIOSLocale(locale);
value = value.replace("en.lproj", localizable + ".lproj");
}
}
zipentryName = zipentryName.substring(0, zipentryName.indexOf(proj) + proj.length());
lang = CrowdinTranslation.encodeLanguageName(lang, false);
fileName = value.substring(value.lastIndexOf(File.separatorChar) + 1);
getLog().info("Updating " + zipentryName + " - " + value.substring(0, value.lastIndexOf(File.separatorChar) + 1) + fileName);
String name = fileName.substring(0, fileName.lastIndexOf("."));
String extension = fileName.substring(fileName.lastIndexOf("."));
if (name.lastIndexOf("_en") > 0) {
name = name.substring(0, name.lastIndexOf("_en"));
}
if (key.contains("gadget") || value.contains("gadget")) {
if ("default".equalsIgnoreCase(name)) {
fileName = lang + extension;
} else if (name.contains("_ALL")) {
fileName = lang + "_ALL" + extension;
} else {
fileName = name + "_" + lang + extension;
}
}
//if android, don't change xml to properties
else if (zipentryName.contains("android") ){
fileName = name + ".xml";
}
//if iOS
else if(zipentryName.contains("ios") ){
fileName = name + extension;
}
else {
fileName = name + "_" + lang + extension;
}
String parentDir = _destFolder + File.separator + proj + File.separator + value.substring(0, value.lastIndexOf(File.separatorChar) + 1);
getLog().debug("parentDir : " + parentDir);
parentDir = parentDir.replace('/', File.separatorChar).replace('\\', File.separatorChar);
String entryName = parentDir + fileName;
Type resourceBundleType = (key.indexOf("gadget") >= 0) ? Type.GADGET : Type.PORTLET;
File newFile = new File(entryName.substring(0, entryName.lastIndexOf(File.separatorChar)));
newFile.mkdirs();
// Need improve, some portlets in CS use xml format for vi, ar locales
boolean isXML = (entryName.indexOf(".xml") > 0);
if (isXML) {
//if is Android resouce bundle
if(zipentryName.contains("mobile") && zipentryName.contains("android")){
String resourceTranslationFilePath = parentDir + name + extension;
String localizable = CrowdinTranslation.encodeAndroidLocale(locale);
- String masterFilePath = resourceTranslationFilePath.replaceAll("-" + localizable, "");
+ String masterFilePath = resourceTranslationFilePath.replaceAll("res/values-" + localizable, "res/values");
//create temporary file to persists zipinputstream
int n;
FileOutputStream fileoutputstream;
fileoutputstream = new FileOutputStream(resourceTranslationFilePath+".ziptempo");
while ((n = zipinputstream.read(buf, 0, 1024)) > -1) {
fileoutputstream.write(buf, 0, n);
}
fileoutputstream.close();
String crowdinFilePath = resourceTranslationFilePath + ".ziptempo";
FileInputStream input = new FileInputStream(crowdinFilePath);
XMLResourceBundleUtils.setLog(getLog());
XMLResourceBundleUtils.injectTranslation(input, resourceTranslationFilePath, masterFilePath);
//delete ziptempo file
try{
File file = new File(crowdinFilePath);
if(file.delete()){
if(getLog().isDebugEnabled())
getLog().debug(file.getName() + " is deleted!");
}else{
if(getLog().isDebugEnabled())
getLog().debug("Delete operation is failed.");
}
}catch(Exception e){
getLog().error(e);
}
}
else{
// create the temporary properties file to be used for PropsToXML (use the file in Crowdin zip)
//if not in mobile project, convert xml to properties
if (!zipentryName.contains("mobile")) {
entryName = entryName.replaceAll(".xml", ".properties");
}
int n;
FileOutputStream fileoutputstream;
fileoutputstream = new FileOutputStream(entryName);
while ((n = zipinputstream.read(buf, 0, 1024)) > -1) {
fileoutputstream.write(buf, 0, n);
}
fileoutputstream.close();
File propertiesFile = new File(entryName);
// don't convert to ascii in mobile project
if (!zipentryName.contains("mobile")) {
PropsToXML.execShellCommand("native2ascii -encoding UTF8 " + propertiesFile.getPath()
+ " " + propertiesFile.getPath());
}
PropsToXML.parse(propertiesFile.getPath(), resourceBundleType);
propertiesFile.delete();
}
}
// when project is iOS
else if(zipentryName.contains("ios")){
String localFile = parentDir + name + extension;
String localizable = CrowdinTranslation.encodeIOSLocale(locale);
String masterFile = localFile.replace(localizable + ".lproj", "en.lproj");
String resourceTranslationFilePath = localFile;
//Write tempo zipinputstream
int n;
FileOutputStream fileoutputstream;
fileoutputstream = new FileOutputStream(resourceTranslationFilePath+".ziptempo");
while ((n = zipinputstream.read(buf, 0, 1024)) > -1) {
fileoutputstream.write(buf, 0, n);
}
fileoutputstream.close();
String crowdinFilePath = resourceTranslationFilePath + ".ziptempo";
//master file code base EN
String resourceMasterFilePath = masterFile;
//translation file code base LANGUAGE
IOSResouceBundleFileUtils.setLog(getLog());
IOSResouceBundleFileUtils.injectTranslation(crowdinFilePath, resourceMasterFilePath, resourceTranslationFilePath);
} else {
// identify the master properties file
String masterFile = parentDir + name + extension;
if (!new File(masterFile).exists())
masterFile = parentDir + name + "_en" + extension;
if (!new File(masterFile).exists())
throw new FileNotFoundException("Cannot create or update " + entryName + " as the master file " + name + extension + " (or " + name + "_en" + extension + ")" + " does not exist!");
// use the master file as a skeleton and fill in with translations from Crowdin
PropertiesConfiguration config = new PropertiesConfiguration(masterFile);
PropertiesConfiguration.setDefaultListDelimiter('=');
config.setEncoding("UTF-8");
Properties propsCrowdin = new Properties();
propsCrowdin.load(zipinputstream);
Properties props = new Properties();
props.load(new FileInputStream(new File(masterFile)));
Enumeration e = props.propertyNames();
while (e.hasMoreElements()) {
String propKey = (String) e.nextElement();
String crowdinValue = propsCrowdin.getProperty(propKey);
if (null != crowdinValue && crowdinValue.length() > 0)
config.setProperty(propKey, crowdinValue);
}
// if language is English, update master file and the English file if it exists (do not create new)
if ("en".equals(lang)) {
config.save(masterFile);
// perform post-processing for the output file
org.exoplatform.crowdin.utils.FileUtils.replaceCharactersInFile(masterFile, "config/special_character_processing.properties", "UpdateSourceSpecialCharacters");
if (new File(entryName).exists()) {
config.save(entryName);
//use java
org.exoplatform.crowdin.utils.FileUtils.replaceCharactersInFile(entryName, "config/special_character_processing.properties", "UpdateSourceSpecialCharacters");
}
} else {
// always create new (or update) for other languages
config.save(entryName);
//user java
org.exoplatform.crowdin.utils.FileUtils.replaceCharactersInFile(entryName, "config/special_character_processing.properties", "UpdateSourceSpecialCharacters");
}
}
zipinputstream.closeEntry();
} catch (Exception e) {
getLog().warn("Error while applying change for " + zipentryName + " - " + fileName + " : " + e.getMessage());
}
zipentry = zipinputstream.getNextEntry();
}// while
zipinputstream.close();
} catch (Exception e) {
getLog().error("Update aborted !", e);
}
}
}
| true
| true
|
/**
 * Applies the translations contained in a Crowdin export zip to the local
 * project sources, for a single locale.
 * <p>
 * Zip entries are expected at {@code <lang>/<crowdinProject>/<project>/<file path>}.
 * An entry is skipped when its language does not match {@code locale}, when its
 * project is not managed by this plugin, or when the file is not declared in the
 * project's properties. Matching entries are written next to their master file:
 * <ul>
 *   <li>Android ({@code mobile}+{@code android}): the XML translation is written to a
 *       {@code res/values-<locale>} sibling of the {@code res/values} master and merged
 *       via {@code XMLResourceBundleUtils};</li>
 *   <li>iOS ({@code ios}): the translation is written to a {@code <locale>.lproj}
 *       sibling of the {@code en.lproj} master and merged via
 *       {@code IOSResouceBundleFileUtils};</li>
 *   <li>other XML bundles are converted through {@code PropsToXML};</li>
 *   <li>plain properties bundles are rebuilt from the master file used as a skeleton
 *       and filled with the Crowdin values.</li>
 * </ul>
 * Errors on a single entry are logged as warnings and processing continues with the
 * next entry; a failure opening/reading the zip aborts the whole update.
 *
 * @param _destFolder root folder under which translated files are created or updated
 * @param _zipFile    path to the zip file exported from Crowdin
 * @param locale      the only language (top-level zip folder name) to apply
 */
private void applyTranslations(File _destFolder, String _zipFile, String locale) {
    try {
        byte[] buf = new byte[1024];
        ZipInputStream zipinputstream = null;
        ZipEntry zipentry;
        zipinputstream = new ZipInputStream(new FileInputStream(_zipFile));
        zipentry = zipinputstream.getNextEntry();
        while (zipentry != null) {
            // for each entry to be extracted
            if (zipentry.isDirectory()) {
                zipentry = zipinputstream.getNextEntry();
                continue;
            }
            String zipentryName = zipentry.getName();
            getLog().debug("Processing : " + zipentryName);
            zipentryName = CrowdinFileFactory.encodeMinusCharacterInPath(zipentryName, false);
            zipentryName = zipentryName.replace('/', File.separatorChar);
            zipentryName = zipentryName.replace('\\', File.separatorChar);
            // NOTE(review): File.separator is used as a regex here; fine on Unix ("/"),
            // but "\" would be an invalid pattern on Windows - confirm target platforms.
            String[] path = zipentryName.split(File.separator);
            String lang = path[0];
            String crowdinProj = path[1];
            String proj = path[2];
            String fileName = "";
            // process only the language specified
            if (!(lang.equalsIgnoreCase(locale))) {
                zipentry = zipinputstream.getNextEntry();
                continue;
            }
            try {
                String cp = crowdinProj + File.separator + proj;
                Properties currentProj = getProperties().get(proj);
                // ignore projects that are not managed by the plugin
                if (currentProj == null) {
                    zipentry = zipinputstream.getNextEntry();
                    continue;
                }
                String key = zipentryName.substring(zipentryName.indexOf(cp) + cp.length() + 1);
                String value = currentProj.getProperty(key);
                if (value == null) {
                    zipentry = zipinputstream.getNextEntry();
                    continue;
                }
                /*
                 * Android: don't save to the default master folder but to the
                 * "values-<locale>" folder, e.g. res/values/strings.xml > res/values-fr/strings.xml
                 */
                if (zipentryName.contains("android")) {
                    if (!locale.contains("en")) {
                        String localizable = CrowdinTranslation.encodeAndroidLocale(locale);
                        value = value.replace("res/values/", "res/values-" + localizable + "/");
                    }
                }
                /*
                 * iOS: don't save to the default master folder but to the
                 * "<locale>.lproj" folder, e.g.
                 * /Resources/en.lproj/Localizable.string > /Resources/fr.lproj/Localizable.string
                 */
                else if (zipentryName.contains("ios")) {
                    if (!locale.contains("en")) {
                        String localizable = CrowdinTranslation.encodeIOSLocale(locale);
                        value = value.replace("en.lproj", localizable + ".lproj");
                    }
                }
                zipentryName = zipentryName.substring(0, zipentryName.indexOf(proj) + proj.length());
                lang = CrowdinTranslation.encodeLanguageName(lang, false);
                fileName = value.substring(value.lastIndexOf(File.separatorChar) + 1);
                getLog().info("Updating " + zipentryName + " - " + value.substring(0, value.lastIndexOf(File.separatorChar) + 1) + fileName);
                String name = fileName.substring(0, fileName.lastIndexOf("."));
                String extension = fileName.substring(fileName.lastIndexOf("."));
                if (name.lastIndexOf("_en") > 0) {
                    name = name.substring(0, name.lastIndexOf("_en"));
                }
                // compute the name of the file to create/update for this locale
                if (key.contains("gadget") || value.contains("gadget")) {
                    if ("default".equalsIgnoreCase(name)) {
                        fileName = lang + extension;
                    } else if (name.contains("_ALL")) {
                        fileName = lang + "_ALL" + extension;
                    } else {
                        fileName = name + "_" + lang + extension;
                    }
                }
                // if android, don't change xml to properties
                else if (zipentryName.contains("android")) {
                    fileName = name + ".xml";
                }
                // if iOS, keep the original extension
                else if (zipentryName.contains("ios")) {
                    fileName = name + extension;
                }
                else {
                    fileName = name + "_" + lang + extension;
                }
                String parentDir = _destFolder + File.separator + proj + File.separator + value.substring(0, value.lastIndexOf(File.separatorChar) + 1);
                getLog().debug("parentDir : " + parentDir);
                parentDir = parentDir.replace('/', File.separatorChar).replace('\\', File.separatorChar);
                String entryName = parentDir + fileName;
                Type resourceBundleType = (key.indexOf("gadget") >= 0) ? Type.GADGET : Type.PORTLET;
                File newFile = new File(entryName.substring(0, entryName.lastIndexOf(File.separatorChar)));
                newFile.mkdirs();
                // Need improve, some portlets in CS use xml format for vi, ar locales
                boolean isXML = (entryName.indexOf(".xml") > 0);
                if (isXML) {
                    // Android resource bundle
                    if (zipentryName.contains("mobile") && zipentryName.contains("android")) {
                        String resourceTranslationFilePath = parentDir + name + extension;
                        String localizable = CrowdinTranslation.encodeAndroidLocale(locale);
                        // FIX: only convert the locale-specific values folder back to the
                        // master folder. The previous replaceAll("-" + localizable, "")
                        // stripped the "-<locale>" sequence anywhere in the path (project
                        // or file names containing it were corrupted), so the master file
                        // could not be resolved reliably.
                        String masterFilePath = resourceTranslationFilePath.replaceAll("res/values-" + localizable, "res/values");
                        // create a temporary file to persist the zip entry content
                        int n;
                        FileOutputStream fileoutputstream;
                        fileoutputstream = new FileOutputStream(resourceTranslationFilePath + ".ziptempo");
                        while ((n = zipinputstream.read(buf, 0, 1024)) > -1) {
                            fileoutputstream.write(buf, 0, n);
                        }
                        fileoutputstream.close();
                        String crowdinFilePath = resourceTranslationFilePath + ".ziptempo";
                        FileInputStream input = new FileInputStream(crowdinFilePath);
                        XMLResourceBundleUtils.setLog(getLog());
                        XMLResourceBundleUtils.injectTranslation(input, resourceTranslationFilePath, masterFilePath);
                        // delete the temporary ".ziptempo" file
                        try {
                            File file = new File(crowdinFilePath);
                            if (file.delete()) {
                                if (getLog().isDebugEnabled())
                                    getLog().debug(file.getName() + " is deleted!");
                            } else {
                                if (getLog().isDebugEnabled())
                                    getLog().debug("Delete operation is failed.");
                            }
                        } catch (Exception e) {
                            getLog().error(e);
                        }
                    }
                    else {
                        // create the temporary properties file to be used for PropsToXML (use the file in Crowdin zip)
                        // if not in mobile project, convert xml to properties
                        if (!zipentryName.contains("mobile")) {
                            // literal replace: replaceAll would treat "." as a regex wildcard
                            entryName = entryName.replace(".xml", ".properties");
                        }
                        int n;
                        FileOutputStream fileoutputstream;
                        fileoutputstream = new FileOutputStream(entryName);
                        while ((n = zipinputstream.read(buf, 0, 1024)) > -1) {
                            fileoutputstream.write(buf, 0, n);
                        }
                        fileoutputstream.close();
                        File propertiesFile = new File(entryName);
                        // don't convert to ascii in mobile project
                        if (!zipentryName.contains("mobile")) {
                            PropsToXML.execShellCommand("native2ascii -encoding UTF8 " + propertiesFile.getPath()
                                + " " + propertiesFile.getPath());
                        }
                        PropsToXML.parse(propertiesFile.getPath(), resourceBundleType);
                        propertiesFile.delete();
                    }
                }
                // when project is iOS
                else if (zipentryName.contains("ios")) {
                    String localFile = parentDir + name + extension;
                    String localizable = CrowdinTranslation.encodeIOSLocale(locale);
                    String masterFile = localFile.replace(localizable + ".lproj", "en.lproj");
                    String resourceTranslationFilePath = localFile;
                    // persist the zip entry content to a temporary file
                    int n;
                    FileOutputStream fileoutputstream;
                    fileoutputstream = new FileOutputStream(resourceTranslationFilePath + ".ziptempo");
                    while ((n = zipinputstream.read(buf, 0, 1024)) > -1) {
                        fileoutputstream.write(buf, 0, n);
                    }
                    fileoutputstream.close();
                    String crowdinFilePath = resourceTranslationFilePath + ".ziptempo";
                    // master file code base EN
                    String resourceMasterFilePath = masterFile;
                    // translation file code base LANGUAGE
                    IOSResouceBundleFileUtils.setLog(getLog());
                    IOSResouceBundleFileUtils.injectTranslation(crowdinFilePath, resourceMasterFilePath, resourceTranslationFilePath);
                } else {
                    // identify the master properties file
                    String masterFile = parentDir + name + extension;
                    if (!new File(masterFile).exists())
                        masterFile = parentDir + name + "_en" + extension;
                    if (!new File(masterFile).exists())
                        throw new FileNotFoundException("Cannot create or update " + entryName + " as the master file " + name + extension + " (or " + name + "_en" + extension + ")" + " does not exist!");
                    // use the master file as a skeleton and fill in with translations from Crowdin
                    PropertiesConfiguration config = new PropertiesConfiguration(masterFile);
                    PropertiesConfiguration.setDefaultListDelimiter('=');
                    config.setEncoding("UTF-8");
                    Properties propsCrowdin = new Properties();
                    propsCrowdin.load(zipinputstream);
                    Properties props = new Properties();
                    // close the master stream explicitly to avoid a file-handle leak
                    FileInputStream masterIn = new FileInputStream(new File(masterFile));
                    try {
                        props.load(masterIn);
                    } finally {
                        masterIn.close();
                    }
                    Enumeration<?> e = props.propertyNames();
                    while (e.hasMoreElements()) {
                        String propKey = (String) e.nextElement();
                        String crowdinValue = propsCrowdin.getProperty(propKey);
                        if (null != crowdinValue && crowdinValue.length() > 0)
                            config.setProperty(propKey, crowdinValue);
                    }
                    // if language is English, update master file and the English file if it exists (do not create new)
                    if ("en".equals(lang)) {
                        config.save(masterFile);
                        // perform post-processing for the output file
                        org.exoplatform.crowdin.utils.FileUtils.replaceCharactersInFile(masterFile, "config/special_character_processing.properties", "UpdateSourceSpecialCharacters");
                        if (new File(entryName).exists()) {
                            config.save(entryName);
                            org.exoplatform.crowdin.utils.FileUtils.replaceCharactersInFile(entryName, "config/special_character_processing.properties", "UpdateSourceSpecialCharacters");
                        }
                    } else {
                        // always create new (or update) for other languages
                        config.save(entryName);
                        org.exoplatform.crowdin.utils.FileUtils.replaceCharactersInFile(entryName, "config/special_character_processing.properties", "UpdateSourceSpecialCharacters");
                    }
                }
                zipinputstream.closeEntry();
            } catch (Exception e) {
                getLog().warn("Error while applying change for " + zipentryName + " - " + fileName + " : " + e.getMessage());
            }
            zipentry = zipinputstream.getNextEntry();
        } // while
        zipinputstream.close();
    } catch (Exception e) {
        getLog().error("Update aborted !", e);
    }
}
|
/**
 * Applies the translations contained in a Crowdin export zip to the local
 * project sources, for a single locale.
 * <p>
 * Zip entries are expected at {@code <lang>/<crowdinProject>/<project>/<file path>}.
 * An entry is skipped when its language does not match {@code locale}, when its
 * project is not managed by this plugin, or when the file is not declared in the
 * project's properties. Matching entries are written next to their master file:
 * Android translations go to a {@code res/values-<locale>} sibling of the
 * {@code res/values} master, iOS translations to a {@code <locale>.lproj} sibling of
 * {@code en.lproj}, other XML bundles are converted through PropsToXML, and plain
 * properties bundles are rebuilt from the master file used as a skeleton and filled
 * with the Crowdin values.
 * <p>
 * Errors on a single entry are logged as warnings and processing continues with the
 * next entry; a failure opening/reading the zip aborts the whole update.
 *
 * @param _destFolder root folder under which translated files are created or updated
 * @param _zipFile    path to the zip file exported from Crowdin
 * @param locale      the only language (top-level zip folder name) to apply
 */
private void applyTranslations(File _destFolder, String _zipFile, String locale) {
try {
byte[] buf = new byte[1024];
ZipInputStream zipinputstream = null;
ZipEntry zipentry;
zipinputstream = new ZipInputStream(new FileInputStream(_zipFile));
zipentry = zipinputstream.getNextEntry();
while (zipentry != null) {
// for each entry to be extracted
if (zipentry.isDirectory()) {
zipentry = zipinputstream.getNextEntry();
continue;
}
String zipentryName = zipentry.getName();
getLog().debug("Processing : " + zipentryName);
zipentryName = CrowdinFileFactory.encodeMinusCharacterInPath(zipentryName, false);
zipentryName = zipentryName.replace('/', File.separatorChar);
zipentryName = zipentryName.replace('\\', File.separatorChar);
// entry layout is <lang>/<crowdin project>/<project>/<path...>
// NOTE(review): File.separator is used as a regex; fine for "/", but "\" would
// be an invalid pattern on Windows - confirm target platforms.
String[] path = zipentryName.split(File.separator);
String lang = path[0];
String crowdinProj = path[1];
String proj = path[2];
String fileName = "";
// process only the languages specified
if (!(lang.equalsIgnoreCase(locale))) {
zipentry = zipinputstream.getNextEntry();
continue;
}
try {
String cp = crowdinProj + File.separator + proj;
Properties currentProj = getProperties().get(proj);
// ignore projects that is not managed by the plugin
if (currentProj == null) {
zipentry = zipinputstream.getNextEntry();
continue;
}
// key = entry path relative to the project; value = target file declared
// in the project's properties (skip entries that are not declared)
String key = zipentryName.substring(zipentryName.indexOf(cp) + cp.length() + 1);
String value = currentProj.getProperty(key);
if (value == null) {
zipentry = zipinputstream.getNextEntry();
continue;
}
/**
 * if android, don't save to default master folder but save to
 * "values-language" folder (for example) res/values/strings.xml > res/values-fr/strings.xml
 */
if (zipentryName.contains("android")) {
if (!locale.contains("en")) {
String localizable = CrowdinTranslation.encodeAndroidLocale(locale);
value = value.replace("res/values/", "res/values-" + localizable + "/");
}
}
/**
 * if iOS, don't save to default master folder but save to
 * "language.proj" folder (for example)
 * /Resources/en.lproj/Localizable.string > /Resources/fr.lproj/Localizable.string
 */
else if (zipentryName.contains("ios")) {
if (!locale.contains("en")) {
String localizable = CrowdinTranslation.encodeIOSLocale(locale);
value = value.replace("en.lproj", localizable + ".lproj");
}
}
zipentryName = zipentryName.substring(0, zipentryName.indexOf(proj) + proj.length());
lang = CrowdinTranslation.encodeLanguageName(lang, false);
fileName = value.substring(value.lastIndexOf(File.separatorChar) + 1);
getLog().info("Updating " + zipentryName + " - " + value.substring(0, value.lastIndexOf(File.separatorChar) + 1) + fileName);
String name = fileName.substring(0, fileName.lastIndexOf("."));
String extension = fileName.substring(fileName.lastIndexOf("."));
// strip a trailing "_en" so the master base name is used for the new locale
if (name.lastIndexOf("_en") > 0) {
name = name.substring(0, name.lastIndexOf("_en"));
}
// compute the name of the file to create/update for this locale
if (key.contains("gadget") || value.contains("gadget")) {
if ("default".equalsIgnoreCase(name)) {
fileName = lang + extension;
} else if (name.contains("_ALL")) {
fileName = lang + "_ALL" + extension;
} else {
fileName = name + "_" + lang + extension;
}
}
//if android, don't change xml to properties
else if (zipentryName.contains("android") ){
fileName = name + ".xml";
}
//if iOS
else if(zipentryName.contains("ios") ){
fileName = name + extension;
}
else {
fileName = name + "_" + lang + extension;
}
String parentDir = _destFolder + File.separator + proj + File.separator + value.substring(0, value.lastIndexOf(File.separatorChar) + 1);
getLog().debug("parentDir : " + parentDir);
parentDir = parentDir.replace('/', File.separatorChar).replace('\\', File.separatorChar);
String entryName = parentDir + fileName;
Type resourceBundleType = (key.indexOf("gadget") >= 0) ? Type.GADGET : Type.PORTLET;
// make sure the target directory exists before writing into it
File newFile = new File(entryName.substring(0, entryName.lastIndexOf(File.separatorChar)));
newFile.mkdirs();
// Need improve, some portlets in CS use xml format for vi, ar locales
boolean isXML = (entryName.indexOf(".xml") > 0);
if (isXML) {
//if is Android resouce bundle
if(zipentryName.contains("mobile") && zipentryName.contains("android")){
String resourceTranslationFilePath = parentDir + name + extension;
String localizable = CrowdinTranslation.encodeAndroidLocale(locale);
// derive the master file path by mapping the locale-specific
// "res/values-<locale>" folder back to the master "res/values" folder
String masterFilePath = resourceTranslationFilePath.replaceAll("res/values-" + localizable, "res/values");
//create temporary file to persists zipinputstream
int n;
FileOutputStream fileoutputstream;
fileoutputstream = new FileOutputStream(resourceTranslationFilePath+".ziptempo");
while ((n = zipinputstream.read(buf, 0, 1024)) > -1) {
fileoutputstream.write(buf, 0, n);
}
fileoutputstream.close();
String crowdinFilePath = resourceTranslationFilePath + ".ziptempo";
FileInputStream input = new FileInputStream(crowdinFilePath);
XMLResourceBundleUtils.setLog(getLog());
XMLResourceBundleUtils.injectTranslation(input, resourceTranslationFilePath, masterFilePath);
//delete ziptempo file
try{
File file = new File(crowdinFilePath);
if(file.delete()){
if(getLog().isDebugEnabled())
getLog().debug(file.getName() + " is deleted!");
}else{
if(getLog().isDebugEnabled())
getLog().debug("Delete operation is failed.");
}
}catch(Exception e){
getLog().error(e);
}
}
else{
// create the temporary properties file to be used for PropsToXML (use the file in Crowdin zip)
//if not in mobile project, convert xml to properties
if (!zipentryName.contains("mobile")) {
entryName = entryName.replaceAll(".xml", ".properties");
}
int n;
FileOutputStream fileoutputstream;
fileoutputstream = new FileOutputStream(entryName);
while ((n = zipinputstream.read(buf, 0, 1024)) > -1) {
fileoutputstream.write(buf, 0, n);
}
fileoutputstream.close();
File propertiesFile = new File(entryName);
// don't convert to ascii in mobile project
if (!zipentryName.contains("mobile")) {
PropsToXML.execShellCommand("native2ascii -encoding UTF8 " + propertiesFile.getPath()
+ " " + propertiesFile.getPath());
}
PropsToXML.parse(propertiesFile.getPath(), resourceBundleType);
propertiesFile.delete();
}
}
// when project is iOS
else if(zipentryName.contains("ios")){
String localFile = parentDir + name + extension;
String localizable = CrowdinTranslation.encodeIOSLocale(locale);
String masterFile = localFile.replace(localizable + ".lproj", "en.lproj");
String resourceTranslationFilePath = localFile;
//Write tempo zipinputstream
int n;
FileOutputStream fileoutputstream;
fileoutputstream = new FileOutputStream(resourceTranslationFilePath+".ziptempo");
while ((n = zipinputstream.read(buf, 0, 1024)) > -1) {
fileoutputstream.write(buf, 0, n);
}
fileoutputstream.close();
String crowdinFilePath = resourceTranslationFilePath + ".ziptempo";
//master file code base EN
String resourceMasterFilePath = masterFile;
//translation file code base LANGUAGE
IOSResouceBundleFileUtils.setLog(getLog());
IOSResouceBundleFileUtils.injectTranslation(crowdinFilePath, resourceMasterFilePath, resourceTranslationFilePath);
} else {
// identify the master properties file
String masterFile = parentDir + name + extension;
if (!new File(masterFile).exists())
masterFile = parentDir + name + "_en" + extension;
if (!new File(masterFile).exists())
throw new FileNotFoundException("Cannot create or update " + entryName + " as the master file " + name + extension + " (or " + name + "_en" + extension + ")" + " does not exist!");
// use the master file as a skeleton and fill in with translations from Crowdin
PropertiesConfiguration config = new PropertiesConfiguration(masterFile);
PropertiesConfiguration.setDefaultListDelimiter('=');
config.setEncoding("UTF-8");
Properties propsCrowdin = new Properties();
propsCrowdin.load(zipinputstream);
Properties props = new Properties();
props.load(new FileInputStream(new File(masterFile)));
// copy only non-empty Crowdin values for keys that exist in the master file
Enumeration e = props.propertyNames();
while (e.hasMoreElements()) {
String propKey = (String) e.nextElement();
String crowdinValue = propsCrowdin.getProperty(propKey);
if (null != crowdinValue && crowdinValue.length() > 0)
config.setProperty(propKey, crowdinValue);
}
// if language is English, update master file and the English file if it exists (do not create new)
if ("en".equals(lang)) {
config.save(masterFile);
// perform post-processing for the output file
org.exoplatform.crowdin.utils.FileUtils.replaceCharactersInFile(masterFile, "config/special_character_processing.properties", "UpdateSourceSpecialCharacters");
if (new File(entryName).exists()) {
config.save(entryName);
//use java
org.exoplatform.crowdin.utils.FileUtils.replaceCharactersInFile(entryName, "config/special_character_processing.properties", "UpdateSourceSpecialCharacters");
}
} else {
// always create new (or update) for other languages
config.save(entryName);
//user java
org.exoplatform.crowdin.utils.FileUtils.replaceCharactersInFile(entryName, "config/special_character_processing.properties", "UpdateSourceSpecialCharacters");
}
}
zipinputstream.closeEntry();
} catch (Exception e) {
// best-effort per entry: log and continue with the remaining entries
getLog().warn("Error while applying change for " + zipentryName + " - " + fileName + " : " + e.getMessage());
}
zipentry = zipinputstream.getNextEntry();
}// while
zipinputstream.close();
} catch (Exception e) {
getLog().error("Update aborted !", e);
}
}
|
diff --git a/ui/plugins/eu.esdihumboldt.hale.ui/src/eu/esdihumboldt/hale/ui/io/IOWizard.java b/ui/plugins/eu.esdihumboldt.hale.ui/src/eu/esdihumboldt/hale/ui/io/IOWizard.java
index 8f36c3fb0..63b270729 100644
--- a/ui/plugins/eu.esdihumboldt.hale.ui/src/eu/esdihumboldt/hale/ui/io/IOWizard.java
+++ b/ui/plugins/eu.esdihumboldt.hale.ui/src/eu/esdihumboldt/hale/ui/io/IOWizard.java
@@ -1,694 +1,694 @@
/*
* Copyright (c) 2012 Data Harmonisation Panel
*
* All rights reserved. This program and the accompanying materials are made
* available under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 3 of the License,
* or (at your option) any later version.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution. If not, see <http://www.gnu.org/licenses/>.
*
* Contributors:
* HUMBOLDT EU Integrated Project #030962
* Data Harmonisation Panel <http://www.dhpanel.eu>
*/
package eu.esdihumboldt.hale.ui.io;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.content.IContentType;
import org.eclipse.jface.dialogs.IPageChangingListener;
import org.eclipse.jface.dialogs.PageChangingEvent;
import org.eclipse.jface.operation.IRunnableWithProgress;
import org.eclipse.jface.wizard.IWizardPage;
import org.eclipse.jface.wizard.Wizard;
import org.eclipse.jface.wizard.WizardDialog;
import org.eclipse.ui.PlatformUI;
import com.google.common.base.Objects;
import com.google.common.collect.Multimap;
import de.cs3d.util.eclipse.extension.ExtensionObjectFactoryCollection;
import de.cs3d.util.eclipse.extension.FactoryFilter;
import de.cs3d.util.logging.ALogger;
import de.cs3d.util.logging.ALoggerFactory;
import de.cs3d.util.logging.ATransaction;
import eu.esdihumboldt.hale.common.core.io.IOAdvisor;
import eu.esdihumboldt.hale.common.core.io.IOProvider;
import eu.esdihumboldt.hale.common.core.io.IOProviderConfigurationException;
import eu.esdihumboldt.hale.common.core.io.ImportProvider;
import eu.esdihumboldt.hale.common.core.io.ProgressMonitorIndicator;
import eu.esdihumboldt.hale.common.core.io.extension.IOProviderDescriptor;
import eu.esdihumboldt.hale.common.core.io.extension.IOProviderExtension;
import eu.esdihumboldt.hale.common.core.io.project.model.IOConfiguration;
import eu.esdihumboldt.hale.common.core.io.project.model.Project;
import eu.esdihumboldt.hale.common.core.io.report.IOReport;
import eu.esdihumboldt.hale.common.core.io.report.IOReporter;
import eu.esdihumboldt.hale.common.core.io.report.impl.IOMessageImpl;
import eu.esdihumboldt.hale.ui.io.action.ActionUI;
import eu.esdihumboldt.hale.ui.io.action.ActionUIExtension;
import eu.esdihumboldt.hale.ui.io.config.AbstractConfigurationPage;
import eu.esdihumboldt.hale.ui.io.config.ConfigurationPageExtension;
import eu.esdihumboldt.hale.ui.service.project.ProjectService;
import eu.esdihumboldt.hale.ui.service.report.ReportService;
/**
* Abstract I/O wizard based on {@link IOProvider} descriptors
*
* @param <P> the {@link IOProvider} type used in the wizard
*
* @author Simon Templer
* @partner 01 / Fraunhofer Institute for Computer Graphics Research
*/
public abstract class IOWizard<P extends IOProvider> extends Wizard implements
IPageChangingListener {
private static final ALogger log = ALoggerFactory.getLogger(IOWizard.class);
private final Set<IOWizardListener<P, ? extends IOWizard<P>>> listeners = new HashSet<IOWizardListener<P, ? extends IOWizard<P>>>();
private final Class<P> providerType;
private P provider;
private IOProviderDescriptor descriptor;
private IOAdvisor<P> advisor;
private String actionId;
private IContentType contentType;
private Multimap<String, AbstractConfigurationPage<? extends P, ? extends IOWizard<P>>> configPages;
private final List<IWizardPage> mainPages = new ArrayList<IWizardPage>();
/**
* Create an I/O wizard
*
* @param providerType the I/O provider type
*/
public IOWizard(Class<P> providerType) {
super();
this.providerType = providerType;
// create possible configuration pages
configPages = ConfigurationPageExtension.getInstance()
.getConfigurationPages(getFactories());
setNeedsProgressMonitor(true);
}
/**
* Get the I/O advisor
*
* @return the advisor
*/
protected IOAdvisor<P> getAdvisor() {
return advisor;
}
/**
* Get the action identifier
*
* @return the action ID
*/
protected String getActionId() {
return actionId;
}
/**
* Set the I/O advisor
*
* @param advisor the advisor to set
* @param actionId the action identifier, <code>null</code> if it has none
*/
public void setAdvisor(IOAdvisor<P> advisor, String actionId) {
this.advisor = advisor;
this.actionId = actionId;
// recreate possible configuration pages now that advisor is set
configPages = ConfigurationPageExtension.getInstance()
.getConfigurationPages(getFactories());
}
/**
* @see Wizard#addPages()
*/
@Override
public void addPages() {
super.addPages();
// add configuration pages
for (AbstractConfigurationPage<? extends P, ? extends IOWizard<P>> page : configPages
.values()) {
addPage(page);
}
if (getContainer() instanceof WizardDialog) {
((WizardDialog) getContainer()).addPageChangingListener(this);
}
else {
throw new RuntimeException("Only WizardDialog as container supported");
}
}
/**
* @see IPageChangingListener#handlePageChanging(PageChangingEvent)
*/
@Override
public void handlePageChanging(PageChangingEvent event) {
if (getProvider() == null) {
return;
}
if (event.getCurrentPage() instanceof IWizardPage
&& event.getTargetPage() == getNextPage((IWizardPage) event.getCurrentPage())) {
// only do automatic configuration when proceeding to next page
if (event.getCurrentPage() instanceof IOWizardPage<?, ?>) {
@SuppressWarnings("unchecked")
IOWizardPage<P, ?> page = (IOWizardPage<P, ?>) event.getCurrentPage();
event.doit = validatePage(page);
// TODO error message?!
}
}
}
/**
* @see Wizard#dispose()
*/
@Override
public void dispose() {
if (getContainer() instanceof WizardDialog) {
((WizardDialog) getContainer()).removePageChangingListener(this);
}
super.dispose();
}
/**
* @see Wizard#addPage(IWizardPage)
*/
@Override
public void addPage(IWizardPage page) {
// collect main pages
if (!configPages.containsValue(page)) {
mainPages.add(page);
}
super.addPage(page);
}
/**
* Get the list of configuration pages for the currently selected provider
* factory <code>null</code> if there are none.
*
* @return the configuration pages for the current provider
*/
protected List<AbstractConfigurationPage<? extends P, ? extends IOWizard<P>>> getConfigurationPages() {
if (descriptor == null) {
return null;
}
// get the provider id
String id = descriptor.getIdentifier();
List<AbstractConfigurationPage<? extends P, ? extends IOWizard<P>>> result = new ArrayList<AbstractConfigurationPage<? extends P, ? extends IOWizard<P>>>(
configPages.get(id));
return (result.size() > 0 ? result : null);
}
/**
* @see Wizard#canFinish()
*/
@Override
public boolean canFinish() {
// check if main pages are complete
for (int i = 0; i < mainPages.size(); i++) {
if (!(mainPages.get(i)).isPageComplete()) {
return false;
}
}
// check if configuration pages are complete
List<AbstractConfigurationPage<? extends P, ? extends IOWizard<P>>> confPages = getConfigurationPages();
if (confPages != null) {
for (int i = 0; i < confPages.size(); i++) {
if (!(confPages.get(i)).isPageComplete()) {
return false;
}
}
}
return true;
}
/**
* @see Wizard#getNextPage(IWizardPage)
*/
@Override
public IWizardPage getNextPage(IWizardPage page) {
// get main index
int mainIndex = mainPages.indexOf(page);
if (mainIndex >= 0) {
// current page is one of the main pages
if (mainIndex < mainPages.size() - 1) {
// next main page
return mainPages.get(mainIndex + 1);
}
else {
// first configuration page
List<AbstractConfigurationPage<? extends P, ? extends IOWizard<P>>> confPages = getConfigurationPages();
if (confPages != null && confPages.size() > 0) {
return confPages.get(0);
}
}
}
else {
// current page is a configuration page
List<AbstractConfigurationPage<? extends P, ? extends IOWizard<P>>> confPages = getConfigurationPages();
// return the next configuration page
if (confPages != null) {
for (int i = 0; i < confPages.size() - 1; ++i) {
if (confPages.get(i) == page) {
return confPages.get(i + 1);
}
}
}
}
return null;
}
/**
* @see Wizard#getPageCount()
*/
@Override
public int getPageCount() {
int count = mainPages.size();
List<AbstractConfigurationPage<? extends P, ? extends IOWizard<P>>> confPages = getConfigurationPages();
if (confPages != null) {
count += confPages.size();
}
return count;
}
/**
* @see Wizard#getPreviousPage(IWizardPage)
*/
@Override
public IWizardPage getPreviousPage(IWizardPage page) {
// get main index
int mainIndex = mainPages.indexOf(page);
if (mainIndex >= 0) {
// current page is one of the main pages
if (mainIndex > 0) {
// previous main page
return mainPages.get(mainIndex - 1);
}
}
else {
// current page is a configuration page
List<AbstractConfigurationPage<? extends P, ? extends IOWizard<P>>> confPages = getConfigurationPages();
if (confPages != null) {
if (confPages.size() > 0 && confPages.get(0) == page) {
// return last main page
return mainPages.get(mainPages.size() - 1);
}
// return the previous configuration page
for (int i = 1; i < confPages.size(); ++i) {
if (confPages.get(i) == page) {
return confPages.get(i - 1);
}
}
}
}
return null;
}
/**
* @see Wizard#getStartingPage()
*/
@Override
public IWizardPage getStartingPage() {
return mainPages.get(0);
}
/**
* Get the available provider descriptors. To filter or sort them you can
* override this method.
*
* @return the available provider descriptors
*/
public List<IOProviderDescriptor> getFactories() {
// FIXME rename method
return IOProviderExtension.getInstance().getFactories(
new FactoryFilter<IOProvider, IOProviderDescriptor>() {
@Override
public boolean acceptFactory(IOProviderDescriptor factory) {
// accept all factories that provide a compatible I/O
// provider
return providerType.isAssignableFrom(factory.getProviderType());
}
@Override
public boolean acceptCollection(
ExtensionObjectFactoryCollection<IOProvider, IOProviderDescriptor> collection) {
return true;
}
});
}
/**
* Get the provider assigned to the wizard. It will be <code>null</code> if
* no page assigned a provider factory to the wizard yet.
*
* @return the I/O provider
*/
@SuppressWarnings("unchecked")
public P getProvider() {
if (provider == null && descriptor != null) {
try {
provider = (P) descriptor.createExtensionObject();
} catch (Exception e) {
throw new IllegalStateException("Could not instantiate I/O provider", e);
}
advisor.prepareProvider(provider);
}
return provider;
}
/**
* Assign an I/O provider factory to the wizard
*
* @param descriptor the provider factory to set
*/
public void setProviderFactory(IOProviderDescriptor descriptor) {
if (Objects.equal(descriptor, this.descriptor))
return;
// disable old configuration pages
List<AbstractConfigurationPage<? extends P, ? extends IOWizard<P>>> pages = getConfigurationPages();
if (pages != null) {
for (AbstractConfigurationPage<? extends P, ? extends IOWizard<P>> page : pages) {
page.disable();
}
}
this.descriptor = descriptor;
// reset provider
provider = null;
// enable new configuration pages
pages = getConfigurationPages();
if (pages != null) {
for (AbstractConfigurationPage<? extends P, ? extends IOWizard<P>> page : pages) {
page.enable();
}
}
fireProviderFactoryChanged(descriptor);
}
/**
* Get the content type assigned to the wizard
*
* @return the content type, may be <code>null</code>
*/
public IContentType getContentType() {
return contentType;
}
/**
* Assign a content type to the wizard
*
* @param contentType the content type to set
*/
public void setContentType(IContentType contentType) {
if (Objects.equal(contentType, this.contentType))
return;
this.contentType = contentType;
fireContentTypeChanged(contentType);
}
/**
* Get the provider descriptor assigned to the wizard. It will be
* <code>null</code> if no page assigned a provider factory to the wizard
* yet.
*
* @return the I/O provider factory
*/
public IOProviderDescriptor getProviderFactory() {
return descriptor;
}
/**
* @see Wizard#performFinish()
*
* @return <code>true</code> if executing the I/O provider was successful
*/
@Override
public boolean performFinish() {
if (getProvider() == null) {
return false;
}
// process main pages
for (int i = 0; i < mainPages.size(); i++) {
// validating is still necessary as it is not guaranteed to be up to
// date by handlePageChanging
boolean valid = validatePage(mainPages.get(i));
if (!valid) {
// TODO error message?!
return false;
}
}
// check if configuration pages are complete
List<AbstractConfigurationPage<? extends P, ? extends IOWizard<P>>> confPages = getConfigurationPages();
if (confPages != null) {
for (int i = 0; i < confPages.size(); i++) {
// validating is still necessary as it is not guaranteed to be
// up to date by handlePageChanging
boolean valid = validatePage(confPages.get(i));
if (!valid) {
// TODO error message?!
return false;
}
}
}
// process wizard
updateConfiguration(provider);
// create default report
IOReporter defReport = provider.createReporter();
// validate and execute provider
try {
// validate configuration
provider.validate();
ProjectService ps = null;
if (actionId != null) {
// XXX instead move project resource to action?
ActionUI factory = ActionUIExtension.getInstance().findActionUI(actionId);
if (factory.isProjectResource()) {
ps = (ProjectService) PlatformUI.getWorkbench()
.getService(ProjectService.class);
// prevent loading of duplicate resources
if (provider instanceof ImportProvider) {
String currentResource = ((ImportProvider) provider).getSource()
.getLocation().toASCIIString();
List<IOConfiguration> resources = ((Project) ps.getProjectInfo())
.getResources();
for (IOConfiguration conf : resources) {
- String resource = conf.getProviderConfiguration().get(
- ImportProvider.PARAM_SOURCE);
+ String resource = conf.getProviderConfiguration()
+ .get(ImportProvider.PARAM_SOURCE).as(String.class);
// resource is already loaded into the project
if (resource != null && resource.equals(currentResource)) {
log.userError("Resource is already loaded. Loading duplicate resources is aborted!");
return false;
}
}
}
}
}
IOReport report = execute(provider, defReport);
if (report != null) {
// add report to report server
ReportService repService = (ReportService) PlatformUI.getWorkbench().getService(
ReportService.class);
repService.addReport(report);
// show message to user
if (report.isSuccess()) {
// no message, we rely on the report being shown/processed
// let advisor handle results
advisor.handleResults(getProvider());
// add to project service if necessary
if (ps != null)
ps.rememberIO(actionId, getProviderFactory().getIdentifier(), provider);
return true;
}
else {
// error message
log.userError(report.getSummary() + "\nPlease see the report for details.");
return false;
}
}
else
return true;
} catch (IOProviderConfigurationException e) {
// user feedback
log.userError(
"Validation of the provider configuration failed:\n" + e.getLocalizedMessage(),
e);
return false;
}
}
/**
* Execute the given provider
*
* @param provider the I/O provider
* @param defaultReporter the default reporter that is used if the provider
* doesn't supply a report
* @return the execution report, if null it will not give feedback to the
* user and the advisor's handleResult method won't be called either
*/
protected IOReport execute(final IOProvider provider, final IOReporter defaultReporter) {
// execute provider
final AtomicReference<IOReport> report = new AtomicReference<IOReport>(defaultReporter);
defaultReporter.setSuccess(false);
try {
getContainer().run(true, provider.isCancelable(), new IRunnableWithProgress() {
@Override
public void run(IProgressMonitor monitor) throws InvocationTargetException,
InterruptedException {
ATransaction trans = log.begin(defaultReporter.getTaskName());
try {
IOReport result = provider.execute(new ProgressMonitorIndicator(monitor));
if (result != null) {
report.set(result);
}
else {
defaultReporter.setSuccess(true);
}
} catch (Throwable e) {
defaultReporter.error(new IOMessageImpl(e.getLocalizedMessage(), e));
} finally {
trans.end();
}
}
});
} catch (Throwable e) {
defaultReporter.error(new IOMessageImpl(e.getLocalizedMessage(), e));
}
return report.get();
}
/**
* Update the provider configuration. This will be called just before the
* I/O provider is executed.
*
* @param provider the I/O provider
*/
protected void updateConfiguration(P provider) {
// set the content type
provider.setContentType(getContentType());
// let advisor update configuration
advisor.updateConfiguration(provider);
}
/**
* Validate the given page and update the I/O provider
*
* @param page the wizard page to validate
* @return if the page is valid and updating the I/O provider was successful
*/
@SuppressWarnings("unchecked")
protected boolean validatePage(IWizardPage page) {
if (page instanceof IOWizardPage<?, ?>) {
return ((IOWizardPage<P, ?>) page).updateConfiguration(provider);
}
else {
return true;
}
}
/**
* Get the supported I/O provider type, usually an interface.
*
* @return the supported I/O provider type
*/
public Class<P> getProviderType() {
return providerType;
}
/**
* Adds an {@link IOWizardListener}
*
* @param listener the listener to add
*/
public void addIOWizardListener(IOWizardListener<P, ? extends IOWizard<P>> listener) {
synchronized (listeners) {
listeners.add(listener);
}
}
/**
* Removes an {@link IOWizardListener}
*
* @param listener the listener to remove
*/
public void removeIOWizardListener(IOWizardListener<P, ? extends IOWizard<P>> listener) {
synchronized (listeners) {
listeners.remove(listener);
}
}
private void fireProviderFactoryChanged(IOProviderDescriptor providerFactory) {
synchronized (listeners) {
for (IOWizardListener<P, ? extends IOWizard<P>> listener : listeners) {
listener.providerDescriptorChanged(providerFactory);
}
}
}
private void fireContentTypeChanged(IContentType contentType) {
synchronized (listeners) {
for (IOWizardListener<P, ? extends IOWizard<P>> listener : listeners) {
listener.contentTypeChanged(contentType);
}
}
}
}
| true
| true
|
/**
 * Validates all wizard pages, updates the I/O provider configuration and
 * executes the provider.
 *
 * @see Wizard#performFinish()
 * @return <code>true</code> if executing the I/O provider was successful
 */
public boolean performFinish() {
	if (getProvider() == null) {
		return false;
	}
	// process main pages
	for (int i = 0; i < mainPages.size(); i++) {
		// validating is still necessary as it is not guaranteed to be up to
		// date by handlePageChanging
		if (!validatePage(mainPages.get(i))) {
			// TODO error message?!
			return false;
		}
	}
	// check if configuration pages are complete
	List<AbstractConfigurationPage<? extends P, ? extends IOWizard<P>>> confPages = getConfigurationPages();
	if (confPages != null) {
		for (int i = 0; i < confPages.size(); i++) {
			// validating is still necessary as it is not guaranteed to be
			// up to date by handlePageChanging
			if (!validatePage(confPages.get(i))) {
				// TODO error message?!
				return false;
			}
		}
	}
	// process wizard
	updateConfiguration(provider);
	// create default report (used if the provider does not supply one)
	IOReporter defReport = provider.createReporter();
	// validate and execute provider
	try {
		// validate configuration
		provider.validate();
		ProjectService ps = null;
		if (actionId != null) {
			// XXX instead move project resource to action?
			ActionUI factory = ActionUIExtension.getInstance().findActionUI(actionId);
			if (factory.isProjectResource()) {
				ps = (ProjectService) PlatformUI.getWorkbench()
						.getService(ProjectService.class);
				// prevent loading of duplicate resources
				if (provider instanceof ImportProvider) {
					String currentResource = ((ImportProvider) provider).getSource()
							.getLocation().toASCIIString();
					List<IOConfiguration> resources = ((Project) ps.getProjectInfo())
							.getResources();
					for (IOConfiguration conf : resources) {
						// the configuration stores a value object, so it must
						// be unwrapped to a String before it can be compared
						// to the current source location
						String resource = conf.getProviderConfiguration()
								.get(ImportProvider.PARAM_SOURCE).as(String.class);
						// resource is already loaded into the project
						if (resource != null && resource.equals(currentResource)) {
							log.userError("Resource is already loaded. Loading duplicate resources is aborted!");
							return false;
						}
					}
				}
			}
		}
		IOReport report = execute(provider, defReport);
		if (report != null) {
			// add report to report server
			ReportService repService = (ReportService) PlatformUI.getWorkbench().getService(
					ReportService.class);
			repService.addReport(report);
			// show message to user
			if (report.isSuccess()) {
				// no message, we rely on the report being shown/processed
				// let advisor handle results
				advisor.handleResults(getProvider());
				// add to project service if necessary
				if (ps != null)
					ps.rememberIO(actionId, getProviderFactory().getIdentifier(), provider);
				return true;
			}
			else {
				// error message
				log.userError(report.getSummary() + "\nPlease see the report for details.");
				return false;
			}
		}
		else
			return true;
	} catch (IOProviderConfigurationException e) {
		// user feedback
		log.userError(
				"Validation of the provider configuration failed:\n" + e.getLocalizedMessage(),
				e);
		return false;
	}
}
|
/**
 * Validates all wizard pages, applies the wizard state to the I/O provider
 * and runs it.
 *
 * @see Wizard#performFinish()
 * @return <code>true</code> if executing the I/O provider was successful
 */
public boolean performFinish() {
	if (getProvider() == null) {
		return false;
	}
	// re-validate the main pages (handlePageChanging does not guarantee
	// they are up to date)
	for (int pageIndex = 0; pageIndex < mainPages.size(); pageIndex++) {
		if (!validatePage(mainPages.get(pageIndex))) {
			// TODO error message?!
			return false;
		}
	}
	// re-validate the configuration pages, if any are present
	List<AbstractConfigurationPage<? extends P, ? extends IOWizard<P>>> configPages = getConfigurationPages();
	if (configPages != null) {
		for (int pageIndex = 0; pageIndex < configPages.size(); pageIndex++) {
			if (!validatePage(configPages.get(pageIndex))) {
				// TODO error message?!
				return false;
			}
		}
	}
	// apply the wizard state to the provider
	updateConfiguration(provider);
	// reporter used when the provider does not supply its own report
	IOReporter fallbackReporter = provider.createReporter();
	try {
		// validate the provider configuration
		provider.validate();
		ProjectService projectService = null;
		if (actionId != null) {
			// XXX instead move project resource to action?
			ActionUI actionUI = ActionUIExtension.getInstance().findActionUI(actionId);
			if (actionUI.isProjectResource()) {
				projectService = (ProjectService) PlatformUI.getWorkbench()
						.getService(ProjectService.class);
				// prevent loading of duplicate resources
				if (provider instanceof ImportProvider) {
					String currentSource = ((ImportProvider) provider).getSource()
							.getLocation().toASCIIString();
					List<IOConfiguration> projectResources = ((Project) projectService
							.getProjectInfo()).getResources();
					for (IOConfiguration configuration : projectResources) {
						String knownSource = configuration.getProviderConfiguration()
								.get(ImportProvider.PARAM_SOURCE).as(String.class);
						// abort if the resource is already part of the project
						if (knownSource != null && knownSource.equals(currentSource)) {
							log.userError("Resource is already loaded. Loading duplicate resources is aborted!");
							return false;
						}
					}
				}
			}
		}
		IOReport report = execute(provider, fallbackReporter);
		if (report == null) {
			// no report means no feedback to the user; treat as success
			return true;
		}
		// publish the report on the report server
		ReportService reportService = (ReportService) PlatformUI.getWorkbench().getService(
				ReportService.class);
		reportService.addReport(report);
		// show message to user
		if (!report.isSuccess()) {
			// error message
			log.userError(report.getSummary() + "\nPlease see the report for details.");
			return false;
		}
		// no message on success, we rely on the report being shown/processed
		// let advisor handle results
		advisor.handleResults(getProvider());
		// add to project service if necessary
		if (projectService != null) {
			projectService.rememberIO(actionId, getProviderFactory().getIdentifier(), provider);
		}
		return true;
	} catch (IOProviderConfigurationException e) {
		// user feedback
		log.userError(
				"Validation of the provider configuration failed:\n" + e.getLocalizedMessage(),
				e);
		return false;
	}
}
|
diff --git a/sip-core/src/main/java/eu/delving/metadata/NodeMapping.java b/sip-core/src/main/java/eu/delving/metadata/NodeMapping.java
index a2620cde..908be069 100644
--- a/sip-core/src/main/java/eu/delving/metadata/NodeMapping.java
+++ b/sip-core/src/main/java/eu/delving/metadata/NodeMapping.java
@@ -1,391 +1,387 @@
/*
* Copyright 2011, 2012 Delving BV
*
 * Licensed under the EUPL, Version 1.0 or - as soon they
* will be approved by the European Commission - subsequent
* versions of the EUPL (the "Licence");
* you may not use this work except in compliance with the
* Licence.
* You may obtain a copy of the Licence at:
*
* http://ec.europa.eu/idabc/eupl
*
* Unless required by applicable law or agreed to in
* writing, software distributed under the Licence is
* distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied.
* See the Licence for the specific language governing
* permissions and limitations under the Licence.
*/
package eu.delving.metadata;
import com.thoughtworks.xstream.annotations.XStreamAlias;
import com.thoughtworks.xstream.annotations.XStreamAsAttribute;
import com.thoughtworks.xstream.annotations.XStreamOmitField;
import java.util.*;
import static eu.delving.metadata.NodeMappingChange.CODE;
import static eu.delving.metadata.NodeMappingChange.DOCUMENTATION;
import static eu.delving.metadata.StringUtil.*;
/**
* This class describes how one node is transformed into another, which is part of mapping
* one hierarchy onto another. It can contain a dictionary, as well as a snippet
* of Groovy code.
* <p/>
* Instances of this class are placed in the RecDefNode elements of the record definition
* so that that data structure can be used as a scaffolding to recursively write the code
* for the Groovy builder.
* <p/>
* Instances are also stored in a list in the RecMapping, and upon reading a mapping they
* are distributed into the local prototype instance of the record definition data structure.
*
* @author Gerald de Jong <gerald@delving.eu>
*/
@XStreamAlias("node-mapping")
public class NodeMapping {
@XStreamAsAttribute
public Path inputPath;
@XStreamAsAttribute
public Path outputPath;
public List<Path> siblings;
@XStreamAsAttribute
public Operator operator;
public Map<String, String> dictionary;
@XStreamAlias("groovy-code")
public List<String> groovyCode;
public List<String> documentation;
@XStreamOmitField
public RecDefNode recDefNode;
@XStreamOmitField
public CodeOut codeOut;
@XStreamOmitField
private SortedSet sourceTreeNodes;
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
NodeMapping that = (NodeMapping) o;
if (inputPath != null ? !inputPath.equals(that.inputPath) : that.inputPath != null) return false;
if (outputPath != null ? !outputPath.equals(that.outputPath) : that.outputPath != null) return false;
return true;
}
@Override
public int hashCode() {
return inputPath.hashCode();
}
public String getDocumentation() {
return linesToString(documentation);
}
public void setDocumentation(String documentation) {
this.documentation = stringToLines(documentation);
notifyChanged(DOCUMENTATION);
}
public Operator getOperator() {
if (recDefNode.hasOperator()) return recDefNode.getOperator();
return operator == null ? Operator.ALL : operator;
}
public void clearStatsTreeNodes() {
sourceTreeNodes = null;
}
public boolean hasMap() {
return siblings != null;
}
public boolean hasSourceTreeNodes() {
return sourceTreeNodes != null;
}
public boolean hasOneSourceTreeNode() {
return hasSourceTreeNodes() && sourceTreeNodes.size() == 1;
}
public Object getSingleSourceTreeNode() {
Iterator walk = sourceTreeNodes.iterator();
return walk.hasNext() ? walk.next() : null;
}
public SortedSet getSourceTreeNodes() {
return sourceTreeNodes;
}
public void attachTo(RecDefNode recDefNode) {
this.recDefNode = recDefNode;
this.outputPath = recDefNode.getPath();
}
// this method should be called from exactly ONE place!
public NodeMapping setStatsTreeNodes(SortedSet statsTreeNodes, List<Path> inputPaths) {
if (statsTreeNodes.isEmpty()) throw new RuntimeException();
this.sourceTreeNodes = statsTreeNodes;
setInputPaths(inputPaths);
return this;
}
public NodeMapping setInputPath(Path inputPath) {
this.inputPath = inputPath;
return this;
}
public void notifyChanged(NodeMappingChange change) {
if (recDefNode != null) recDefNode.notifyNodeMappingChange(this, change);
}
public NodeMapping setInputPaths(Collection<Path> inputPaths) {
if (inputPaths.isEmpty()) throw new RuntimeException();
Path parent = null;
for (Path input : inputPaths) {
if (parent == null) {
parent = input.parent();
}
else if (!parent.equals(input.parent())) {
throw new RuntimeException(String.format("Input path %s should all be from the same parent %s", input, parent));
}
}
Iterator<Path> pathWalk = inputPaths.iterator();
this.inputPath = pathWalk.next();
if (pathWalk.hasNext()) {
siblings = new ArrayList<Path>();
while (pathWalk.hasNext()) siblings.add(pathWalk.next());
}
return this;
}
public List<Path> getInputPaths() {
List<Path> inputPaths = new ArrayList<Path>();
inputPaths.add(inputPath);
if (siblings != null) inputPaths.addAll(siblings);
Collections.sort(inputPaths);
return inputPaths;
}
public NodeMapping setOutputPath(Path outputPath) {
this.outputPath = outputPath;
return this;
}
public boolean generatedCodeLooksLike(String codeString, RecMapping recMapping) {
if (codeString == null) return false;
List<String> list = Arrays.asList(getCode(getGeneratorEditPath(), recMapping).split("\n"));
Iterator<String> walk = list.iterator();
return isSimilar(codeString, walk);
}
public boolean codeLooksLike(String codeString) {
return groovyCode == null || isSimilar(codeString, groovyCode.iterator());
}
public String getCode(EditPath editPath, RecMapping recMapping) {
recMapping.toCode(editPath);
return codeOut.toString();
}
public void revertToGenerated() {
setGroovyCode(null, null);
}
public void setGroovyCode(String codeString, RecMapping recMapping) {
if (codeString == null || generatedCodeLooksLike(codeString, recMapping)) {
if (groovyCode != null) {
groovyCode = null;
notifyChanged(CODE);
}
}
else if (groovyCode == null || !codeLooksLike(codeString)) {
groovyCode = stringToLines(codeString);
notifyChanged(CODE);
}
}
public void toAttributeCode(Stack<String> groovyParams, EditPath editPath) {
if (!recDefNode.isAttr()) return;
toUserCode(groovyParams, editPath);
}
public void toLeafElementCode(Stack<String> groovyParams, EditPath editPath) {
if (recDefNode.isAttr() || !recDefNode.isLeafElem()) return;
toUserCode(groovyParams, editPath);
}
public boolean isUserCodeEditable() {
return recDefNode.isAttr() || recDefNode.isLeafElem();
}
private boolean isSimilar(String codeString, Iterator<String> walk) {
for (String line : codeString.split("\n")) {
line = line.trim();
if (line.isEmpty()) continue;
if (!walk.hasNext()) return false;
while (walk.hasNext()) {
String otherLine = walk.next().trim();
if (otherLine.isEmpty()) continue;
if (!otherLine.equals(line)) return false;
break;
}
}
return !walk.hasNext();
}
private void toUserCode(Stack<String> groovyParams, EditPath editPath) {
if (editPath != null) {
String editedCode = editPath.getEditedCode(recDefNode.getPath());
if (editedCode != null) {
indentCode(editedCode, codeOut);
return;
}
else if (groovyCode != null) {
indentCode(groovyCode, codeOut);
return;
}
}
else if (groovyCode != null) {
indentCode(groovyCode, codeOut);
return;
}
toInnerLoop(getLocalPath(), groovyParams);
}
private void toInnerLoop(Path path, Stack<String> groovyParams) {
if (path.isEmpty()) throw new RuntimeException();
if (path.size() == 1) {
if (dictionary != null) {
codeOut.line("from%s(%s)", toDictionaryName(this), toLeafGroovyParam(path));
}
else if (hasMap()) {
codeOut.line(getMapUsage());
}
else {
+ String sanitize = recDefNode.getFieldType().equalsIgnoreCase("link") ? ".sanitizeURI()" : "";
if (path.peek().getLocalName().equals("constant")) {
codeOut.line("'CONSTANT'");
}
else if (recDefNode.hasFunction()) {
- if (recDefNode.getFieldType().equalsIgnoreCase("link")) {
- codeOut.line("\"${%s(%s).sanitizeURI()}\"", recDefNode.getFunction(), toLeafGroovyParam(path));
- }
- else {
- codeOut.line("\"${%s(%s)}\"", recDefNode.getFunction(), toLeafGroovyParam(path));
- }
+ codeOut.line("\"${%s(%s)%s}\"", recDefNode.getFunction(), toLeafGroovyParam(path), sanitize);
}
else {
- codeOut.line("\"${%s}\"", toLeafGroovyParam(path));
+ codeOut.line("\"${%s%s}\"", toLeafGroovyParam(path), sanitize);
}
}
}
else if (recDefNode.isLeafElem()) {
toInnerLoop(path.withRootRemoved(), groovyParams);
}
else {
boolean needLoop;
if (hasMap()) {
needLoop = !groovyParams.contains(getMapName());
if (needLoop) {
codeOut.line_(
"%s %s { %s ->",
toMapExpression(this), getOperator().getChar(), getMapName()
);
}
}
else {
String param = toLoopGroovyParam(path);
needLoop = !groovyParams.contains(param);
if (needLoop) {
codeOut.line_(
"%s %s { %s ->",
toLoopRef(path), getOperator().getChar(), param
);
}
}
toInnerLoop(path.withRootRemoved(), groovyParams);
if (needLoop) codeOut._line("}");
}
}
public Path getLocalPath() {
NodeMapping ancestor = getAncestorNodeMapping(inputPath);
if (ancestor.inputPath.isAncestorOf(inputPath)) {
return inputPath.extendAncestor(ancestor.inputPath);
}
else {
return inputPath;
}
}
private String getMapUsage() {
if (!hasMap()) return null;
StringBuilder usage = new StringBuilder("\"");
Iterator<Path> walk = getInputPaths().iterator();
while (walk.hasNext()) {
Path path = walk.next();
usage.append(String.format("${%s['%s']}", getMapName(), path.peek().toMapKey()));
if (walk.hasNext()) usage.append(" ");
}
usage.append("\"");
return usage.toString();
}
public String getMapName() {
return String.format("_M%d", inputPath.size());
}
public String toString() {
if (recDefNode == null) return "No RecDefNode";
String input = inputPath.getTail();
if (hasMap()) {
StringBuilder out = new StringBuilder();
Iterator<Path> walk = getInputPaths().iterator();
while (walk.hasNext()) {
out.append(walk.next().getTail());
if (walk.hasNext()) out.append(", ");
}
input = out.toString();
}
String wrap = groovyCode == null ? "p" : "b";
return String.format("<html><%s>%s → %s</%s>", wrap, input, recDefNode.toString(), wrap);
}
private NodeMapping getAncestorNodeMapping(Path path) {
for (RecDefNode ancestor = recDefNode.getParent(); ancestor != null; ancestor = ancestor.getParent()) {
for (NodeMapping nodeMapping : ancestor.getNodeMappings().values()) {
if (nodeMapping.inputPath.isAncestorOf(path)) return nodeMapping;
}
}
return new NodeMapping().setInputPath(Path.create("input")).setOutputPath(outputPath.takeFirst());
}
private EditPath getGeneratorEditPath() {
return new EditPath() {
@Override
public NodeMapping getNodeMapping() {
return NodeMapping.this;
}
@Override
public String getEditedCode(Path path) {
return null;
}
};
}
}
| false
| true
|
/**
 * Recursively generates the Groovy builder code for the given (local) input
 * path: opens loops for intermediate path elements and emits the value
 * expression for the leaf element.
 *
 * @param path the remaining input path to process
 * @param groovyParams the loop variables already in scope, used to avoid
 *            opening duplicate loops
 */
private void toInnerLoop(Path path, Stack<String> groovyParams) {
    if (path.isEmpty()) throw new RuntimeException();
    if (path.size() == 1) {
        if (dictionary != null) {
            codeOut.line("from%s(%s)", toDictionaryName(this), toLeafGroovyParam(path));
        }
        else if (hasMap()) {
            codeOut.line(getMapUsage());
        }
        else {
            // link fields get their value passed through the URI sanitizer;
            // computing the suffix once applies it uniformly to both the
            // function and the plain-value branch (previously the plain
            // branch skipped sanitizing entirely)
            String sanitize = recDefNode.getFieldType().equalsIgnoreCase("link") ? ".sanitizeURI()" : "";
            if (path.peek().getLocalName().equals("constant")) {
                codeOut.line("'CONSTANT'");
            }
            else if (recDefNode.hasFunction()) {
                codeOut.line("\"${%s(%s)%s}\"", recDefNode.getFunction(), toLeafGroovyParam(path), sanitize);
            }
            else {
                codeOut.line("\"${%s%s}\"", toLeafGroovyParam(path), sanitize);
            }
        }
    }
    else if (recDefNode.isLeafElem()) {
        toInnerLoop(path.withRootRemoved(), groovyParams);
    }
    else {
        boolean needLoop;
        if (hasMap()) {
            needLoop = !groovyParams.contains(getMapName());
            if (needLoop) {
                codeOut.line_(
                        "%s %s { %s ->",
                        toMapExpression(this), getOperator().getChar(), getMapName()
                );
            }
        }
        else {
            String param = toLoopGroovyParam(path);
            needLoop = !groovyParams.contains(param);
            if (needLoop) {
                codeOut.line_(
                        "%s %s { %s ->",
                        toLoopRef(path), getOperator().getChar(), param
                );
            }
        }
        toInnerLoop(path.withRootRemoved(), groovyParams);
        if (needLoop) codeOut._line("}");
    }
}
|
/**
 * Recursively writes the Groovy builder code for the remaining input path,
 * opening a loop per non-leaf element and emitting the leaf value expression
 * at the end.
 *
 * @param path the remaining input path to process
 * @param groovyParams the loop variables already in scope, used to avoid
 *            opening duplicate loops
 */
private void toInnerLoop(Path path, Stack<String> groovyParams) {
    if (path.isEmpty()) throw new RuntimeException();
    if (path.size() != 1) {
        if (recDefNode.isLeafElem()) {
            // collapse the remaining path without opening a loop
            toInnerLoop(path.withRootRemoved(), groovyParams);
        }
        else {
            boolean openedLoop;
            if (hasMap()) {
                openedLoop = !groovyParams.contains(getMapName());
                if (openedLoop) {
                    codeOut.line_(
                            "%s %s { %s ->",
                            toMapExpression(this), getOperator().getChar(), getMapName()
                    );
                }
            }
            else {
                String loopParam = toLoopGroovyParam(path);
                openedLoop = !groovyParams.contains(loopParam);
                if (openedLoop) {
                    codeOut.line_(
                            "%s %s { %s ->",
                            toLoopRef(path), getOperator().getChar(), loopParam
                    );
                }
            }
            toInnerLoop(path.withRootRemoved(), groovyParams);
            if (openedLoop) codeOut._line("}");
        }
        return;
    }
    // exactly one element left: emit the leaf value expression
    if (dictionary != null) {
        codeOut.line("from%s(%s)", toDictionaryName(this), toLeafGroovyParam(path));
    }
    else if (hasMap()) {
        codeOut.line(getMapUsage());
    }
    else {
        // values of link fields are passed through the URI sanitizer
        String linkSuffix = recDefNode.getFieldType().equalsIgnoreCase("link") ? ".sanitizeURI()" : "";
        if (path.peek().getLocalName().equals("constant")) {
            codeOut.line("'CONSTANT'");
        }
        else if (recDefNode.hasFunction()) {
            codeOut.line("\"${%s(%s)%s}\"", recDefNode.getFunction(), toLeafGroovyParam(path), linkSuffix);
        }
        else {
            codeOut.line("\"${%s%s}\"", toLeafGroovyParam(path), linkSuffix);
        }
    }
}
|
diff --git a/src/State.java b/src/State.java
index f835f55..1521d43 100644
--- a/src/State.java
+++ b/src/State.java
@@ -1,125 +1,130 @@
import java.util.*;
public class State {
public State(boolean ON, Point2D location, int direction, List<Point2D> dirts)
{
this.ON = ON;
this.location = location;
this.direction = direction;
this.dirts = dirts;
}
public State(){
dirts = new ArrayList<Point2D>();
}
public boolean ON = false;
//Current coordinates
public Point2D location;
// 0 is North. We will use modular arithmetic for directions.
public int direction;
//List of dirt coordinates
public List<Point2D> dirts;
/**************Functions***********************/
public List<String> get_legal_moves(Environment env)
{
List<String> moves = new ArrayList<String>();
if(!ON)
{
moves.add("TURN_ON");
return moves;
}
// Suck, turning is always legal.
moves.add("SUCK");
moves.add("TURN_RIGHT");
moves.add("TURN_LEFT");
// We know we must be ON.
moves.add("TURN_OFF");
//If Go is inside of boundaries and not facing an obstacle add GO
switch(direction)
{
case 0: // North
if(location.y() == env.c || env.obstacles.contains(new Point2D(location.x(),location.y() + 1)))//If in northmost row, or obstacle
break;
else
moves.add("GO");
- break;
+ break;
case 1: // East
//If we are at the rightmost location
if(location.x() == env.r ||env.obstacles.contains(new Point2D(location.x() + 1, location.y()))) //If in eastmost column, or obstacle in front.
break;
else
moves.add("GO");
- break;
+ break;
case 2: // South
if(location.x() == 0 || env.obstacles.contains( new Point2D(location.x() - 1, location.y() )))//If at bottom row, or obstacle in front.
break;
else
moves.add("GO");
- break;
+ break;
case 3: // West
if(location.x() == 0 || env.obstacles.contains( new Point2D(location.x(), location.y() - 1)))//If at westmost column, or obstacle in front.
- break;
+ break;
+ else
+ moves.add("GO");
+ break;
+ default:
+ System.out.println("Something went wrong when adding GO to the list of moves :/");
}
return moves;
}
/*
TURN_ON:
TURN_RIGHT, TURN_LEFT
GO:
SUCK:
TURN_OFF
*/
public State next_state(String move)
{
if(move == "TURN_ON")
{
//Turn the robot on
return new State(true, new Point2D(location.x(),location.y()), direction, dirts);
}
if(move == "TURN_RIGHT")
{
//Create a new state the same as the old but change direction
return new State(true, new Point2D( location.x(),location.y() ), (direction + 1) % 4, dirts);
}
if(move == "TURN_LEFT")
{
//Create a new state the same as the old but change direction
//For negative numbers take modulo add 3 and take modulo again
int newDirection = (((direction-1) % 4) + 4) % 4;
return new State(true, new Point2D(location.x(),location.y()), newDirection, dirts);
}
if(move == "GO")
{
//Add one to the direction the robot is facing
if(direction == 0)
return new State(true,new Point2D(location.x(),location.y() + 1), direction, dirts);
if(direction == 1)
return new State(true,new Point2D(location.x() + 1,location.y()), direction, dirts);
if(direction == 2)
return new State(true,new Point2D(location.x(),location.y() - 1), direction, dirts);
if(direction == 3)
return new State(true,new Point2D(location.x() - 1,location.y()), direction, dirts);
}
if(move == "SUCK")
{
//Remove the dirt at point location from the list of dirts
Point2D newPoint = new Point2D(location.x(),location.y());
dirts.remove(newPoint);
return new State(true, newPoint, direction, dirts);
}
else //if(move == "TURN_OFF")
{
//Turn the robot off
return new State(false, new Point2D(location.x(),location.y()), direction, dirts);
}
}
}
| false
| true
|
/**
 * Determines the moves that are legal in this state.
 *
 * @param env the environment providing the grid bounds and obstacle locations
 * @return the list of legal move names
 */
public List<String> get_legal_moves(Environment env)
{
    List<String> moves = new ArrayList<String>();
    // a robot that is off can only be turned on
    if(!ON)
    {
        moves.add("TURN_ON");
        return moves;
    }
    // Suck, turning is always legal.
    moves.add("SUCK");
    moves.add("TURN_RIGHT");
    moves.add("TURN_LEFT");
    // We know we must be ON.
    moves.add("TURN_OFF");
    // GO is legal only if the square ahead is inside the boundaries and
    // not blocked by an obstacle
    switch(direction)
    {
        case 0: // North
            if(location.y() == env.c || env.obstacles.contains(new Point2D(location.x(),location.y() + 1)))//If in northmost row, or obstacle
                break;
            else
                moves.add("GO");
            break;
        case 1: // East
            //If we are at the rightmost location
            if(location.x() == env.r ||env.obstacles.contains(new Point2D(location.x() + 1, location.y()))) //If in eastmost column, or obstacle in front.
                break;
            else
                moves.add("GO");
            break;
        case 2: // South
            // NOTE(review): this boundary/obstacle check uses x coordinates,
            // while next_state moves south by decrementing y - verify intended
            if(location.x() == 0 || env.obstacles.contains( new Point2D(location.x() - 1, location.y() )))//If at bottom row, or obstacle in front.
                break;
            else
                moves.add("GO");
            break;
        case 3: // West
            // previously this case fell through without ever adding GO,
            // making GO illegal whenever the robot faced west
            if(location.x() == 0 || env.obstacles.contains( new Point2D(location.x(), location.y() - 1)))//If at westmost column, or obstacle in front.
                break;
            else
                moves.add("GO");
            break;
        default:
            System.out.println("Something went wrong when adding GO to the list of moves :/");
    }
    return moves;
}
|
/**
 * Computes the set of moves that are legal in this state.
 *
 * @param env the environment providing the grid bounds and obstacle locations
 * @return the list of legal move names
 */
public List<String> get_legal_moves(Environment env)
{
    List<String> legalMoves = new ArrayList<String>();
    // a robot that is off can only be turned on
    if(!ON)
    {
        legalMoves.add("TURN_ON");
        return legalMoves;
    }
    // sucking and turning are always legal while the robot is on,
    // and an active robot may always be turned off
    legalMoves.add("SUCK");
    legalMoves.add("TURN_RIGHT");
    legalMoves.add("TURN_LEFT");
    legalMoves.add("TURN_OFF");
    // GO is legal only when the square ahead is inside the grid and
    // not occupied by an obstacle
    if(direction == 0) // North
    {
        if(location.y() != env.c && !env.obstacles.contains(new Point2D(location.x(), location.y() + 1)))
            legalMoves.add("GO");
    }
    else if(direction == 1) // East
    {
        if(location.x() != env.r && !env.obstacles.contains(new Point2D(location.x() + 1, location.y())))
            legalMoves.add("GO");
    }
    else if(direction == 2) // South
    {
        if(location.x() != 0 && !env.obstacles.contains(new Point2D(location.x() - 1, location.y())))
            legalMoves.add("GO");
    }
    else if(direction == 3) // West
    {
        if(location.x() != 0 && !env.obstacles.contains(new Point2D(location.x(), location.y() - 1)))
            legalMoves.add("GO");
    }
    else
    {
        System.out.println("Something went wrong when adding GO to the list of moves :/");
    }
    return legalMoves;
}
|
diff --git a/org.maven.ide.eclipse/src/org/maven/ide/eclipse/internal/project/CustomizableLifecycleMapping.java b/org.maven.ide.eclipse/src/org/maven/ide/eclipse/internal/project/CustomizableLifecycleMapping.java
index 5fcdb85a..18196961 100644
--- a/org.maven.ide.eclipse/src/org/maven/ide/eclipse/internal/project/CustomizableLifecycleMapping.java
+++ b/org.maven.ide.eclipse/src/org/maven/ide/eclipse/internal/project/CustomizableLifecycleMapping.java
@@ -1,168 +1,169 @@
/*******************************************************************************
* Copyright (c) 2008 Sonatype, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*******************************************************************************/
package org.maven.ide.eclipse.internal.project;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.codehaus.plexus.util.xml.Xpp3Dom;
import org.apache.maven.model.Plugin;
import org.apache.maven.plugin.MojoExecution;
import org.apache.maven.project.MavenProject;
import org.maven.ide.eclipse.core.MavenLogger;
import org.maven.ide.eclipse.project.IMavenProjectFacade;
import org.maven.ide.eclipse.project.configurator.AbstractBuildParticipant;
import org.maven.ide.eclipse.project.configurator.AbstractLifecycleMapping;
import org.maven.ide.eclipse.project.configurator.AbstractProjectConfigurator;
import org.maven.ide.eclipse.project.configurator.ILifecycleMapping;
import org.maven.ide.eclipse.project.configurator.MojoExecutionBuildParticipant;
import org.maven.ide.eclipse.project.configurator.ProjectConfigurationRequest;
/**
* CustomizableLifecycleMapping
*
* @author igor
*/
public class CustomizableLifecycleMapping extends AbstractLifecycleMapping implements ILifecycleMapping {
public static final String EXTENSION_ID = "customizable";
public CustomizableLifecycleMapping() {
}
public List<AbstractProjectConfigurator> getProjectConfigurators(IMavenProjectFacade facade, IProgressMonitor monitor)
throws CoreException {
MavenProject mavenProject = facade.getMavenProject(monitor);
Plugin plugin = mavenProject.getPlugin("org.maven.ide.eclipse:lifecycle-mapping");
if(plugin == null) {
throw new IllegalArgumentException("no mapping");
}
// TODO assert version
Map<String, AbstractProjectConfigurator> configuratorsMap = new LinkedHashMap<String, AbstractProjectConfigurator>();
for(AbstractProjectConfigurator configurator : getProjectConfigurators(false)) {
configuratorsMap.put(configurator.getId(), configurator);
}
Xpp3Dom config = (Xpp3Dom) plugin.getConfiguration();
if(config == null) {
throw new IllegalArgumentException("Empty lifecycle mapping configuration");
}
Xpp3Dom configuratorsDom = config.getChild("configurators");
Xpp3Dom executionsDom = config.getChild("mojoExecutions");
List<AbstractProjectConfigurator> configurators = new ArrayList<AbstractProjectConfigurator>();
if (configuratorsDom != null) {
for(Xpp3Dom configuratorDom : configuratorsDom.getChildren("configurator")) {
String configuratorId = configuratorDom.getAttribute("id");
AbstractProjectConfigurator configurator = configuratorsMap.get(configuratorId);
if(configurator == null) {
- throw new IllegalArgumentException("Unknown configurator id=" + configuratorId);
+ String message = "Configurator '"+configuratorId+"' is not available for project '"+facade.getProject().getName()+"'. To enable full functionality, install the configurator and run Maven->Update Project Configuration.";
+ throw new IllegalArgumentException(message);
}
configurators.add(configurator);
}
}
if (executionsDom != null) {
for(Xpp3Dom execution : executionsDom.getChildren("mojoExecution")) {
String strRunOnIncremental = execution.getAttribute("runOnIncremental");
configurators.add(MojoExecutionProjectConfigurator.fromString(execution.getValue(), toBool(strRunOnIncremental, true)));
}
}
return configurators;
}
private boolean toBool(String value, boolean def) {
if(value == null || value.length() == 0) {
return def;
}
return Boolean.parseBoolean(value);
}
private Set<String> getListElements(Xpp3Dom listDom, String elementName) {
Set<String> elements = new LinkedHashSet<String>();
if (listDom == null) {
return elements;
}
for (Xpp3Dom elementDom : listDom.getChildren(elementName)) {
elements.add(elementDom.getValue());
}
return elements;
}
public List<AbstractBuildParticipant> getBuildParticipants(IMavenProjectFacade facade, IProgressMonitor monitor)
throws CoreException {
List<AbstractProjectConfigurator> configurators = getProjectConfigurators(facade, monitor);
List<AbstractBuildParticipant> participants = new ArrayList<AbstractBuildParticipant>();
for (MojoExecution execution : facade.getExecutionPlan(monitor).getExecutions()) {
for (AbstractProjectConfigurator configurator : configurators) {
AbstractBuildParticipant participant = configurator.getBuildParticipant(execution);
if (participant != null) {
participants.add(participant);
}
}
}
return participants;
}
public void configure(ProjectConfigurationRequest request, IProgressMonitor monitor) throws CoreException {
super.configure(request, monitor);
addMavenBuilder(request.getProject(), monitor);
}
/* (non-Javadoc)
* @see org.maven.ide.eclipse.project.configurator.ILifecycleMapping#getPotentialMojoExecutionsForBuildKind(org.maven.ide.eclipse.project.IMavenProjectFacade, int, org.eclipse.core.runtime.IProgressMonitor)
*/
public List<String> getPotentialMojoExecutionsForBuildKind(IMavenProjectFacade projectFacade, int kind,
IProgressMonitor progressMonitor) {
List<String> mojos = new LinkedList<String>();
try {
for (MojoExecution execution : projectFacade.getExecutionPlan(progressMonitor).getExecutions()) {
for (AbstractProjectConfigurator configurator : getProjectConfigurators(projectFacade, progressMonitor)) {
AbstractBuildParticipant participant = configurator.getBuildParticipant(execution);
if (participant != null && participant instanceof MojoExecutionBuildParticipant) {
if(((MojoExecutionBuildParticipant)participant).appliesToBuildKind(kind)) {
MojoExecution mojo = ((MojoExecutionBuildParticipant)participant).getMojoExecution();
mojos.add(MojoExecutionUtils.getExecutionKey(mojo));
}
}
}
}
} catch(CoreException ex) {
MavenLogger.log(ex);
}
return mojos;
}
}
| true
| true
|
public List<AbstractProjectConfigurator> getProjectConfigurators(IMavenProjectFacade facade, IProgressMonitor monitor)
throws CoreException {
MavenProject mavenProject = facade.getMavenProject(monitor);
Plugin plugin = mavenProject.getPlugin("org.maven.ide.eclipse:lifecycle-mapping");
if(plugin == null) {
throw new IllegalArgumentException("no mapping");
}
// TODO assert version
Map<String, AbstractProjectConfigurator> configuratorsMap = new LinkedHashMap<String, AbstractProjectConfigurator>();
for(AbstractProjectConfigurator configurator : getProjectConfigurators(false)) {
configuratorsMap.put(configurator.getId(), configurator);
}
Xpp3Dom config = (Xpp3Dom) plugin.getConfiguration();
if(config == null) {
throw new IllegalArgumentException("Empty lifecycle mapping configuration");
}
Xpp3Dom configuratorsDom = config.getChild("configurators");
Xpp3Dom executionsDom = config.getChild("mojoExecutions");
List<AbstractProjectConfigurator> configurators = new ArrayList<AbstractProjectConfigurator>();
if (configuratorsDom != null) {
for(Xpp3Dom configuratorDom : configuratorsDom.getChildren("configurator")) {
String configuratorId = configuratorDom.getAttribute("id");
AbstractProjectConfigurator configurator = configuratorsMap.get(configuratorId);
if(configurator == null) {
throw new IllegalArgumentException("Unknown configurator id=" + configuratorId);
}
configurators.add(configurator);
}
}
if (executionsDom != null) {
for(Xpp3Dom execution : executionsDom.getChildren("mojoExecution")) {
String strRunOnIncremental = execution.getAttribute("runOnIncremental");
configurators.add(MojoExecutionProjectConfigurator.fromString(execution.getValue(), toBool(strRunOnIncremental, true)));
}
}
return configurators;
}
|
public List<AbstractProjectConfigurator> getProjectConfigurators(IMavenProjectFacade facade, IProgressMonitor monitor)
throws CoreException {
MavenProject mavenProject = facade.getMavenProject(monitor);
Plugin plugin = mavenProject.getPlugin("org.maven.ide.eclipse:lifecycle-mapping");
if(plugin == null) {
throw new IllegalArgumentException("no mapping");
}
// TODO assert version
Map<String, AbstractProjectConfigurator> configuratorsMap = new LinkedHashMap<String, AbstractProjectConfigurator>();
for(AbstractProjectConfigurator configurator : getProjectConfigurators(false)) {
configuratorsMap.put(configurator.getId(), configurator);
}
Xpp3Dom config = (Xpp3Dom) plugin.getConfiguration();
if(config == null) {
throw new IllegalArgumentException("Empty lifecycle mapping configuration");
}
Xpp3Dom configuratorsDom = config.getChild("configurators");
Xpp3Dom executionsDom = config.getChild("mojoExecutions");
List<AbstractProjectConfigurator> configurators = new ArrayList<AbstractProjectConfigurator>();
if (configuratorsDom != null) {
for(Xpp3Dom configuratorDom : configuratorsDom.getChildren("configurator")) {
String configuratorId = configuratorDom.getAttribute("id");
AbstractProjectConfigurator configurator = configuratorsMap.get(configuratorId);
if(configurator == null) {
String message = "Configurator '"+configuratorId+"' is not available for project '"+facade.getProject().getName()+"'. To enable full functionality, install the configurator and run Maven->Update Project Configuration.";
throw new IllegalArgumentException(message);
}
configurators.add(configurator);
}
}
if (executionsDom != null) {
for(Xpp3Dom execution : executionsDom.getChildren("mojoExecution")) {
String strRunOnIncremental = execution.getAttribute("runOnIncremental");
configurators.add(MojoExecutionProjectConfigurator.fromString(execution.getValue(), toBool(strRunOnIncremental, true)));
}
}
return configurators;
}
|
diff --git a/staging/testsuite/src/test/java/org/sonatype/nexus/maven/staging/it/nxcm5194/Nxcm5194GLevelRepositoryMetadataSupport.java b/staging/testsuite/src/test/java/org/sonatype/nexus/maven/staging/it/nxcm5194/Nxcm5194GLevelRepositoryMetadataSupport.java
index 3c09eb3..78b54dc 100644
--- a/staging/testsuite/src/test/java/org/sonatype/nexus/maven/staging/it/nxcm5194/Nxcm5194GLevelRepositoryMetadataSupport.java
+++ b/staging/testsuite/src/test/java/org/sonatype/nexus/maven/staging/it/nxcm5194/Nxcm5194GLevelRepositoryMetadataSupport.java
@@ -1,192 +1,196 @@
/*
* Sonatype Nexus (TM) Open Source Version
* Copyright (c) 2007-2012 Sonatype, Inc.
* All rights reserved. Includes the third-party code listed at http://links.sonatype.com/products/nexus/oss/attributions.
*
* This program and the accompanying materials are made available under the terms of the Eclipse Public License Version 1.0,
* which accompanies this distribution and is available at http://www.eclipse.org/legal/epl-v10.html.
*
* Sonatype Nexus (TM) Professional Version is available from Sonatype, Inc. "Sonatype" and "Sonatype Nexus" are trademarks
* of Sonatype, Inc. Apache Maven is a trademark of the Apache Software Foundation. M2eclipse is a trademark of the
* Eclipse Foundation. All other trademarks are the property of their respective owners.
*/
package org.sonatype.nexus.maven.staging.it.nxcm5194;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Arrays;
import javax.annotation.Nullable;
import org.apache.maven.artifact.repository.metadata.Metadata;
import org.apache.maven.artifact.repository.metadata.Plugin;
import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Reader;
import org.apache.maven.it.VerificationException;
import org.codehaus.plexus.util.IOUtil;
import org.codehaus.plexus.util.StringUtils;
import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
import org.junit.Test;
import org.sonatype.nexus.client.core.subsystem.content.Content;
import org.sonatype.nexus.client.core.subsystem.content.Location;
import org.sonatype.nexus.maven.staging.it.PreparedVerifier;
import org.sonatype.nexus.maven.staging.it.SimpleRoundtripMatrixSupport;
import com.google.common.base.Function;
import com.google.common.collect.Lists;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.not;
/**
* IT Support for https://issues.sonatype.org/browse/NXCM-5194
* <p>
* It verifies that G level repository metadata is properly deployed and contains the proper bits about the maven plugin
* being built.
*
* @author cstamas
*/
public abstract class Nxcm5194GLevelRepositoryMetadataSupport
extends SimpleRoundtripMatrixSupport
{
public Nxcm5194GLevelRepositoryMetadataSupport( final String nexusBundleCoordinates )
{
super( nexusBundleCoordinates );
}
// == the tests
/**
* Maven Plugin Project set up in m2-way with m2.
*
* @throws VerificationException
* @throws IOException
*/
@Test
public void roundtripWithM2ProjectUsingM2()
throws VerificationException, IOException
{
roundtrip( createMavenVerifier( M2_VERSION, new File( getBasedir(),
"target/test-classes/maven2-maven-plugin-project" ) ) );
}
/**
* Maven Plugin Project set up in m2-way with m3.
*
* @throws VerificationException
* @throws IOException
*/
@Test
public void roundtripWithM2ProjectUsingM3()
throws VerificationException, IOException
{
roundtrip( createMavenVerifier( M3_VERSION, new File( getBasedir(),
"target/test-classes/maven2-maven-plugin-project" ) ) );
}
/**
* Maven Plugin Project set up in m3-way using m3.
*
* @throws VerificationException
* @throws IOException
*/
@Test
public void roundtripWithM3ProjectUsingM3()
throws VerificationException, IOException
{
roundtrip( createMavenVerifier( M3_VERSION, new File( getBasedir(),
"target/test-classes/maven3-maven-plugin-project" ) ) );
}
// we always invoke the same, but results will be different: with deferred deploy
// they will land into snapshots, with staging they will land in some
// (closed) staging repo. That's why we have getTargetedRepositoryId() that will
// tell us from where to fetch the G level MD
@Override
protected void invokeMaven( final PreparedVerifier verifier )
throws VerificationException
{
// the workflow
verifier.executeGoals( Arrays.asList( "clean", "deploy" ) );
// should not fail
verifier.verifyErrorFreeLog();
}
// == Scenario specific methods
@Override
protected abstract PreparedVerifier createMavenVerifier( final String mavenVersion, final File projectDirectory )
throws VerificationException, IOException;
protected abstract String getTargetedRepositoryId();
// == Assertions
@Override
protected void preNexusAssertions( final PreparedVerifier verifier )
{
assertThat( getAllStagingRepositories().toString(), getAllStagingRepositories(), hasSize( 0 ) );
}
@Override
protected void postNexusAssertions( final PreparedVerifier verifier )
{
assertThat( getAllStagingRepositories().toString(), getAllStagingRepositories(), hasSize( 0 ) );
checkGLevelMD( verifier );
}
protected void checkGLevelMD( final PreparedVerifier verifier )
{
final Content content = getNexusClient().getSubsystem( Content.class );
final File target = util.createTempFile();
FileInputStream fis = null;
try
{
content.download(
Location.repositoryLocation( getTargetedRepositoryId(), "/"
+ getClass().getPackage().getName().replaceAll( "\\.", "/" ) + "/maven-metadata.xml" ), target );
fis = new FileInputStream( target );
final Metadata md = new MetadataXpp3Reader().read( fis );
// depending on test execution, this collection might have size of 1, 2 or 3
assertThat( md.getPlugins(), not( empty() ) );
for ( Plugin plugin : md.getPlugins() )
{
// see raw-pom.xml, the prefix is set to artifactId to avoid 3 deploys of different GAVs with same
// prefix
if ( StringUtils.equals( plugin.getArtifactId(), verifier.getProjectArtifactId() )
&& StringUtils.equals( plugin.getPrefix(), verifier.getProjectArtifactId() ) )
{
// we got it, good
return;
}
}
throw new AssertionError( "The maven-plugin with artifact ID " + verifier.getProjectArtifactId()
+ " should be contained in this Plugin list: "
+ Lists.transform( md.getPlugins(), new Function<Plugin, String>()
{
@Override
public String apply( Plugin input )
{
+ if ( null == input )
+ {
+ return "invalid-plugin-null-value";
+ }
return input.getArtifactId();
}
} ) );
}
catch ( XmlPullParserException e )
{
throw new AssertionError( "The metadata parse failed: " + e.getMessage() );
}
catch ( IOException e )
{
throw new AssertionError( "The metadata download failed: " + e.getMessage() );
}
finally
{
IOUtil.close( fis );
}
}
}
| true
| true
|
protected void checkGLevelMD( final PreparedVerifier verifier )
{
final Content content = getNexusClient().getSubsystem( Content.class );
final File target = util.createTempFile();
FileInputStream fis = null;
try
{
content.download(
Location.repositoryLocation( getTargetedRepositoryId(), "/"
+ getClass().getPackage().getName().replaceAll( "\\.", "/" ) + "/maven-metadata.xml" ), target );
fis = new FileInputStream( target );
final Metadata md = new MetadataXpp3Reader().read( fis );
// depending on test execution, this collection might have size of 1, 2 or 3
assertThat( md.getPlugins(), not( empty() ) );
for ( Plugin plugin : md.getPlugins() )
{
// see raw-pom.xml, the prefix is set to artifactId to avoid 3 deploys of different GAVs with same
// prefix
if ( StringUtils.equals( plugin.getArtifactId(), verifier.getProjectArtifactId() )
&& StringUtils.equals( plugin.getPrefix(), verifier.getProjectArtifactId() ) )
{
// we got it, good
return;
}
}
throw new AssertionError( "The maven-plugin with artifact ID " + verifier.getProjectArtifactId()
+ " should be contained in this Plugin list: "
+ Lists.transform( md.getPlugins(), new Function<Plugin, String>()
{
@Override
public String apply( Plugin input )
{
return input.getArtifactId();
}
} ) );
}
catch ( XmlPullParserException e )
{
throw new AssertionError( "The metadata parse failed: " + e.getMessage() );
}
catch ( IOException e )
{
throw new AssertionError( "The metadata download failed: " + e.getMessage() );
}
finally
{
IOUtil.close( fis );
}
}
|
protected void checkGLevelMD( final PreparedVerifier verifier )
{
final Content content = getNexusClient().getSubsystem( Content.class );
final File target = util.createTempFile();
FileInputStream fis = null;
try
{
content.download(
Location.repositoryLocation( getTargetedRepositoryId(), "/"
+ getClass().getPackage().getName().replaceAll( "\\.", "/" ) + "/maven-metadata.xml" ), target );
fis = new FileInputStream( target );
final Metadata md = new MetadataXpp3Reader().read( fis );
// depending on test execution, this collection might have size of 1, 2 or 3
assertThat( md.getPlugins(), not( empty() ) );
for ( Plugin plugin : md.getPlugins() )
{
// see raw-pom.xml, the prefix is set to artifactId to avoid 3 deploys of different GAVs with same
// prefix
if ( StringUtils.equals( plugin.getArtifactId(), verifier.getProjectArtifactId() )
&& StringUtils.equals( plugin.getPrefix(), verifier.getProjectArtifactId() ) )
{
// we got it, good
return;
}
}
throw new AssertionError( "The maven-plugin with artifact ID " + verifier.getProjectArtifactId()
+ " should be contained in this Plugin list: "
+ Lists.transform( md.getPlugins(), new Function<Plugin, String>()
{
@Override
public String apply( Plugin input )
{
if ( null == input )
{
return "invalid-plugin-null-value";
}
return input.getArtifactId();
}
} ) );
}
catch ( XmlPullParserException e )
{
throw new AssertionError( "The metadata parse failed: " + e.getMessage() );
}
catch ( IOException e )
{
throw new AssertionError( "The metadata download failed: " + e.getMessage() );
}
finally
{
IOUtil.close( fis );
}
}
|
diff --git a/common/items/recipe/RecipePocketLinkUpgrade.java b/common/items/recipe/RecipePocketLinkUpgrade.java
index 9360560..017227a 100644
--- a/common/items/recipe/RecipePocketLinkUpgrade.java
+++ b/common/items/recipe/RecipePocketLinkUpgrade.java
@@ -1,99 +1,99 @@
package fuj1n.globalLinkMod.common.items.recipe;
import net.minecraft.block.Block;
import net.minecraft.enchantment.Enchantment;
import net.minecraft.inventory.InventoryCrafting;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.item.crafting.IRecipe;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.nbt.NBTTagList;
import net.minecraft.world.World;
import fuj1n.globalLinkMod.GlobalChests;
public class RecipePocketLinkUpgrade implements IRecipe{
@Override
public boolean matches(InventoryCrafting inventorycrafting, World world) {
ItemStack[] stacks = new ItemStack[inventorycrafting.getSizeInventory()];
for(int i = 0; i < stacks.length; i++){
stacks[i] = inventorycrafting.getStackInSlot(i);
}
for(int i = 0; i < stacks.length || stacks.length == 0; i++){
if(stacks.length == 0 || stacks[i] == null){
return false;
}
}
- if(stacks[4] != null && stacks[4].getItem() == GlobalChests.pocketLink){
+ if(stacks.length > 4 && stacks[4] != null && stacks[4].getItem() == GlobalChests.pocketLink){
NBTTagList enchantments = stacks[4].getEnchantmentTagList();
NBTTagCompound nbt1 = null;
if(enchantments != null && enchantments.tagAt(0) != null){
nbt1 = (NBTTagCompound) enchantments.tagAt(0);
}
Enchantment ench = null;
int enchantLevel = 0;
if(nbt1 != null){
ench = Enchantment.enchantmentsList[nbt1.getShort("id")];
enchantLevel = nbt1.getShort("lvl");
}
if(ench == GlobalChests.enchantmentRange && enchantLevel > 0 || enchantLevel == 0){
switch(enchantLevel){
case 0:
return stacks[0].isItemEqual(new ItemStack(Item.ingotIron)) && stacks[1].isItemEqual(new ItemStack(Item.diamond)) && stacks[2].isItemEqual(new ItemStack(Item.ingotGold)) && stacks[3].isItemEqual(new ItemStack(Item.diamond)) && stacks[4].isItemEqual(new ItemStack(GlobalChests.pocketLink)) && stacks[5].isItemEqual(new ItemStack(Item.diamond)) && stacks[6].isItemEqual(new ItemStack(Item.ingotGold)) && stacks[7].isItemEqual(new ItemStack(Item.diamond)) && stacks[8].isItemEqual(new ItemStack(Item.ingotIron));
case 1:
return stacks[0].isItemEqual(new ItemStack(Block.blockIron)) && stacks[1].isItemEqual(new ItemStack(Item.diamond)) && stacks[2].isItemEqual(new ItemStack(Block.blockGold)) && stacks[3].isItemEqual(new ItemStack(Item.diamond)) && stacks[4].isItemEqual(new ItemStack(GlobalChests.pocketLink)) && stacks[5].isItemEqual(new ItemStack(Item.diamond)) && stacks[6].isItemEqual(new ItemStack(Block.blockGold)) && stacks[7].isItemEqual(new ItemStack(Item.diamond)) && stacks[8].isItemEqual(new ItemStack(Block.blockIron));
case 2:
return stacks[0].isItemEqual(new ItemStack(Item.diamond)) && stacks[1].isItemEqual(new ItemStack(Item.diamond)) && stacks[2].isItemEqual(new ItemStack(Item.diamond)) && stacks[3].isItemEqual(new ItemStack(Block.blockDiamond)) && stacks[4].isItemEqual(new ItemStack(GlobalChests.pocketLink)) && stacks[5].isItemEqual(new ItemStack(Block.blockDiamond)) && stacks[6].isItemEqual(new ItemStack(Item.diamond)) && stacks[7].isItemEqual(new ItemStack(Item.diamond)) && stacks[8].isItemEqual(new ItemStack(Item.diamond));
case 3:
return stacks[0].isItemEqual(new ItemStack(Block.blockDiamond)) && stacks[1].isItemEqual(new ItemStack(Item.netherStar)) && stacks[2].isItemEqual(new ItemStack(Block.blockDiamond)) && stacks[3].isItemEqual(new ItemStack(Item.diamond)) && stacks[4].isItemEqual(new ItemStack(GlobalChests.pocketLink)) && stacks[5].isItemEqual(new ItemStack(Item.diamond)) && stacks[6].isItemEqual(new ItemStack(Block.blockDiamond)) && stacks[7].isItemEqual(new ItemStack(Item.netherStar)) && stacks[8].isItemEqual(new ItemStack(Block.blockDiamond));
case 4:
return stacks[0].isItemEqual(new ItemStack(Item.netherStar)) && stacks[1].isItemEqual(new ItemStack(Block.blockDiamond)) && stacks[2].isItemEqual(new ItemStack(Item.netherStar)) && stacks[3].isItemEqual(new ItemStack(Block.blockDiamond)) && stacks[4].isItemEqual(new ItemStack(GlobalChests.pocketLink)) && stacks[5].isItemEqual(new ItemStack(Block.blockDiamond)) && stacks[6].isItemEqual(new ItemStack(Item.netherStar)) && stacks[7].isItemEqual(new ItemStack(Block.dirt)) && stacks[8].isItemEqual(new ItemStack(Item.netherStar));
default:
return false;
}
}
else{
return false;
}
}else{
return false;
}
}
@Override
public ItemStack getCraftingResult(InventoryCrafting inventorycrafting) {
ItemStack[] stacks = new ItemStack[inventorycrafting.getSizeInventory()];
for(int i = 0; i < stacks.length; i++){
stacks[i] = inventorycrafting.getStackInSlot(i);
}
NBTTagList enchantments = stacks[4].getEnchantmentTagList();
NBTTagCompound nbt1 = null;
if(enchantments != null && enchantments.tagAt(0) != null){
nbt1 = (NBTTagCompound) enchantments.tagAt(0);
}
Enchantment ench = null;
int enchantLevel = 0;
if(nbt1 != null){
ench = Enchantment.enchantmentsList[nbt1.getShort("id")];
enchantLevel = nbt1.getShort("lvl");
}
ItemStack is = new ItemStack(GlobalChests.pocketLink);
is.addEnchantment(GlobalChests.enchantmentRange, enchantLevel + 1);
return is;
}
@Override
public int getRecipeSize() {
return 3;
}
@Override
public ItemStack getRecipeOutput() {
return new ItemStack(GlobalChests.pocketLink, 0, 1);
}
}
| true
| true
|
public boolean matches(InventoryCrafting inventorycrafting, World world) {
ItemStack[] stacks = new ItemStack[inventorycrafting.getSizeInventory()];
for(int i = 0; i < stacks.length; i++){
stacks[i] = inventorycrafting.getStackInSlot(i);
}
for(int i = 0; i < stacks.length || stacks.length == 0; i++){
if(stacks.length == 0 || stacks[i] == null){
return false;
}
}
if(stacks[4] != null && stacks[4].getItem() == GlobalChests.pocketLink){
NBTTagList enchantments = stacks[4].getEnchantmentTagList();
NBTTagCompound nbt1 = null;
if(enchantments != null && enchantments.tagAt(0) != null){
nbt1 = (NBTTagCompound) enchantments.tagAt(0);
}
Enchantment ench = null;
int enchantLevel = 0;
if(nbt1 != null){
ench = Enchantment.enchantmentsList[nbt1.getShort("id")];
enchantLevel = nbt1.getShort("lvl");
}
if(ench == GlobalChests.enchantmentRange && enchantLevel > 0 || enchantLevel == 0){
switch(enchantLevel){
case 0:
return stacks[0].isItemEqual(new ItemStack(Item.ingotIron)) && stacks[1].isItemEqual(new ItemStack(Item.diamond)) && stacks[2].isItemEqual(new ItemStack(Item.ingotGold)) && stacks[3].isItemEqual(new ItemStack(Item.diamond)) && stacks[4].isItemEqual(new ItemStack(GlobalChests.pocketLink)) && stacks[5].isItemEqual(new ItemStack(Item.diamond)) && stacks[6].isItemEqual(new ItemStack(Item.ingotGold)) && stacks[7].isItemEqual(new ItemStack(Item.diamond)) && stacks[8].isItemEqual(new ItemStack(Item.ingotIron));
case 1:
return stacks[0].isItemEqual(new ItemStack(Block.blockIron)) && stacks[1].isItemEqual(new ItemStack(Item.diamond)) && stacks[2].isItemEqual(new ItemStack(Block.blockGold)) && stacks[3].isItemEqual(new ItemStack(Item.diamond)) && stacks[4].isItemEqual(new ItemStack(GlobalChests.pocketLink)) && stacks[5].isItemEqual(new ItemStack(Item.diamond)) && stacks[6].isItemEqual(new ItemStack(Block.blockGold)) && stacks[7].isItemEqual(new ItemStack(Item.diamond)) && stacks[8].isItemEqual(new ItemStack(Block.blockIron));
case 2:
return stacks[0].isItemEqual(new ItemStack(Item.diamond)) && stacks[1].isItemEqual(new ItemStack(Item.diamond)) && stacks[2].isItemEqual(new ItemStack(Item.diamond)) && stacks[3].isItemEqual(new ItemStack(Block.blockDiamond)) && stacks[4].isItemEqual(new ItemStack(GlobalChests.pocketLink)) && stacks[5].isItemEqual(new ItemStack(Block.blockDiamond)) && stacks[6].isItemEqual(new ItemStack(Item.diamond)) && stacks[7].isItemEqual(new ItemStack(Item.diamond)) && stacks[8].isItemEqual(new ItemStack(Item.diamond));
case 3:
return stacks[0].isItemEqual(new ItemStack(Block.blockDiamond)) && stacks[1].isItemEqual(new ItemStack(Item.netherStar)) && stacks[2].isItemEqual(new ItemStack(Block.blockDiamond)) && stacks[3].isItemEqual(new ItemStack(Item.diamond)) && stacks[4].isItemEqual(new ItemStack(GlobalChests.pocketLink)) && stacks[5].isItemEqual(new ItemStack(Item.diamond)) && stacks[6].isItemEqual(new ItemStack(Block.blockDiamond)) && stacks[7].isItemEqual(new ItemStack(Item.netherStar)) && stacks[8].isItemEqual(new ItemStack(Block.blockDiamond));
case 4:
return stacks[0].isItemEqual(new ItemStack(Item.netherStar)) && stacks[1].isItemEqual(new ItemStack(Block.blockDiamond)) && stacks[2].isItemEqual(new ItemStack(Item.netherStar)) && stacks[3].isItemEqual(new ItemStack(Block.blockDiamond)) && stacks[4].isItemEqual(new ItemStack(GlobalChests.pocketLink)) && stacks[5].isItemEqual(new ItemStack(Block.blockDiamond)) && stacks[6].isItemEqual(new ItemStack(Item.netherStar)) && stacks[7].isItemEqual(new ItemStack(Block.dirt)) && stacks[8].isItemEqual(new ItemStack(Item.netherStar));
default:
return false;
}
}
else{
return false;
}
}else{
return false;
}
}
|
public boolean matches(InventoryCrafting inventorycrafting, World world) {
ItemStack[] stacks = new ItemStack[inventorycrafting.getSizeInventory()];
for(int i = 0; i < stacks.length; i++){
stacks[i] = inventorycrafting.getStackInSlot(i);
}
for(int i = 0; i < stacks.length || stacks.length == 0; i++){
if(stacks.length == 0 || stacks[i] == null){
return false;
}
}
if(stacks.length > 4 && stacks[4] != null && stacks[4].getItem() == GlobalChests.pocketLink){
NBTTagList enchantments = stacks[4].getEnchantmentTagList();
NBTTagCompound nbt1 = null;
if(enchantments != null && enchantments.tagAt(0) != null){
nbt1 = (NBTTagCompound) enchantments.tagAt(0);
}
Enchantment ench = null;
int enchantLevel = 0;
if(nbt1 != null){
ench = Enchantment.enchantmentsList[nbt1.getShort("id")];
enchantLevel = nbt1.getShort("lvl");
}
if(ench == GlobalChests.enchantmentRange && enchantLevel > 0 || enchantLevel == 0){
switch(enchantLevel){
case 0:
return stacks[0].isItemEqual(new ItemStack(Item.ingotIron)) && stacks[1].isItemEqual(new ItemStack(Item.diamond)) && stacks[2].isItemEqual(new ItemStack(Item.ingotGold)) && stacks[3].isItemEqual(new ItemStack(Item.diamond)) && stacks[4].isItemEqual(new ItemStack(GlobalChests.pocketLink)) && stacks[5].isItemEqual(new ItemStack(Item.diamond)) && stacks[6].isItemEqual(new ItemStack(Item.ingotGold)) && stacks[7].isItemEqual(new ItemStack(Item.diamond)) && stacks[8].isItemEqual(new ItemStack(Item.ingotIron));
case 1:
return stacks[0].isItemEqual(new ItemStack(Block.blockIron)) && stacks[1].isItemEqual(new ItemStack(Item.diamond)) && stacks[2].isItemEqual(new ItemStack(Block.blockGold)) && stacks[3].isItemEqual(new ItemStack(Item.diamond)) && stacks[4].isItemEqual(new ItemStack(GlobalChests.pocketLink)) && stacks[5].isItemEqual(new ItemStack(Item.diamond)) && stacks[6].isItemEqual(new ItemStack(Block.blockGold)) && stacks[7].isItemEqual(new ItemStack(Item.diamond)) && stacks[8].isItemEqual(new ItemStack(Block.blockIron));
case 2:
return stacks[0].isItemEqual(new ItemStack(Item.diamond)) && stacks[1].isItemEqual(new ItemStack(Item.diamond)) && stacks[2].isItemEqual(new ItemStack(Item.diamond)) && stacks[3].isItemEqual(new ItemStack(Block.blockDiamond)) && stacks[4].isItemEqual(new ItemStack(GlobalChests.pocketLink)) && stacks[5].isItemEqual(new ItemStack(Block.blockDiamond)) && stacks[6].isItemEqual(new ItemStack(Item.diamond)) && stacks[7].isItemEqual(new ItemStack(Item.diamond)) && stacks[8].isItemEqual(new ItemStack(Item.diamond));
case 3:
return stacks[0].isItemEqual(new ItemStack(Block.blockDiamond)) && stacks[1].isItemEqual(new ItemStack(Item.netherStar)) && stacks[2].isItemEqual(new ItemStack(Block.blockDiamond)) && stacks[3].isItemEqual(new ItemStack(Item.diamond)) && stacks[4].isItemEqual(new ItemStack(GlobalChests.pocketLink)) && stacks[5].isItemEqual(new ItemStack(Item.diamond)) && stacks[6].isItemEqual(new ItemStack(Block.blockDiamond)) && stacks[7].isItemEqual(new ItemStack(Item.netherStar)) && stacks[8].isItemEqual(new ItemStack(Block.blockDiamond));
case 4:
return stacks[0].isItemEqual(new ItemStack(Item.netherStar)) && stacks[1].isItemEqual(new ItemStack(Block.blockDiamond)) && stacks[2].isItemEqual(new ItemStack(Item.netherStar)) && stacks[3].isItemEqual(new ItemStack(Block.blockDiamond)) && stacks[4].isItemEqual(new ItemStack(GlobalChests.pocketLink)) && stacks[5].isItemEqual(new ItemStack(Block.blockDiamond)) && stacks[6].isItemEqual(new ItemStack(Item.netherStar)) && stacks[7].isItemEqual(new ItemStack(Block.dirt)) && stacks[8].isItemEqual(new ItemStack(Item.netherStar));
default:
return false;
}
}
else{
return false;
}
}else{
return false;
}
}
|
diff --git a/javasvn/src/org/tmatesoft/svn/core/internal/io/svn/SVNRepositoryImpl.java b/javasvn/src/org/tmatesoft/svn/core/internal/io/svn/SVNRepositoryImpl.java
index e5bfd034f..f78dea24f 100644
--- a/javasvn/src/org/tmatesoft/svn/core/internal/io/svn/SVNRepositoryImpl.java
+++ b/javasvn/src/org/tmatesoft/svn/core/internal/io/svn/SVNRepositoryImpl.java
@@ -1,572 +1,578 @@
/*
* ====================================================================
* Copyright (c) 2004 TMate Software Ltd. All rights reserved.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
* are also available at http://tmate.org/svn/license.html.
* If newer versions of this license are posted there, you may use a
* newer version instead, at your option.
* ====================================================================
*/
package org.tmatesoft.svn.core.internal.io.svn;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import org.tmatesoft.svn.core.SVNProperty;
import org.tmatesoft.svn.core.diff.SVNDiffWindow;
import org.tmatesoft.svn.core.diff.SVNDiffWindowBuilder;
import org.tmatesoft.svn.core.io.ISVNCredentials;
import org.tmatesoft.svn.core.io.ISVNDirEntryHandler;
import org.tmatesoft.svn.core.io.ISVNEditor;
import org.tmatesoft.svn.core.io.ISVNFileRevisionHandler;
import org.tmatesoft.svn.core.io.ISVNLocationEntryHandler;
import org.tmatesoft.svn.core.io.ISVNLogEntryHandler;
import org.tmatesoft.svn.core.io.ISVNReporter;
import org.tmatesoft.svn.core.io.ISVNReporterBaton;
import org.tmatesoft.svn.core.io.ISVNWorkspaceMediator;
import org.tmatesoft.svn.core.io.SVNException;
import org.tmatesoft.svn.core.io.SVNFileRevision;
import org.tmatesoft.svn.core.io.SVNLocationEntry;
import org.tmatesoft.svn.core.io.SVNLogEntry;
import org.tmatesoft.svn.core.io.SVNLogEntryPath;
import org.tmatesoft.svn.core.io.SVNNodeKind;
import org.tmatesoft.svn.core.io.SVNRepository;
import org.tmatesoft.svn.core.io.SVNRepositoryLocation;
import org.tmatesoft.svn.util.DebugLog;
import org.tmatesoft.svn.util.PathUtil;
/**
* @author Alexander Kitaev
*/
public class SVNRepositoryImpl extends SVNRepository implements ISVNReporter {
private SVNConnection myConnection;
private ISVNCredentials myCredentials;
private String myFullRoot;
protected SVNRepositoryImpl(SVNRepositoryLocation location) {
super(location);
}
public void testConnection() throws SVNException {
try {
openConnection();
} finally {
closeConnection();
}
}
public long getLatestRevision() throws SVNException {
Object[] buffer = new Object[] { "get-latest-rev" };
try {
openConnection();
write("(w())", buffer);
authenticate();
buffer = read("[(N)]", buffer);
} finally {
closeConnection();
}
return SVNReader.getLong(buffer, 0);
}
public long getDatedRevision(Date date) throws SVNException {
if (date == null) {
date = new Date(System.currentTimeMillis());
}
Object[] buffer = new Object[] { "get-dated-rev", date };
try {
openConnection();
write("(w(s))", buffer);
authenticate();
buffer = read("[(N)]", buffer);
} finally {
closeConnection();
}
return SVNReader.getLong(buffer, 0);
}
public Map getRevisionProperties(long revision, Map properties) throws SVNException {
assertValidRevision(revision);
if (properties == null) {
properties = new HashMap();
}
Object[] buffer = new Object[] { "rev-proplist", getRevisionObject(revision) };
try {
openConnection();
write("(w(n))", buffer);
authenticate();
buffer[0] = properties;
read("[((*P))]", buffer);
} finally {
closeConnection();
}
return properties;
}
public String getRevisionPropertyValue(long revision, String propertyName) throws SVNException {
assertValidRevision(revision);
Object[] buffer = new Object[] { "rev-prop", getRevisionObject(revision), propertyName };
try {
openConnection();
write("(w(ns))", buffer);
authenticate();
buffer = read("[((?S))]", buffer);
} finally {
closeConnection();
}
return (String) buffer[0];
}
public SVNNodeKind checkPath(String path, long revision) throws SVNException {
try {
openConnection();
path = getRepositoryPath(path);
Object[] buffer = new Object[] { "check-path", path, getRevisionObject(revision) };
write("(w(s(n)))", buffer);
authenticate();
read("[(W)]", buffer);
return SVNNodeKind.parseKind((String) buffer[0]);
} finally {
closeConnection();
}
}
public int getLocations(String path, long pegRevision, long[] revisions, ISVNLocationEntryHandler handler) throws SVNException {
assertValidRevision(pegRevision);
for (int i = 0; i < revisions.length; i++) {
assertValidRevision(revisions[i]);
}
int count = 0;
try {
openConnection();
path = getRepositoryPath(path);
Object[] buffer = new Object[] { "get-locations", path, getRevisionObject(pegRevision), revisions };
write("(w(sn(*n)))", buffer);
authenticate();
try {
while (true) {
read("(NS)", buffer);
count++;
if (handler != null) {
long revision = SVNReader.getLong(buffer, 0);
String location = SVNReader.getString(buffer, 1);
if (location != null) {
handler.handleLocationEntry(new SVNLocationEntry(revision, location));
}
}
}
} catch (SVNException e) {
read("x", buffer);
}
read("[()]", buffer);
} finally {
closeConnection();
}
return count;
}
public long getFile(String path, long revision, Map properties, OutputStream contents) throws SVNException {
Long rev = revision > 0 ? new Long(revision) : null;
try {
openConnection();
Object[] buffer = new Object[] { "get-file", getRepositoryPath(path), rev, Boolean.valueOf(properties != null), Boolean.valueOf(contents != null) };
write("(w(s(n)ww))", buffer);
authenticate();
buffer[2] = properties;
buffer = read("[((?S)N(*P))]", buffer);
if (properties != null) {
properties.put(SVNProperty.REVISION, buffer[1].toString());
properties.put(SVNProperty.CHECKSUM, buffer[0].toString());
}
if (contents != null) {
Object[] buffer2 = new Object[] { contents };
read("*I", buffer2);
read("[()]", buffer2);
}
return SVNReader.getLong(buffer, 1);
} finally {
closeConnection();
}
}
public long getDir(String path, long revision, Map properties, ISVNDirEntryHandler handler) throws SVNException {
Long rev = getRevisionObject(revision);
// convert path to path relative to repos root.
try {
long start = System.currentTimeMillis();
openConnection();
DebugLog.log("openConnection(): " + (System.currentTimeMillis() - start));
start = System.currentTimeMillis();
path = getRepositoryPath(path);
Object[] buffer = new Object[] { "get-dir", path, rev, Boolean.valueOf(properties != null), Boolean.valueOf(handler != null) };
write("(w(s(n)ww))", buffer);
authenticate();
buffer[1] = properties;
buffer = read("[(N(*P)", buffer);
revision = buffer[0] != null ? SVNReader.getLong(buffer, 0) : revision;
if (handler != null) {
buffer[0] = handler;
read("(*D)))", buffer);
}
DebugLog.log("getDir() finished: " + (System.currentTimeMillis() - start));
} finally {
long start = System.currentTimeMillis();
closeConnection();
DebugLog.log("closeConnection(): " + (System.currentTimeMillis() - start));
}
return revision;
}
public int getFileRevisions(String path, long sRevision, long eRevision, ISVNFileRevisionHandler handler) throws SVNException {
Long srev = getRevisionObject(sRevision);
Long erev = getRevisionObject(eRevision);
int count = 0;
try {
openConnection();
Object[] buffer = new Object[] { "get-file-revs", getRepositoryPath(path), srev, erev };
write("(w(s(n)(n)))", buffer);
authenticate();
while (true) {
SVNFileRevision fileRevision = null;
try {
read("(SN(*P)(*Z))", buffer);
count++;
} catch (SVNException e) {
read("x", buffer);
return count;
}
String name = null;
if (handler != null) {
name = (String) buffer[0];
long revision = SVNReader.getLong(buffer, 1);
Map properties = SVNReader.getMap(buffer, 2);
Map propertiesDelta = SVNReader.getMap(buffer, 3);
if (name != null) {
fileRevision = new SVNFileRevision(name, revision, properties, propertiesDelta);
}
buffer[2] = null;
buffer[3] = null;
}
if (handler != null && fileRevision != null) {
handler.hanldeFileRevision(fileRevision);
fileRevision = null;
}
SVNDiffWindowBuilder builder = SVNDiffWindowBuilder.newInstance();
while (true) {
byte[] line = (byte[]) read("?W?B", buffer)[1];
if (line == null) {
// may be failure
read("[]", buffer);
break;
} else if (line.length == 0) {
// empty line, delta end.
break;
}
builder.accept(line, 0);
SVNDiffWindow window = builder.getDiffWindow();
if (window != null) {
builder.reset(1);
OutputStream os = handler.handleDiffWindow(name == null ? path : name, window);
long length = window.getNewDataLength();
while (length > 0) {
byte[] contents = (byte[]) myConnection.read("B", null)[0];
length -= contents.length;
try {
if (os != null) {
os.write(contents);
}
- } catch (IOException th) {}
+ } catch (IOException th) {
+ DebugLog.error(th);
+ }
}
try {
- os.close();
- } catch (IOException th) {}
+ if (os != null) {
+ os.close();
+ }
+ } catch (IOException th) {
+ DebugLog.error(th);
+ }
}
}
handler.hanldeDiffWindowClosed(name == null ? path : name);
}
} finally {
closeConnection();
}
}
public int log(String[] targetPaths, long startRevision, long endRevision, boolean changedPaths, boolean strictNode, ISVNLogEntryHandler handler)
throws SVNException {
int count = 0;
try {
openConnection();
String[] realTargetPaths = new String[targetPaths.length];
// convert all paths to paths relative to repos root.
for (int i = 0; i < realTargetPaths.length; i++) {
realTargetPaths[i] = getRepositoryPath(targetPaths[i]);
}
Object[] buffer = new Object[] { "log", realTargetPaths, getRevisionObject(startRevision), getRevisionObject(endRevision),
Boolean.valueOf(changedPaths), Boolean.valueOf(strictNode) };
write("(w((*s)(n)(n)ww))", buffer);
authenticate();
while (true) {
try {
read("((", buffer);
Map changedPathsMap = null;
if (changedPaths) {
changedPathsMap = handler != null ? new HashMap() : null;
while (true) {
try {
read("(SW(?S?N))", buffer);
if (changedPathsMap != null) {
String path = SVNReader.getString(buffer, 0);
if (path != null && !"".equals(path.trim())) {
String type = SVNReader.getString(buffer, 1);
String copyPath = SVNReader.getString(buffer, 2);
long copyRev = SVNReader.getLong(buffer, 3);
changedPathsMap.put(path, new SVNLogEntryPath(path, type.charAt(0), copyPath, copyRev));
}
}
} catch (SVNException e) {
break;
}
}
}
read(")N(?S)(?S)(?S))", buffer);
if (handler != null) {
long revision = SVNReader.getLong(buffer, 0);
String author = SVNReader.getString(buffer, 1);
Date date = SVNReader.getDate(buffer, 2);
String message = SVNReader.getString(buffer, 3);
// remove all
handler.handleLogEntry(new SVNLogEntry(changedPathsMap, revision, author, date, message));
}
count++;
} catch (SVNException e) {
read("x", buffer);
return count;
}
}
} finally {
closeConnection();
}
}
public void update(long revision, String target, boolean recursive, ISVNReporterBaton reporter, ISVNEditor editor) throws SVNException {
target = target == null ? "" : target;
Object[] buffer = new Object[] { "update", getRevisionObject(revision), target, Boolean.valueOf(recursive) };
try {
openConnection();
write("(w((n)sw))", buffer);
authenticate();
reporter.report(this);
authenticate();
read("*E", new Object[] { editor });
} finally {
closeConnection();
}
}
public void update(String url, long revision, String target, boolean recursive, ISVNReporterBaton reporter, ISVNEditor editor) throws SVNException {
target = target == null ? "" : target;
url = getCanonicalURL(url);
if (url == null) {
throw new SVNException(url + ": not valid URL");
}
Object[] buffer = new Object[] { "switch", getRevisionObject(revision), target, Boolean.valueOf(recursive), url };
try {
openConnection();
write("(w((n)sws))", buffer);
authenticate();
reporter.report(this);
authenticate();
read("*E", new Object[] { editor });
} finally {
closeConnection();
}
}
public void diff(String url, long revision, String target, boolean ignoreAncestry, boolean recursive, ISVNReporterBaton reporter, ISVNEditor editor)
throws SVNException {
target = target == null ? "" : target;
url = getCanonicalURL(url);
if (url == null) {
throw new SVNException(url + ": not valid URL");
}
Object[] buffer = new Object[] { "diff", getRevisionObject(revision), target, Boolean.valueOf(ignoreAncestry), Boolean.valueOf(recursive), url };
try {
openConnection();
write("(w((n)swws))", buffer);
authenticate();
reporter.report(this);
authenticate();
read("*E", new Object[] { editor });
} finally {
closeConnection();
}
}
public void status(long revision, String target, boolean recursive, ISVNReporterBaton reporter, ISVNEditor editor) throws SVNException {
target = target == null ? "" : target;
Object[] buffer = new Object[] { "status", target, Boolean.valueOf(recursive), getRevisionObject(revision) };
try {
openConnection();
write("(w(sw(n)))", buffer);
authenticate();
reporter.report(this);
authenticate();
read("*E", new Object[] { editor });
} finally {
closeConnection();
}
}
public void setRevisionPropertyValue(long revision, String propertyName, String propertyValue) throws SVNException {
assertValidRevision(revision);
Object[] buffer = new Object[] { "change-rev-prop", getRevisionObject(revision), propertyName, propertyValue };
try {
openConnection();
write("(w(nss))", buffer);
authenticate();
read("[()]", buffer);
} finally {
closeConnection();
}
}
public ISVNEditor getCommitEditor(String logMessage, final ISVNWorkspaceMediator mediator) throws SVNException {
try {
openConnection();
write("(w(s))", new Object[] { "commit", logMessage });
authenticate();
read("[()]", null);
SVNCommitEditor editor = new SVNCommitEditor(this, myConnection, mediator, new Runnable() {
public void run() {
closeConnection();
}
});
return editor;
} catch (SVNException e) {
closeConnection();
throw e;
}
}
void updateCredentials(String uuid, String root) {
if (getRepositoryRoot() != null) {
return;
}
root = root == null ? getRepositoryRoot() : root;
uuid = uuid == null ? getRepositoryUUID() : uuid;
// remove url path from root.
if (root != null) {
myFullRoot = root;
root = root.startsWith("svn://") ? root.substring("svn://".length()) : root.substring("svn+ssh://".length());
root = PathUtil.removeTrailingSlash(root);
if (root.indexOf('/') >= 0) {
root = root.substring(root.indexOf('/'));
} else {
root = "/";
}
}
DebugLog.log("root: " + root);
DebugLog.log("full root: " + myFullRoot);
setRepositoryCredentials(uuid, root);
}
private void openConnection() throws SVNException {
lock();
ISVNConnector connector = SVNRepositoryFactoryImpl.getConnectorFactory().createConnector(this);
myConnection = new SVNConnection(connector);
myConnection.open(this);
authenticate();
}
void authenticate() throws SVNException {
if (myConnection != null) {
myConnection.authenticate(this, myCredentials);
}
}
private void closeConnection() {
if (myConnection != null) {
try {
myConnection.close();
} catch (SVNException e) {} finally {
myConnection = null;
}
}
unlock();
}
private void write(String template, Object[] values) throws SVNException {
if (myConnection == null) {
throw new SVNException("connection is closed, can't write");
}
myConnection.write(template, values);
}
private Object[] read(String template, Object[] values) throws SVNException {
if (myConnection == null) {
throw new SVNException("connection is closed, can't read");
}
return myConnection.read(template, values);
}
/*
* ISVNReporter methods
*/
public void setPath(String path, long revision, boolean startEmpty) throws SVNException {
assertValidRevision(revision);
write("(w(snw))", new Object[] { "set-path", path, getRevisionObject(revision), Boolean.valueOf(startEmpty) });
}
public void deletePath(String path) throws SVNException {
write("(w(s))", new Object[] { "delete-path", path });
}
public void linkPath(SVNRepositoryLocation repository, String path, long revison, boolean startEmtpy) throws SVNException {
assertValidRevision(revison);
write("(w(ssnw))", new Object[] { "link-path", path, repository.toString(), getRevisionObject(revison), Boolean.valueOf(startEmtpy) });
}
public void finishReport() throws SVNException {
write("(w())", new Object[] { "finish-report" });
}
public void abortReport() throws SVNException {
write("(w())", new Object[] { "abort-report" });
}
public String getRepositoryPath(String path) {
if (path != null && path.startsWith("/")) {
// assuming it is full path.
return path;
}
String fullPath = PathUtil.append(PathUtil.decode(getLocation().getPath()), path);
// substract root path
if (fullPath.startsWith(getRepositoryRoot())) {
fullPath = fullPath.substring(getRepositoryRoot().length());
}
if (!fullPath.startsWith("/")) {
fullPath = "/" + fullPath;
}
return PathUtil.removeTrailingSlash(PathUtil.isEmpty(fullPath) ? "/" : fullPath);
}
public void setCredentials(ISVNCredentials credentials) {
myCredentials = credentials;
}
public ISVNCredentials getCredentials() {
return myCredentials;
}
public String getFullRoot() {
return myFullRoot;
}
}
| false
| true
|
public int getFileRevisions(String path, long sRevision, long eRevision, ISVNFileRevisionHandler handler) throws SVNException {
Long srev = getRevisionObject(sRevision);
Long erev = getRevisionObject(eRevision);
int count = 0;
try {
openConnection();
Object[] buffer = new Object[] { "get-file-revs", getRepositoryPath(path), srev, erev };
write("(w(s(n)(n)))", buffer);
authenticate();
while (true) {
SVNFileRevision fileRevision = null;
try {
read("(SN(*P)(*Z))", buffer);
count++;
} catch (SVNException e) {
read("x", buffer);
return count;
}
String name = null;
if (handler != null) {
name = (String) buffer[0];
long revision = SVNReader.getLong(buffer, 1);
Map properties = SVNReader.getMap(buffer, 2);
Map propertiesDelta = SVNReader.getMap(buffer, 3);
if (name != null) {
fileRevision = new SVNFileRevision(name, revision, properties, propertiesDelta);
}
buffer[2] = null;
buffer[3] = null;
}
if (handler != null && fileRevision != null) {
handler.hanldeFileRevision(fileRevision);
fileRevision = null;
}
SVNDiffWindowBuilder builder = SVNDiffWindowBuilder.newInstance();
while (true) {
byte[] line = (byte[]) read("?W?B", buffer)[1];
if (line == null) {
// may be failure
read("[]", buffer);
break;
} else if (line.length == 0) {
// empty line, delta end.
break;
}
builder.accept(line, 0);
SVNDiffWindow window = builder.getDiffWindow();
if (window != null) {
builder.reset(1);
OutputStream os = handler.handleDiffWindow(name == null ? path : name, window);
long length = window.getNewDataLength();
while (length > 0) {
byte[] contents = (byte[]) myConnection.read("B", null)[0];
length -= contents.length;
try {
if (os != null) {
os.write(contents);
}
} catch (IOException th) {}
}
try {
os.close();
} catch (IOException th) {}
}
}
handler.hanldeDiffWindowClosed(name == null ? path : name);
}
} finally {
closeConnection();
}
}
|
public int getFileRevisions(String path, long sRevision, long eRevision, ISVNFileRevisionHandler handler) throws SVNException {
Long srev = getRevisionObject(sRevision);
Long erev = getRevisionObject(eRevision);
int count = 0;
try {
openConnection();
Object[] buffer = new Object[] { "get-file-revs", getRepositoryPath(path), srev, erev };
write("(w(s(n)(n)))", buffer);
authenticate();
while (true) {
SVNFileRevision fileRevision = null;
try {
read("(SN(*P)(*Z))", buffer);
count++;
} catch (SVNException e) {
read("x", buffer);
return count;
}
String name = null;
if (handler != null) {
name = (String) buffer[0];
long revision = SVNReader.getLong(buffer, 1);
Map properties = SVNReader.getMap(buffer, 2);
Map propertiesDelta = SVNReader.getMap(buffer, 3);
if (name != null) {
fileRevision = new SVNFileRevision(name, revision, properties, propertiesDelta);
}
buffer[2] = null;
buffer[3] = null;
}
if (handler != null && fileRevision != null) {
handler.hanldeFileRevision(fileRevision);
fileRevision = null;
}
SVNDiffWindowBuilder builder = SVNDiffWindowBuilder.newInstance();
while (true) {
byte[] line = (byte[]) read("?W?B", buffer)[1];
if (line == null) {
// may be failure
read("[]", buffer);
break;
} else if (line.length == 0) {
// empty line, delta end.
break;
}
builder.accept(line, 0);
SVNDiffWindow window = builder.getDiffWindow();
if (window != null) {
builder.reset(1);
OutputStream os = handler.handleDiffWindow(name == null ? path : name, window);
long length = window.getNewDataLength();
while (length > 0) {
byte[] contents = (byte[]) myConnection.read("B", null)[0];
length -= contents.length;
try {
if (os != null) {
os.write(contents);
}
} catch (IOException th) {
DebugLog.error(th);
}
}
try {
if (os != null) {
os.close();
}
} catch (IOException th) {
DebugLog.error(th);
}
}
}
handler.hanldeDiffWindowClosed(name == null ? path : name);
}
} finally {
closeConnection();
}
}
|
diff --git a/src/main/java/edu/msergey/jalg/exercises/ch3/ex41/DoubleLinkedList.java b/src/main/java/edu/msergey/jalg/exercises/ch3/ex41/DoubleLinkedList.java
index 28b85d2..64361cc 100644
--- a/src/main/java/edu/msergey/jalg/exercises/ch3/ex41/DoubleLinkedList.java
+++ b/src/main/java/edu/msergey/jalg/exercises/ch3/ex41/DoubleLinkedList.java
@@ -1,87 +1,87 @@
package edu.msergey.jalg.exercises.ch3.ex41;
public class DoubleLinkedList<E extends Comparable<E>> {
private Node<E> head;
private Node<E> tail;
public Node<E> getHead() {
return head;
}
public Node<E> getTail() {
return tail;
}
public void addTail(Node<E> node) {
if (tail != null) {
tail.next = node;
node.prev = tail;
node.next = null;
tail = node;
} else {
head = node;
tail = node;
head.prev = null;
tail.next = null;
}
}
public void addHead(Node<E> node) {
if (head != null) {
head.prev = node;
node.prev = null;
node.next = head;
head = node;
} else {
head = node;
tail = node;
head.prev = null;
tail.next = null;
}
}
- public DoubleLinkedList<E> removeAndCopy(IRemoveChecker removeChecker) {
+ public DoubleLinkedList<E> removeAndCopy(IRemoveChecker<E> removeChecker) {
DoubleLinkedList<E> newList = new DoubleLinkedList<E>();
if (head != tail) {
for (Node<E> current = head.next; current != tail; current = current.next) {
if (removeChecker.needToRemove(current)) {
current.prev.next = current.next;
current.next.prev = current.prev;
Node<E> prevCurrent = current.prev;
current.next = null;
current.prev = null;
newList.addTail(new Node<E>(current.value));
current = prevCurrent;
}
}
}
if (removeChecker.needToRemove(head)) {
Node<E> nextHead = head.next;
head.next = null;
newList.addHead(new Node<E>(head.value));
head = nextHead;
if (head == null) return newList;
head.prev = null;
}
if (removeChecker.needToRemove(tail)) {
Node<E> prevTail = tail.prev;
tail.prev = null;
newList.addTail(new Node<E>(tail.value));
tail = prevTail;
if (tail == null) return newList;
tail.next = null;
}
return newList;
}
}
| true
| true
|
public DoubleLinkedList<E> removeAndCopy(IRemoveChecker removeChecker) {
DoubleLinkedList<E> newList = new DoubleLinkedList<E>();
if (head != tail) {
for (Node<E> current = head.next; current != tail; current = current.next) {
if (removeChecker.needToRemove(current)) {
current.prev.next = current.next;
current.next.prev = current.prev;
Node<E> prevCurrent = current.prev;
current.next = null;
current.prev = null;
newList.addTail(new Node<E>(current.value));
current = prevCurrent;
}
}
}
if (removeChecker.needToRemove(head)) {
Node<E> nextHead = head.next;
head.next = null;
newList.addHead(new Node<E>(head.value));
head = nextHead;
if (head == null) return newList;
head.prev = null;
}
if (removeChecker.needToRemove(tail)) {
Node<E> prevTail = tail.prev;
tail.prev = null;
newList.addTail(new Node<E>(tail.value));
tail = prevTail;
if (tail == null) return newList;
tail.next = null;
}
return newList;
}
|
public DoubleLinkedList<E> removeAndCopy(IRemoveChecker<E> removeChecker) {
DoubleLinkedList<E> newList = new DoubleLinkedList<E>();
if (head != tail) {
for (Node<E> current = head.next; current != tail; current = current.next) {
if (removeChecker.needToRemove(current)) {
current.prev.next = current.next;
current.next.prev = current.prev;
Node<E> prevCurrent = current.prev;
current.next = null;
current.prev = null;
newList.addTail(new Node<E>(current.value));
current = prevCurrent;
}
}
}
if (removeChecker.needToRemove(head)) {
Node<E> nextHead = head.next;
head.next = null;
newList.addHead(new Node<E>(head.value));
head = nextHead;
if (head == null) return newList;
head.prev = null;
}
if (removeChecker.needToRemove(tail)) {
Node<E> prevTail = tail.prev;
tail.prev = null;
newList.addTail(new Node<E>(tail.value));
tail = prevTail;
if (tail == null) return newList;
tail.next = null;
}
return newList;
}
|
diff --git a/src/main/java/com/xebialabs/overthere/cifs/winrm/connector/Kb5HttpConnector.java b/src/main/java/com/xebialabs/overthere/cifs/winrm/connector/Kb5HttpConnector.java
index eaaafa7..67d596c 100644
--- a/src/main/java/com/xebialabs/overthere/cifs/winrm/connector/Kb5HttpConnector.java
+++ b/src/main/java/com/xebialabs/overthere/cifs/winrm/connector/Kb5HttpConnector.java
@@ -1,357 +1,357 @@
/*
* Kb5HttpConnector.java
*
* User: Greg Schueler <a href="mailto:greg@dtosolutions.com">greg@dtosolutions.com</a>
* Created: 5/23/12 3:08 PM
*
*/
package com.xebialabs.overthere.cifs.winrm.connector;
import com.google.common.io.Closeables;
import com.xebialabs.overthere.ConnectionOptions;
import com.xebialabs.overthere.cifs.CifsConnectionBuilder;
import com.xebialabs.overthere.cifs.winrm.soap.SoapAction;
import com.xebialabs.overthere.cifs.winrm.exception.BlankValueRuntimeException;
import com.xebialabs.overthere.cifs.winrm.exception.InvalidFilePathRuntimeException;
import com.xebialabs.overthere.cifs.winrm.exception.WinRMRuntimeIOException;
import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.auth.AuthScope;
import org.apache.http.HttpResponse;
import org.apache.http.auth.Credentials;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.params.AuthPolicy;
import org.apache.http.client.params.ClientPNames;
import org.apache.http.conn.scheme.Scheme;
import org.apache.http.conn.ssl.*;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.auth.KerberosSchemeFactory;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.protocol.BasicHttpContext;
import org.apache.http.protocol.HttpContext;
import org.apache.http.util.EntityUtils;
import org.dom4j.Document;
import org.dom4j.DocumentHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.security.auth.Subject;
import javax.security.auth.callback.*;
import javax.security.auth.kerberos.KerberosPrincipal;
import javax.security.auth.login.AppConfigurationEntry;
import javax.security.auth.login.Configuration;
import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;
import java.io.*;
import java.net.URL;
import java.security.*;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.util.HashMap;
import static com.xebialabs.overthere.ConnectionOptions.*;
/**
* Kb5HttpConnector enables Kerberos authentication over HTTP(S).
*
* @author Greg Schueler <a href="mailto:greg@dtosolutions.com">greg@dtosolutions.com</a>
*/
public class Kb5HttpConnector extends JdkHttpConnector {
private static Logger logger = LoggerFactory.getLogger(Kb5HttpConnector.class);
ConnectionOptions options;
private String httpsCertTrustStrategy;
private String httpsHostnameVerifyStrategy;
private String username;
private String password;
private boolean debugKerberosAuth;
public Kb5HttpConnector(final URL targetURL, final ConnectionOptions options) {
super(targetURL, null);
this.options = options;
this.httpsCertTrustStrategy = options.getOptional(CifsConnectionBuilder.HTTPS_CERTIFICATE_TRUST_STRATEGY);
this.httpsHostnameVerifyStrategy = options.getOptional(CifsConnectionBuilder.HTTPS_HOSTNAME_VERIFY_STRATEGY);
this.username = options.getOptional(USERNAME);
this.password = options.getOptional(PASSWORD);
this.debugKerberosAuth = options.<Boolean>get(CifsConnectionBuilder.DEBUG_KERBEROS_AUTH, false);
}
/**
* Override the sendMessage method to use custom authentication over HTTP
*/
@Override
public Document sendMessage(final Document requestDocument, final SoapAction soapAction) {
return runPrivileged(new PrivilegedSendMessage(this, requestDocument, soapAction));
}
/**
* Perform the JAAS login and run the command within a privileged scope.
*
* @param privilegedSendMessage the PrivilegedSendMessage
*
* @return The result Document
*/
private Document runPrivileged(final PrivilegedSendMessage privilegedSendMessage) {
final CallbackHandler handler = new ProvidedAuthCallback(username, password);
Document result;
try {
final LoginContext lc = new LoginContext("", null, handler, new KerberosJaasConfiguration(debugKerberosAuth));
lc.login();
result = Subject.doAs(lc.getSubject(), privilegedSendMessage);
} catch (LoginException e) {
throw new WinRMRuntimeIOException("Login failure sending message on " + getTargetURL() + " error: "+e.getMessage(),
privilegedSendMessage.getRequestDocument(), null,
e);
} catch (PrivilegedActionException e) {
throw new WinRMRuntimeIOException("Failure sending message on " + getTargetURL() + " error: " + e
.getMessage(),
privilegedSendMessage.getRequestDocument(), null,
e.getException());
}
return result;
}
/**
* CallbackHandler that uses provided username/password credentials.
*/
private static class ProvidedAuthCallback implements CallbackHandler {
private String username;
private String password;
ProvidedAuthCallback(final String username, final String password) {
this.username = username;
this.password = password;
}
public void handle(final Callback[] callbacks) throws IOException, UnsupportedCallbackException {
for (final Callback callback : callbacks) {
if (callback instanceof NameCallback) {
final NameCallback nc = (NameCallback) callback;
nc.setName(username);
} else if (callback instanceof PasswordCallback) {
final PasswordCallback pc = (PasswordCallback) callback;
pc.setPassword(password.toCharArray());
} else {
throw new UnsupportedCallbackException
(callback, "Unrecognized Callback");
}
}
}
}
private static class KerberosJaasConfiguration extends Configuration {
private boolean debug;
private KerberosJaasConfiguration(boolean debug) {
this.debug = debug;
}
@Override
public AppConfigurationEntry[] getAppConfigurationEntry(String s) {
final HashMap<String, String> options = new HashMap<String, String>();
options.put("client", "true");
options.put("useTicketCache", "false");
options.put("useKeyTab", "false");
options.put("doNotPrompt", "false");
if(debug) {
options.put("debug", "true");
}
return new AppConfigurationEntry[] {new AppConfigurationEntry("com.sun.security.auth.module.Krb5LoginModule",
AppConfigurationEntry.LoginModuleControlFlag.REQUIRED, options)};
}
}
/**
* PrivilegedActionException that wraps the internal sendMessage
*/
private static class PrivilegedSendMessage implements PrivilegedExceptionAction<Document> {
Kb5HttpConnector connector;
private Document requestDocument;
SoapAction soapAction;
private PrivilegedSendMessage(final Kb5HttpConnector connector, final Document requestDocument,
final SoapAction soapAction) {
this.connector = connector;
this.requestDocument = requestDocument;
this.soapAction = soapAction;
}
@Override
public Document run() throws Exception {
return connector.int_sendMessage(requestDocument, soapAction);
}
public Document getRequestDocument() {
return requestDocument;
}
}
/**
* Internal sendMessage, performs the HTTP request and returns the result document.
*/
private Document int_sendMessage(final Document requestDocument, final SoapAction soapAction) {
final DefaultHttpClient client = new DefaultHttpClient();
try {
configureHttpClient(client);
final HttpContext context = new BasicHttpContext();
final HttpPost request = new HttpPost(getTargetURL().toURI());
if (soapAction != null) {
request.setHeader("SOAPAction", soapAction.getValue());
}
final String requestDocAsString = toString(requestDocument);
logger.trace("Sending request to {}", getTargetURL());
logger.trace("Request body: {} {}", getTargetURL(), requestDocAsString);
final HttpEntity entity = createEntity(requestDocAsString);
request.setEntity(entity);
final HttpResponse response = client.execute(request, context);
- if (logger.isDebugEnabled()) {
+ if (logger.isTraceEnabled()) {
for (final Header header : response.getAllHeaders()) {
logger.trace("Header {}: {}", header.getName(), header.getValue());
}
}
if (response.getStatusLine().getStatusCode() != 200) {
throw new WinRMRuntimeIOException(
"Response code was " + response.getStatusLine().getStatusCode());
}
final String text = handleResponse(response, context);
EntityUtils.consume(response.getEntity());
logger.trace("Response body: {}", text);
return DocumentHelper.parseText(text);
} catch (BlankValueRuntimeException bvrte) {
throw bvrte;
} catch (InvalidFilePathRuntimeException ifprte) {
throw ifprte;
} catch (Exception e) {
throw new WinRMRuntimeIOException("Send message on " + getTargetURL() + " error ", requestDocument, null,
e);
} finally {
client.getConnectionManager().shutdown();
}
}
/**
* Configure the httpclient for use in all requests.
*/
private void configureHttpClient(final DefaultHttpClient httpclient) throws NoSuchAlgorithmException,
KeyManagementException,
KeyStoreException,
UnrecoverableKeyException {
configureTrust(httpclient);
configureAuthentication(httpclient);
}
/**
* Configure auth schemes to use for the HttpClient.
*/
protected void configureAuthentication(final DefaultHttpClient httpclient) {
final Credentials use_jaas_creds = new Credentials() {
public String getPassword() {
return password;
}
public Principal getUserPrincipal() {
return new KerberosPrincipal(username);
}
};
httpclient.getCredentialsProvider().setCredentials(new AuthScope(null, -1, null), use_jaas_creds);
httpclient.getParams().setBooleanParameter(ClientPNames.HANDLE_AUTHENTICATION, true);
}
/**
* Handle the httpResponse and return the SOAP XML String.
*/
protected String handleResponse(final HttpResponse response, final HttpContext context) throws IOException {
final HttpEntity entity = response.getEntity();
if (null == entity.getContentType() || !entity.getContentType().getValue().startsWith(
"application/soap+xml")) {
throw new WinRMRuntimeIOException(
"Send message on " + getTargetURL() + " error: Unexpected content-type: " + entity
.getContentType());
}
final InputStream is = entity.getContent();
final Writer writer = new StringWriter();
final Reader reader = new BufferedReader(new InputStreamReader(is, "UTF-8"));
try {
int n;
final char[] buffer = new char[1024];
while ((n = reader.read(buffer)) != -1) {
writer.write(buffer, 0, n);
}
} finally {
Closeables.closeQuietly(reader);
Closeables.closeQuietly(is);
}
return writer.toString();
}
/**
* Configure certificate trust strategy and hostname verifier strategy for the HttpClient
*/
private void configureTrust(final DefaultHttpClient httpclient) throws NoSuchAlgorithmException,
KeyManagementException,
KeyStoreException, UnrecoverableKeyException {
if (!"https".equalsIgnoreCase(getTargetURL().getProtocol())) {
return;
}
final TrustStrategy trustStrategy;
final X509HostnameVerifier hostnameVerifier;
if ("all".equals(httpsCertTrustStrategy)) {
trustStrategy = new TrustStrategy() {
@Override
public boolean isTrusted(final X509Certificate[] chain, final String authType) throws
CertificateException {
return true;
}
};
} else if ("self-signed".equals(httpsCertTrustStrategy)) {
trustStrategy = new TrustSelfSignedStrategy();
} else {
//"default"
trustStrategy = null;
}
if ("all".equals(httpsHostnameVerifyStrategy)) {
hostnameVerifier = SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER;
} else if ("strict".equals(httpsHostnameVerifyStrategy)) {
hostnameVerifier = SSLSocketFactory.STRICT_HOSTNAME_VERIFIER;
} else {
//"browser-compatible"
hostnameVerifier = SSLSocketFactory.BROWSER_COMPATIBLE_HOSTNAME_VERIFIER;
}
if (null != trustStrategy || null != hostnameVerifier) {
if (logger.isDebugEnabled()) {
logger.debug("Configuring httpsTrustCertificates strategy: {}", httpsCertTrustStrategy);
logger.debug("Configuring httpsVerifyHostname strategy: {}", httpsHostnameVerifyStrategy);
}
final SSLSocketFactory socketFactory = new SSLSocketFactory(trustStrategy, hostnameVerifier);
final Scheme sch = new Scheme("https", 443, socketFactory);
httpclient.getConnectionManager().getSchemeRegistry().register(sch);
}
}
/**
* Create the HttpEntity to send in the request.
*/
protected HttpEntity createEntity(final String requestDocAsString) throws UnsupportedEncodingException {
return new StringEntity(requestDocAsString, ContentType.create("application/soap+xml", "UTF-8"));
}
}
| true
| true
|
private Document int_sendMessage(final Document requestDocument, final SoapAction soapAction) {
final DefaultHttpClient client = new DefaultHttpClient();
try {
configureHttpClient(client);
final HttpContext context = new BasicHttpContext();
final HttpPost request = new HttpPost(getTargetURL().toURI());
if (soapAction != null) {
request.setHeader("SOAPAction", soapAction.getValue());
}
final String requestDocAsString = toString(requestDocument);
logger.trace("Sending request to {}", getTargetURL());
logger.trace("Request body: {} {}", getTargetURL(), requestDocAsString);
final HttpEntity entity = createEntity(requestDocAsString);
request.setEntity(entity);
final HttpResponse response = client.execute(request, context);
if (logger.isDebugEnabled()) {
for (final Header header : response.getAllHeaders()) {
logger.trace("Header {}: {}", header.getName(), header.getValue());
}
}
if (response.getStatusLine().getStatusCode() != 200) {
throw new WinRMRuntimeIOException(
"Response code was " + response.getStatusLine().getStatusCode());
}
final String text = handleResponse(response, context);
EntityUtils.consume(response.getEntity());
logger.trace("Response body: {}", text);
return DocumentHelper.parseText(text);
} catch (BlankValueRuntimeException bvrte) {
throw bvrte;
} catch (InvalidFilePathRuntimeException ifprte) {
throw ifprte;
} catch (Exception e) {
throw new WinRMRuntimeIOException("Send message on " + getTargetURL() + " error ", requestDocument, null,
e);
} finally {
client.getConnectionManager().shutdown();
}
}
|
private Document int_sendMessage(final Document requestDocument, final SoapAction soapAction) {
final DefaultHttpClient client = new DefaultHttpClient();
try {
configureHttpClient(client);
final HttpContext context = new BasicHttpContext();
final HttpPost request = new HttpPost(getTargetURL().toURI());
if (soapAction != null) {
request.setHeader("SOAPAction", soapAction.getValue());
}
final String requestDocAsString = toString(requestDocument);
logger.trace("Sending request to {}", getTargetURL());
logger.trace("Request body: {} {}", getTargetURL(), requestDocAsString);
final HttpEntity entity = createEntity(requestDocAsString);
request.setEntity(entity);
final HttpResponse response = client.execute(request, context);
if (logger.isTraceEnabled()) {
for (final Header header : response.getAllHeaders()) {
logger.trace("Header {}: {}", header.getName(), header.getValue());
}
}
if (response.getStatusLine().getStatusCode() != 200) {
throw new WinRMRuntimeIOException(
"Response code was " + response.getStatusLine().getStatusCode());
}
final String text = handleResponse(response, context);
EntityUtils.consume(response.getEntity());
logger.trace("Response body: {}", text);
return DocumentHelper.parseText(text);
} catch (BlankValueRuntimeException bvrte) {
throw bvrte;
} catch (InvalidFilePathRuntimeException ifprte) {
throw ifprte;
} catch (Exception e) {
throw new WinRMRuntimeIOException("Send message on " + getTargetURL() + " error ", requestDocument, null,
e);
} finally {
client.getConnectionManager().shutdown();
}
}
|
diff --git a/StoreIntegrationTests/src/main/java/com/nearinfinity/honeycomb/mysql/HandleProxyIntegrationTest.java b/StoreIntegrationTests/src/main/java/com/nearinfinity/honeycomb/mysql/HandleProxyIntegrationTest.java
index f3a33199..818fa4bd 100644
--- a/StoreIntegrationTests/src/main/java/com/nearinfinity/honeycomb/mysql/HandleProxyIntegrationTest.java
+++ b/StoreIntegrationTests/src/main/java/com/nearinfinity/honeycomb/mysql/HandleProxyIntegrationTest.java
@@ -1,52 +1,52 @@
package com.nearinfinity.honeycomb.mysql;
import com.google.common.collect.Lists;
import com.nearinfinity.honeycomb.mysql.gen.ColumnSchema;
import com.nearinfinity.honeycomb.mysql.gen.ColumnType;
import com.nearinfinity.honeycomb.mysql.gen.IndexSchema;
import com.nearinfinity.honeycomb.mysql.gen.TableSchema;
import org.xml.sax.SAXException;
import javax.xml.parsers.ParserConfigurationException;
import java.io.IOException;
import java.util.HashMap;
public class HandleProxyIntegrationTest {
private static HandlerProxyFactory factory;
public static void suiteSetup() throws IOException, SAXException, ParserConfigurationException {
factory = Bootstrap.startup();
}
public static void testSuccessfulRename() throws Exception {
final String newTableName = "test2";
HandlerProxy proxy = factory.createHandlerProxy();
TableSchema schema = getTableSchema();
- proxy.createTable("hbase", "test", Util.serializeTableSchema(schema));
+ proxy.createTable("hbase", "test", "hbase", Util.serializeTableSchema(schema), 0);
proxy.renameTable(newTableName);
assert (newTableName.equals(proxy.getTableName()));
proxy.dropTable();
}
private static TableSchema getTableSchema() {
HashMap<String, ColumnSchema> columns = new HashMap<String, ColumnSchema>();
HashMap<String, IndexSchema> indices = new HashMap<String, IndexSchema>();
columns.put("c1", new ColumnSchema(ColumnType.LONG, true, false, 8, 0, 0));
columns.put("c2", new ColumnSchema(ColumnType.LONG, true, false, 8, 0, 0));
indices.put("i1", new IndexSchema(Lists.newArrayList("c1"), false));
return new TableSchema(columns, indices);
}
public static void main(String[] args) throws Exception {
try {
suiteSetup();
testSuccessfulRename();
} catch (Exception e) {
System.out.println(e);
System.exit(1);
}
}
}
| true
| true
|
public static void testSuccessfulRename() throws Exception {
final String newTableName = "test2";
HandlerProxy proxy = factory.createHandlerProxy();
TableSchema schema = getTableSchema();
proxy.createTable("hbase", "test", Util.serializeTableSchema(schema));
proxy.renameTable(newTableName);
assert (newTableName.equals(proxy.getTableName()));
proxy.dropTable();
}
|
public static void testSuccessfulRename() throws Exception {
final String newTableName = "test2";
HandlerProxy proxy = factory.createHandlerProxy();
TableSchema schema = getTableSchema();
proxy.createTable("hbase", "test", "hbase", Util.serializeTableSchema(schema), 0);
proxy.renameTable(newTableName);
assert (newTableName.equals(proxy.getTableName()));
proxy.dropTable();
}
|
diff --git a/Evil.java b/Evil.java
index c4a0555..fb33c63 100644
--- a/Evil.java
+++ b/Evil.java
@@ -1,226 +1,228 @@
import org.antlr.runtime.*;
import org.antlr.runtime.tree.*;
import org.antlr.stringtemplate.*;
import java.io.*;
import java.util.*;
public class Evil
{
public static void main(String[] args)
{
parseParameters(args);
CommonTokenStream tokens = new CommonTokenStream(createLexer());
EvilParser parser = new EvilParser(tokens);
EvilParser.program_return ret = null;
try
{
ret = parser.program();
}
catch (org.antlr.runtime.RecognitionException e)
{
error(e.toString());
}
CommonTree t = (CommonTree)ret.getTree();
if (_displayAST && t != null)
{
DOTTreeGenerator gen = new DOTTreeGenerator();
StringTemplate st = gen.toDOT(t);
System.out.println(st);
}
/*
To create and invoke a tree parser. Modify with the appropriate
name of the tree parser and the appropriate start rule.
*/
ArrayList<FuncBlock> blist = new ArrayList<FuncBlock>();
HashMap<String, Type> vartable = null;
HashMap<String, StructType> structtable = null;
try
{
CommonTreeNodeStream nodes = new CommonTreeNodeStream(t);
nodes.setTokenStream(tokens);
TypeCheck tparser = new TypeCheck(nodes);
structtable = new HashMap<String, StructType>();
vartable = new HashMap<String, Type>();
tparser.verify(new HashMap<String, FuncType>(), structtable, vartable);
nodes = new CommonTreeNodeStream(t);
nodes.setTokenStream(tokens);
ILOC iloc = new ILOC(nodes);
iloc.generate(blist,structtable,vartable);
}
catch (org.antlr.runtime.RecognitionException e)
{
error(e.toString());
}
String ilocstr = "";
if(!_dumpSPARC)
{
for(FuncBlock b : blist){
if(b.name.equals("main")){
ilocstr += "@function " + b.name + "\n";
}
}
for(FuncBlock b : blist){
if(!b.name.equals("main")){
ilocstr += "@function " + b.name + "\n";
}
}
for(FuncBlock b : blist){
ilocstr += b.getHeader(false) + "\n";
}
for(FuncBlock b : blist){
if(b.name.equals("main")){
ilocstr += b.getInstructions(false) + "\n";
}
}
for(FuncBlock b : blist){
if(!b.name.equals("main")){
ilocstr += b.getInstructions(false) + "\n";
}
}
if(!_dumpIL){
System.out.println(ilocstr);
}
else{
try{
FileOutputStream f = new FileOutputStream(_inputFile.replace(".ev", "") + ".il");
f.write(ilocstr.getBytes());
f.close();
}
catch(IOException e){
e.printStackTrace();
}
}
}
else
{
RegisterAllocator ra = new RegisterAllocator(blist);
ra.color();
System.out.println(".section\t\".text\"");
System.out.println(".align 4");
for(String s : vartable.keySet())
{
System.out.println(".common\t" + s + ", 4, 4");
}
for(FuncBlock b : blist)
{
System.out.println(".global " + b.name);
System.out.println(".type\t" + b.name + ", #function");
}
for(FuncBlock b : blist)
{
if(b.name.equals("main"))
{
System.out.println(b.getHeader(true));
System.out.println(b.getInstructions(true));
+ System.out.println(" ret\n restore");
}
}
for(FuncBlock b : blist)
{
if(!b.name.equals("main"))
{
System.out.println(b.getHeader(true));
System.out.println(b.getInstructions(true));
+ System.out.println(" ret\n restore");
}
}
System.out.println();
System.out.println(".section\t\".rodata\"");
System.out.println(".align 8");
System.out.println(".LLC0:");
System.out.println(".asciz\t\"%d \"");
System.out.println(".align 8");
System.out.println(".LLC1:");
System.out.println(".asciz\t\"%d\\n\"");
System.out.println(".align 8");
System.out.println(".LLC2:");
System.out.println(".asciz\t\"%d\"");
System.out.println();
System.out.println(".common\t_read, 4, 4");
}
//RegisterAllocator ra = new RegisterAllocator(blist);
//ra.color();
}
private static final String DISPLAYAST = "-displayAST";
private static final String DUMPIL = "-dumpIL";
private static final String DUMPSPARC = "-dumpSPARC";
private static String _inputFile = null;
private static boolean _displayAST = false;
private static boolean _dumpIL = false;
private static boolean _dumpSPARC = false;
private static void parseParameters(String [] args)
{
for (int i = 0; i < args.length; i++)
{
if (args[i].equals(DISPLAYAST))
{
_displayAST = true;
}
else if(args[i].equals(DUMPIL))
{
_dumpIL = true;
}
else if(args[i].equals(DUMPSPARC))
{
_dumpSPARC = true;
}
else if (args[i].charAt(0) == '-')
{
System.err.println("unexpected option: " + args[i]);
System.exit(1);
}
else if (_inputFile != null)
{
System.err.println("too many files specified");
System.exit(1);
}
else
{
_inputFile = args[i];
}
}
}
private static void error(String msg)
{
System.err.println(msg);
System.exit(1);
}
private static EvilLexer createLexer()
{
try
{
ANTLRInputStream input;
if (_inputFile == null)
{
input = new ANTLRInputStream(System.in);
}
else
{
input = new ANTLRInputStream(
new BufferedInputStream(new FileInputStream(_inputFile)));
}
return new EvilLexer(input);
}
catch (java.io.IOException e)
{
System.err.println("file not found: " + _inputFile);
System.exit(1);
return null;
}
}
}
| false
| true
|
public static void main(String[] args)
{
parseParameters(args);
CommonTokenStream tokens = new CommonTokenStream(createLexer());
EvilParser parser = new EvilParser(tokens);
EvilParser.program_return ret = null;
try
{
ret = parser.program();
}
catch (org.antlr.runtime.RecognitionException e)
{
error(e.toString());
}
CommonTree t = (CommonTree)ret.getTree();
if (_displayAST && t != null)
{
DOTTreeGenerator gen = new DOTTreeGenerator();
StringTemplate st = gen.toDOT(t);
System.out.println(st);
}
/*
To create and invoke a tree parser. Modify with the appropriate
name of the tree parser and the appropriate start rule.
*/
ArrayList<FuncBlock> blist = new ArrayList<FuncBlock>();
HashMap<String, Type> vartable = null;
HashMap<String, StructType> structtable = null;
try
{
CommonTreeNodeStream nodes = new CommonTreeNodeStream(t);
nodes.setTokenStream(tokens);
TypeCheck tparser = new TypeCheck(nodes);
structtable = new HashMap<String, StructType>();
vartable = new HashMap<String, Type>();
tparser.verify(new HashMap<String, FuncType>(), structtable, vartable);
nodes = new CommonTreeNodeStream(t);
nodes.setTokenStream(tokens);
ILOC iloc = new ILOC(nodes);
iloc.generate(blist,structtable,vartable);
}
catch (org.antlr.runtime.RecognitionException e)
{
error(e.toString());
}
String ilocstr = "";
if(!_dumpSPARC)
{
for(FuncBlock b : blist){
if(b.name.equals("main")){
ilocstr += "@function " + b.name + "\n";
}
}
for(FuncBlock b : blist){
if(!b.name.equals("main")){
ilocstr += "@function " + b.name + "\n";
}
}
for(FuncBlock b : blist){
ilocstr += b.getHeader(false) + "\n";
}
for(FuncBlock b : blist){
if(b.name.equals("main")){
ilocstr += b.getInstructions(false) + "\n";
}
}
for(FuncBlock b : blist){
if(!b.name.equals("main")){
ilocstr += b.getInstructions(false) + "\n";
}
}
if(!_dumpIL){
System.out.println(ilocstr);
}
else{
try{
FileOutputStream f = new FileOutputStream(_inputFile.replace(".ev", "") + ".il");
f.write(ilocstr.getBytes());
f.close();
}
catch(IOException e){
e.printStackTrace();
}
}
}
else
{
RegisterAllocator ra = new RegisterAllocator(blist);
ra.color();
System.out.println(".section\t\".text\"");
System.out.println(".align 4");
for(String s : vartable.keySet())
{
System.out.println(".common\t" + s + ", 4, 4");
}
for(FuncBlock b : blist)
{
System.out.println(".global " + b.name);
System.out.println(".type\t" + b.name + ", #function");
}
for(FuncBlock b : blist)
{
if(b.name.equals("main"))
{
System.out.println(b.getHeader(true));
System.out.println(b.getInstructions(true));
}
}
for(FuncBlock b : blist)
{
if(!b.name.equals("main"))
{
System.out.println(b.getHeader(true));
System.out.println(b.getInstructions(true));
}
}
System.out.println();
System.out.println(".section\t\".rodata\"");
System.out.println(".align 8");
System.out.println(".LLC0:");
System.out.println(".asciz\t\"%d \"");
System.out.println(".align 8");
System.out.println(".LLC1:");
System.out.println(".asciz\t\"%d\\n\"");
System.out.println(".align 8");
System.out.println(".LLC2:");
System.out.println(".asciz\t\"%d\"");
System.out.println();
System.out.println(".common\t_read, 4, 4");
}
//RegisterAllocator ra = new RegisterAllocator(blist);
//ra.color();
}
|
public static void main(String[] args)
{
parseParameters(args);
CommonTokenStream tokens = new CommonTokenStream(createLexer());
EvilParser parser = new EvilParser(tokens);
EvilParser.program_return ret = null;
try
{
ret = parser.program();
}
catch (org.antlr.runtime.RecognitionException e)
{
error(e.toString());
}
CommonTree t = (CommonTree)ret.getTree();
if (_displayAST && t != null)
{
DOTTreeGenerator gen = new DOTTreeGenerator();
StringTemplate st = gen.toDOT(t);
System.out.println(st);
}
/*
To create and invoke a tree parser. Modify with the appropriate
name of the tree parser and the appropriate start rule.
*/
ArrayList<FuncBlock> blist = new ArrayList<FuncBlock>();
HashMap<String, Type> vartable = null;
HashMap<String, StructType> structtable = null;
try
{
CommonTreeNodeStream nodes = new CommonTreeNodeStream(t);
nodes.setTokenStream(tokens);
TypeCheck tparser = new TypeCheck(nodes);
structtable = new HashMap<String, StructType>();
vartable = new HashMap<String, Type>();
tparser.verify(new HashMap<String, FuncType>(), structtable, vartable);
nodes = new CommonTreeNodeStream(t);
nodes.setTokenStream(tokens);
ILOC iloc = new ILOC(nodes);
iloc.generate(blist,structtable,vartable);
}
catch (org.antlr.runtime.RecognitionException e)
{
error(e.toString());
}
String ilocstr = "";
if(!_dumpSPARC)
{
for(FuncBlock b : blist){
if(b.name.equals("main")){
ilocstr += "@function " + b.name + "\n";
}
}
for(FuncBlock b : blist){
if(!b.name.equals("main")){
ilocstr += "@function " + b.name + "\n";
}
}
for(FuncBlock b : blist){
ilocstr += b.getHeader(false) + "\n";
}
for(FuncBlock b : blist){
if(b.name.equals("main")){
ilocstr += b.getInstructions(false) + "\n";
}
}
for(FuncBlock b : blist){
if(!b.name.equals("main")){
ilocstr += b.getInstructions(false) + "\n";
}
}
if(!_dumpIL){
System.out.println(ilocstr);
}
else{
try{
FileOutputStream f = new FileOutputStream(_inputFile.replace(".ev", "") + ".il");
f.write(ilocstr.getBytes());
f.close();
}
catch(IOException e){
e.printStackTrace();
}
}
}
else
{
RegisterAllocator ra = new RegisterAllocator(blist);
ra.color();
System.out.println(".section\t\".text\"");
System.out.println(".align 4");
for(String s : vartable.keySet())
{
System.out.println(".common\t" + s + ", 4, 4");
}
for(FuncBlock b : blist)
{
System.out.println(".global " + b.name);
System.out.println(".type\t" + b.name + ", #function");
}
for(FuncBlock b : blist)
{
if(b.name.equals("main"))
{
System.out.println(b.getHeader(true));
System.out.println(b.getInstructions(true));
System.out.println(" ret\n restore");
}
}
for(FuncBlock b : blist)
{
if(!b.name.equals("main"))
{
System.out.println(b.getHeader(true));
System.out.println(b.getInstructions(true));
System.out.println(" ret\n restore");
}
}
System.out.println();
System.out.println(".section\t\".rodata\"");
System.out.println(".align 8");
System.out.println(".LLC0:");
System.out.println(".asciz\t\"%d \"");
System.out.println(".align 8");
System.out.println(".LLC1:");
System.out.println(".asciz\t\"%d\\n\"");
System.out.println(".align 8");
System.out.println(".LLC2:");
System.out.println(".asciz\t\"%d\"");
System.out.println();
System.out.println(".common\t_read, 4, 4");
}
//RegisterAllocator ra = new RegisterAllocator(blist);
//ra.color();
}
|
diff --git a/src/com/dmdirc/updater/Update.java b/src/com/dmdirc/updater/Update.java
index 2eb217fa2..219d444b0 100644
--- a/src/com/dmdirc/updater/Update.java
+++ b/src/com/dmdirc/updater/Update.java
@@ -1,221 +1,221 @@
/*
* Copyright (c) 2006-2009 Chris Smith, Shane Mc Cormack, Gregory Holmes
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.dmdirc.updater;
import com.dmdirc.Main;
import com.dmdirc.interfaces.UpdateListener;
import com.dmdirc.logger.ErrorLevel;
import com.dmdirc.logger.Logger;
import com.dmdirc.util.DownloadListener;
import com.dmdirc.util.Downloader;
import com.dmdirc.util.WeakList;
import java.util.List;
/**
* Represents a single available update for some component.
*
* @author chris
*/
public final class Update implements DownloadListener {
/** Update component. */
private final UpdateComponent component;
/** Remote version name. */
private final String versionName;
/** Update url. */
private final String url;
/** The progress of the current stage. */
private float progress;
/** A list of registered update listeners. */
private final List<UpdateListener> listeners
= new WeakList<UpdateListener>();
/** Our current status. */
private UpdateStatus status = UpdateStatus.PENDING;
/**
* Creates a new instance of Update, with details from the specified line.
*
* @param updateInfo An update information line from the update server
*/
public Update(final String updateInfo) {
// outofdate client STABLE 20071007 0.5.1 file
final String[] parts = updateInfo.split(" ");
if (parts.length == 6) {
component = UpdateChecker.findComponent(parts[1]);
versionName = parts[4];
url = parts[5];
} else {
component = null;
versionName = null;
url = null;
Logger.appError(ErrorLevel.LOW,
"Invalid update line received from server: ",
new UnsupportedOperationException("line: " + updateInfo));
}
}
/**
* Retrieves the component that this update is for.
*
* @return The component of this update
*/
public UpdateComponent getComponent() {
return component;
}
/**
* Returns the remote version of the component that's available.
*
* @return The remote version number
*/
public String getRemoteVersion() {
return versionName;
}
/**
* Returns the URL where the new update may be downloaded.
*
* @return The URL of the update
*/
public String getUrl() {
return url;
}
/**
* Retrieves the status of this update.
*
* @return This update's status
*/
public UpdateStatus getStatus() {
return status;
}
/**
* Sets the status of this update, and notifies all listeners of the change.
*
* @param newStatus This update's new status
*/
protected void setStatus(final UpdateStatus newStatus) {
status = newStatus;
progress = 0;
for (UpdateListener listener : listeners) {
listener.updateStatusChange(this, status);
}
}
/**
* Removes the specified update listener.
*
* @param o The update listener to remove
*/
public void removeUpdateListener(final Object o) {
listeners.remove(o);
}
/**
* Adds the specified update listener.
*
* @param e The update listener to add
*/
public void addUpdateListener(final UpdateListener e) {
listeners.add(e);
}
/**
* Makes this update download and install itself.
*/
public void doUpdate() {
new Thread(new Runnable() {
/** {@inheritDoc} */
@Override
public void run() {
final String path = Main.getConfigDir() + "update.tmp."
+ Math.round(Math.random() * 1000);
setStatus(UpdateStatus.DOWNLOADING);
try {
Downloader.downloadPage(getUrl(), path, Update.this);
} catch (Throwable ex) {
setStatus(UpdateStatus.ERROR);
Logger.appError(ErrorLevel.MEDIUM, "Error when updating component "
+ component.getName(), ex);
return;
}
setStatus(UpdateStatus.INSTALLING);
try {
final boolean restart = getComponent().doInstall(path);
if (restart) {
setStatus(UpdateStatus.RESTART_NEEDED);
UpdateChecker.removeComponent(getComponent().getName());
} else {
setStatus(UpdateStatus.INSTALLED);
}
} catch (Throwable ex) {
setStatus(UpdateStatus.ERROR);
Logger.appError(ErrorLevel.MEDIUM,
- "Error when updating component " + component, ex);
+ "Error when updating component " + component.getName(), ex);
}
}
}, "Update thread").start();
}
/** {@inheritDoc} */
@Override
public void downloadProgress(final float percent) {
progress = percent;
for (UpdateListener listener : listeners) {
listener.updateProgressChange(this, percent);
}
}
/**
* Retrieves the current progress of the current state of this update.
*
* @return The percentage of the current stage that has been completed
*/
public float getProgress() {
return progress;
}
/** {@inheritDoc} */
@Override
public void setIndeterminate(boolean indeterminate) {
//TODO
}
}
| true
| true
|
public void doUpdate() {
new Thread(new Runnable() {
/** {@inheritDoc} */
@Override
public void run() {
final String path = Main.getConfigDir() + "update.tmp."
+ Math.round(Math.random() * 1000);
setStatus(UpdateStatus.DOWNLOADING);
try {
Downloader.downloadPage(getUrl(), path, Update.this);
} catch (Throwable ex) {
setStatus(UpdateStatus.ERROR);
Logger.appError(ErrorLevel.MEDIUM, "Error when updating component "
+ component.getName(), ex);
return;
}
setStatus(UpdateStatus.INSTALLING);
try {
final boolean restart = getComponent().doInstall(path);
if (restart) {
setStatus(UpdateStatus.RESTART_NEEDED);
UpdateChecker.removeComponent(getComponent().getName());
} else {
setStatus(UpdateStatus.INSTALLED);
}
} catch (Throwable ex) {
setStatus(UpdateStatus.ERROR);
Logger.appError(ErrorLevel.MEDIUM,
"Error when updating component " + component, ex);
}
}
}, "Update thread").start();
}
|
public void doUpdate() {
new Thread(new Runnable() {
/** {@inheritDoc} */
@Override
public void run() {
final String path = Main.getConfigDir() + "update.tmp."
+ Math.round(Math.random() * 1000);
setStatus(UpdateStatus.DOWNLOADING);
try {
Downloader.downloadPage(getUrl(), path, Update.this);
} catch (Throwable ex) {
setStatus(UpdateStatus.ERROR);
Logger.appError(ErrorLevel.MEDIUM, "Error when updating component "
+ component.getName(), ex);
return;
}
setStatus(UpdateStatus.INSTALLING);
try {
final boolean restart = getComponent().doInstall(path);
if (restart) {
setStatus(UpdateStatus.RESTART_NEEDED);
UpdateChecker.removeComponent(getComponent().getName());
} else {
setStatus(UpdateStatus.INSTALLED);
}
} catch (Throwable ex) {
setStatus(UpdateStatus.ERROR);
Logger.appError(ErrorLevel.MEDIUM,
"Error when updating component " + component.getName(), ex);
}
}
}, "Update thread").start();
}
|
diff --git a/src/main/java/fi/csc/microarray/client/visualisation/methods/HierarchicalClustering.java b/src/main/java/fi/csc/microarray/client/visualisation/methods/HierarchicalClustering.java
index 70d27e905..7a3e512e2 100644
--- a/src/main/java/fi/csc/microarray/client/visualisation/methods/HierarchicalClustering.java
+++ b/src/main/java/fi/csc/microarray/client/visualisation/methods/HierarchicalClustering.java
@@ -1,644 +1,647 @@
package fi.csc.microarray.client.visualisation.methods;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.FlowLayout;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.geom.Rectangle2D;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import javax.swing.JCheckBox;
import javax.swing.JComponent;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTabbedPane;
import org.apache.log4j.Logger;
import org.jfree.chart.BioChartFactory;
import org.jfree.chart.ChartRenderingInfo;
import org.jfree.chart.JFreeChart;
import org.jfree.chart.entity.EntityCollection;
import org.jfree.chart.entity.HCTreeNodeEntity;
import org.jfree.chart.entity.HeatMapBlockEntity;
import org.jfree.chart.event.ClusteringTreeChangeEvent;
import org.jfree.chart.event.PlotChangeEvent;
import org.jfree.chart.event.PlotChangeListener;
import org.jfree.chart.labels.StandardHCToolTipGenerator;
import org.jfree.chart.plot.GradientColorPalette;
import org.jfree.chart.plot.HCPlot;
import org.jfree.chart.plot.HCPlot.Selection;
import org.jfree.chart.title.TextTitle;
import org.jfree.data.hc.DataRange;
import org.jfree.data.hc.DataRangeMismatchException;
import org.jfree.data.hc.HCDataset;
import org.jfree.data.hc.HCTreeNode;
import org.jfree.data.hc.HeatMap;
import fi.csc.microarray.client.selection.SelectionEvent;
import fi.csc.microarray.client.selection.IntegratedSelectionManager;
import fi.csc.microarray.client.visualisation.AnnotateListPanel;
import fi.csc.microarray.client.visualisation.TableAnnotationProvider;
import fi.csc.microarray.client.visualisation.Visualisation;
import fi.csc.microarray.client.visualisation.VisualisationFrame;
import fi.csc.microarray.client.visualisation.methods.SelectableChartPanel.SelectionChangeListener;
import fi.csc.microarray.client.visualisation.methods.hc.OrderSuperviser;
import fi.csc.microarray.cluster.ClusterBranchNode;
import fi.csc.microarray.cluster.ClusterLeafNode;
import fi.csc.microarray.cluster.ClusterNode;
import fi.csc.microarray.cluster.ClusterParser;
import fi.csc.microarray.databeans.DataBean;
import fi.csc.microarray.databeans.DataBean.Link;
import fi.csc.microarray.databeans.DataBean.Traversal;
import fi.csc.microarray.databeans.features.QueryResult;
import fi.csc.microarray.databeans.features.Table;
import fi.csc.microarray.exception.ErrorReportAsException;
import fi.csc.microarray.exception.MicroarrayException;
import fi.csc.microarray.module.chipster.MicroarrayModule;
public class HierarchicalClustering extends Visualisation implements PropertyChangeListener, SelectionChangeListener {
public void initialise(VisualisationFrame frame) throws Exception {
super.initialise(frame);
}
protected SelectableChartPanel selectableChartPanel;
private static final Logger logger = Logger.getLogger(HierarchicalClustering.class);
OrderSuperviser orders;
private JPanel paramPanel;
private AnnotateListPanel list;
// Selected indexes in the order of parent data bean
protected Set<Integer> selected = new HashSet<Integer>();
protected DataBean selectionBean;
private HCPlot hcPlot;
private boolean reversed;
protected JPanel zoomChangerPanel;
protected JPanel spaceFiller;
protected JScrollPane scroller;
protected Dimension preferredSize;
private JCheckBox zoomCheckBox;
private class MicroarrayHCToolTipGenerator extends StandardHCToolTipGenerator {
/**
* Creates tooltip for Hierarchical clustering visualisation. The tooltip includes chip and gene names and value.
*
* The code is mainly copied from StandardHCToolTipGenerator
*
* Note! The rows and columns are in different order than in original version of this method. In the Viski library the rows and
* columns are messed up.
*/
@Override
public String generateToolTip(HeatMap heatmap, DataRange rowRange, DataRange columnRange) {
int minRow;
int maxRow;
int minColumn;
int maxColumn;
int rowCounter;
int columnCounter;
int blockCount;
double averageValue;
try {
minRow = rowRange.getLeftBound();
maxRow = rowRange.getRightBound();
minColumn = columnRange.getLeftBound();
maxColumn = columnRange.getRightBound();
} catch (Exception e) {
return "This block contains no data.";
}
if ((minRow == maxRow) && (minColumn == maxColumn)) {
return "(" + heatmap.getRowName(minRow) + "," + heatmap.getColumnName(minColumn) + ") = " + heatmap.get(minRow, minColumn);
}
for (averageValue = 0, blockCount = 0, rowCounter = minRow; rowCounter <= maxRow; rowCounter++) {
for (columnCounter = minColumn; columnCounter <= maxColumn; columnCounter++, blockCount++) {
averageValue += heatmap.get(rowCounter, columnCounter);
}
}
averageValue = averageValue / blockCount;
return "(" + heatmap.getRowName(minRow) + "," + heatmap.getColumnName(minColumn) + ") .. " + "(" + heatmap.getRowName(maxRow) + "," + heatmap.getColumnName(maxColumn) + ") = " + averageValue + " (contains " + (blockCount + 1) + " blocks)";
}
}
@Override
public JPanel getParameterPanel() {
if (paramPanel == null) {
paramPanel = new JPanel();
paramPanel.setPreferredSize(Visualisation.PARAMETER_SIZE);
paramPanel.setLayout(new BorderLayout());
JPanel settings = this.createSettingsPanel();
list = new AnnotateListPanel();
JTabbedPane tabPane = new JTabbedPane();
tabPane.addTab("Settings", settings);
tabPane.addTab("Selected", list);
paramPanel.add(tabPane, BorderLayout.CENTER);
}
return paramPanel;
}
public JPanel createSettingsPanel() {
JPanel settingsPanel = new JPanel();
settingsPanel.setLayout(new GridBagLayout());
settingsPanel.setPreferredSize(Visualisation.PARAMETER_SIZE);
zoomCheckBox = new JCheckBox("Fit to screen", false);
zoomCheckBox.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
setScaledMode(zoomCheckBox.isSelected());
}
});
GridBagConstraints c = new GridBagConstraints();
c.gridy = 0;
c.insets.set(10, 10, 0, 10);
c.anchor = GridBagConstraints.NORTHWEST;
c.fill = GridBagConstraints.HORIZONTAL;
c.weighty = 0;
c.weightx = 1.0;
settingsPanel.add(zoomCheckBox, c);
c.gridy++;
c.fill = GridBagConstraints.BOTH;
c.weighty = 1.0;
settingsPanel.add(new JPanel(), c);
return settingsPanel;
}
/**
* getVisualisation method has too modes. If the genes are clustered the mode is normal and in reversed mode the chips are clustered.
* Both modes are implemented in turns of couple rows which makes the structure very ugly and vulnerable for mistakes. Maybe some day
* there is time to restructure this method.
*
* @throws MicroarrayException
* is parsing or visualisation generation fails
*/
@Override
public JComponent getVisualisation(DataBean data) throws MicroarrayException {
try {
// First find a dataset to which user's selections are connected to.
// There is no point to connect selections to cluster tree data because it's not possible to visualise it
// with other visualisation methods. The parent that contains the expression values is used instead.
List<DataBean> selectionBeans = data.traverseLinks(new Link[] { Link.DERIVATION }, Traversal.DIRECT);
// First one is the correct one
if (selectionBeans.size() > 1) {
selectionBean = selectionBeans.get(1);
}
if (selectionBean == null) {
throw new ErrorReportAsException("Source dataset not found", "Hierarchical clustering " + "needs its source dataset.", " Select both HC and its source dataset by keeping \n" + "Ctrl key pressed and right click with mouse over one of them to create \n" + "derivation link from the original dataset to \n" + "clustered one.");
}
// Connect selections to correct dataset
TableAnnotationProvider annotationProvider = new TableAnnotationProvider(selectionBean);
// Create heatmap
QueryResult heatMapFeature = data.queryFeatures("/clusters/hierarchical/heatmap");
Table heatMapDataIterator = heatMapFeature.asTable();
// Count heatmap rows
int rowCount = 0;
while (heatMapDataIterator.nextRow()) {
rowCount++;
}
// Count columns that contain expression values
LinkedList<String> columns = new LinkedList<String>();
for (String columnName : heatMapDataIterator.getColumnNames()) {
if (columnName.startsWith("chip.")) {
columns.add(columnName);
} else {
logger.debug("Column skipped in HC: " + columnName);
}
}
int columnCount = columns.size();
// Parse HC tree and check which way we have clustered
String hcTree = data.queryFeatures("/clusters/hierarchical/tree").asStrings().iterator().next();
ClusterBranchNode tree = new ClusterParser(hcTree).getTree();
this.reversed = hcTree.contains("chip.");
// Adjust gene count for sampling
HeatMap heatMap = null;
+ int dataCount;
if (!reversed) {
rowCount = tree.getLeafCount(); // heatmap has more genes than tree (sampling done), correct for it
heatMap = new HeatMap("Heatmap", rowCount, columnCount);
+ dataCount = rowCount;
} else {
heatMap = new HeatMap("Heatmap", columnCount, rowCount);
+ dataCount = columnCount;
}
// Go through the tree to find its biggest height
int initialHeight = getTreeHeight(tree);
// Read the tree and fill the treeToId map
List<String> treeToId = new ArrayList<String>();
- treeToId.addAll(Collections.nCopies(rowCount, (String) null));
+ treeToId.addAll(Collections.nCopies(dataCount, (String) null));
HCTreeNode root = readTree(tree, 0, initialHeight, treeToId);
orders = new OrderSuperviser();
orders.setTreeToId(treeToId);
Table heatMapData = data.queryFeatures("/clusters/hierarchical/heatmap").asTable();
List<Integer> treeToBean = new ArrayList<Integer>();
treeToBean.addAll(Collections.nCopies(rowCount, -1));
int row = -1; // This is increased to 0 in the beginning of the
// loop
int originalRow = 0;
while (heatMapData.nextRow()) {
if (!reversed) {
// Find the row number in heatMap corresponding the name of
// this row
String key = translate(heatMapData.getStringValue(" "));
if (orders.idToTree(key) != -1) { // if the id is found
row = orders.idToTree(key);
treeToBean.set(row, originalRow);
originalRow++;
} else {
continue;
}
logger.debug("Adding a new row to heatmap, name: " + heatMapData.getStringValue(" ") + "\tto row: " + row);
} else {
// reversed row is a column, just use the order from the
// iteration
row++;
}
String geneName = heatMapData.getStringValue(" ");
geneName = annotationProvider.getAnnotatedRowname(geneName);
if (!reversed) {
heatMap.setRowName(row, geneName);
} else {
heatMap.setColumnName(row, geneName);
}
int i = -1;
for (String columnName : columns) {
if (!reversed) {
// column index, just use the order from the iteration
i++;
} else {
i = orders.idToTree(columnName);
logger.debug("Adding a new row to heatmap (reversed), name: " + columnName + "\tto row: " + i);
}
if (!reversed) {
heatMap.update(row, i, heatMapData.getFloatValue(columnName));
} else {
heatMap.update(i, row, heatMapData.getFloatValue(columnName));
}
}
}
orders.setTreeToBean(treeToBean);
// Set column names (row names if reversed)
int i = -1; // increased once before action
for (String columnName : columns) {
String sampleName = columnName.substring("chip.".length());
String realName = data.queryFeatures("/phenodata/linked/describe/" + sampleName).asString();
if (!reversed) {
// column index, just use the order from the iteration
i++;
} else {
i = orders.idToTree(columnName);
logger.debug("Adding a new row to heatmap (reversed), name: " + columnName + "\tto row: " + i);
}
if (!reversed) {
heatMap.setColumnName(i, realName);
} else {
heatMap.setRowName(i, realName);
}
}
HCDataset dataset = new HCDataset(heatMap, root, null);
// create the chart...
boolean tooltips = true;
JFreeChart chart = BioChartFactory.createHCChart("Hierarchical Clustering", // chart
// title
dataset, // data
tooltips, // tooltips?
false // URLs?
);
// set special tooltips to hcChart
if (chart.getPlot() instanceof HCPlot) {
HCPlot hcPlot = (HCPlot) chart.getPlot();
this.hcPlot = hcPlot;
orders.setPlot(hcPlot);
this.hcPlot.addChangeListener(new PlotChangeListener() {
public void plotChanged(PlotChangeEvent event) {
if (event instanceof ClusteringTreeChangeEvent) {
HierarchicalClustering.this.orders.updateVisibleIndexes();
HierarchicalClustering.this.updateSelectionsFromApplication(false);
}
}
});
// Set tooltips
if (tooltips) {
hcPlot.setToolTipGenerator(new MicroarrayHCToolTipGenerator());
}
// Colors
double min = Heatmap.getMinValue(dataset.getHeatMap());
double max = Heatmap.getMaxValue(dataset.getHeatMap());
GradientColorPalette colors = new GradientColorPalette(new double[] { min, max }, new Color[] { Color.BLUE, Color.BLACK, Color.RED });
hcPlot.setColoring(colors);
}
chart.setTitle((TextTitle) null);
selectableChartPanel = new SelectableChartPanel(chart, this, false);
selectableChartPanel.getChartPanel().addChartMouseListener((HCPlot) chart.getPlot());
updateSelectionsFromApplication(false);
application.addClientEventListener(this);
int blockSize = 10;
int width = (int) (heatMap.getColumnsCount() * blockSize + hcPlot.getRowTreeSize() + hcPlot.getRowNamesSize() + hcPlot.getLeftMarginSize() + hcPlot.getRightMarginSize());
// Column tree not visible
int height = (int) (heatMap.getRowCount() * blockSize + hcPlot.getColumnNamesSize() + hcPlot.getTopMarginSize() + hcPlot.getBottomMarginSize());
preferredSize = new Dimension(width, height);
zoomChangerPanel = new JPanel(new BorderLayout());
spaceFiller = new JPanel();
((FlowLayout) spaceFiller.getLayout()).setAlignment(FlowLayout.LEFT);
spaceFiller.setBackground(Color.white);
scroller = new JScrollPane(spaceFiller);
setScaledMode(false);
return zoomChangerPanel;
} catch (Exception e) {
// these are very tricky, mostly caused by bad data
logger.error(e); // log actual cause
throw new ErrorReportAsException("Hierarchical clustering cannot be shown.", "The problem is probably caused by unsupported data, such as gene names that have illegal characters in them.", e);
}
}
public void setScaledMode(boolean scaled) {
/*
* Ugly way to change zoom level by changing containing panel layout and scroller existence, but JFreeChart scaling is little bit
* problematic in this kind of usage.
*/
if (scaled) {
spaceFiller.remove(selectableChartPanel);
zoomChangerPanel.remove(scroller);
zoomChangerPanel.add(selectableChartPanel, BorderLayout.CENTER);
selectableChartPanel.setPreferredSize(null);
} else {
spaceFiller.add(selectableChartPanel);
zoomChangerPanel.remove(selectableChartPanel);
zoomChangerPanel.add(scroller, BorderLayout.CENTER);
selectableChartPanel.setPreferredSize(preferredSize);
}
zoomChangerPanel.validate();
zoomChangerPanel.repaint();
}
/**
* Translates name to parenthesis tree format. This translation is required because R script does it.
*/
private String translate(String gene) {
while (gene.startsWith(" ") || gene.startsWith("(")) {
gene = gene.substring(1);
}
while (gene.endsWith(" ") || gene.endsWith(")")) {
gene = gene.substring(0, gene.length() - 1);
}
gene = gene.replace("(", "-").replace(")", "-").replace(" ", "_");
while (gene.contains("--")) {
gene = gene.replace("--", "-");
}
return gene;
}
private int getTreeHeight(ClusterNode tree) {
if (tree instanceof ClusterLeafNode) {
return 0;
} else {
int left = getTreeHeight(((ClusterBranchNode) tree).getLeftBranch()) + 1;
int right = getTreeHeight(((ClusterBranchNode) tree).getRightBranch()) + 1;
return left > right ? left : right;
}
}
private HCTreeNode readTree(ClusterNode tree, int index, int height, List<String> treeToId) throws DataRangeMismatchException {
if (tree instanceof ClusterLeafNode) {
String gene = ((ClusterLeafNode) tree).getGene();
treeToId.set(index, gene);
logger.debug("LeafNode: " + ((ClusterLeafNode) tree).getGene() + "in index: " + index);
return new HCTreeNode(0, index); // height is zero
} else {
HCTreeNode node = new HCTreeNode(height);
HCTreeNode leftChild = readTree(((ClusterBranchNode) tree).getLeftBranch(), index, height - 1, treeToId);
node.setLeftChild(leftChild);
HCTreeNode rightChild = readTree(((ClusterBranchNode) tree).getRightBranch(), node.getDataRange().getRightBound() + 1, height - 1, treeToId);
node.setRightChild(rightChild);
return node;
}
}
@Override
public boolean canVisualise(DataBean bean) throws MicroarrayException {
DataBean parentBean = MicroarrayModule.getProperSource(bean);
return bean.isContentTypeCompatitible("application/x-treeview") && parentBean != null && parentBean.hasTypeTag(MicroarrayModule.TypeTags.NORMALISED_EXPRESSION_VALUES);
}
public void selectionChanged(Rectangle2D.Double selectionRect) {
if (selectionRect == null) {
selected.clear();
} else {
orders.updateVisibleIndexes();
ChartRenderingInfo info = selectableChartPanel.getChartPanel().getChartRenderingInfo();
EntityCollection entities = info.getEntityCollection();
Set<Integer> newSelection = new HashSet<Integer>();
for (Object obj : entities.getEntities()) {
// Don't clear the selection if tree was clicked
if (obj instanceof HCTreeNodeEntity) {
HCTreeNodeEntity entity = (HCTreeNodeEntity) obj;
if (entity.getArea().intersects(selectionRect)) {
return;
}
}
if (obj instanceof HeatMapBlockEntity) {
HeatMapBlockEntity entity = (HeatMapBlockEntity) obj;
if (entity.getArea().intersects(selectionRect)) {
if (!reversed) {
newSelection.addAll(orders.visibleToBean(entity.getRow()));
} else {
newSelection.add(entity.getColumn());
}
}
}
}
// New selections can't be put directly to selectedIndexes, because every other occurrence
// of block (several in one line) inside selection rectangle would undo selection
for (Integer row : newSelection) {
if (selected.contains(row)) {
selected.remove(row);
} else {
selected.add(row);
}
}
showSelection(true);
}
}
public void propertyChange(PropertyChangeEvent evt) {
if (evt instanceof SelectionEvent && evt.getSource() != this && ((SelectionEvent) evt).getData() == selectionBean) {
updateSelectionsFromApplication(false);
}
}
protected void updateSelectionsFromApplication(boolean dispatchEvent) {
IntegratedSelectionManager manager = application.getSelectionManager().getSelectionManager(selectionBean);
orders.updateVisibleIndexes();
selected.clear();
for (int i : manager.getSelectionAsRows()) {
selected.add(i);
}
showSelection(dispatchEvent);
}
protected void showSelection(boolean dispatchEvent) {
Selection[] detailedSelection;
if (!reversed) {
detailedSelection = calculateRowSelectionDetails();
} else {
detailedSelection = calculateColumnSelectionDetails();
}
hcPlot.showSelection(detailedSelection, !reversed);
list.setSelectedRows(selected, this, dispatchEvent, selectionBean);
}
private Selection[] calculateRowSelectionDetails() {
// Number of rows represented by each visible row
int[] closedRows = orders.getCountOfVisibleReferences();
// Number of selected in each visible row
int[] selectedRows = new int[closedRows.length];
// Is each visible row fully, partially or not at all selected
Selection[] detailedSelection = new Selection[closedRows.length];
for (int selectedRow : selected) {
selectedRows[orders.beanToVisible(selectedRow)]++;
}
for (int i = 0; i < selectedRows.length; i++) {
if (selectedRows[i] == 0) {
detailedSelection[i] = Selection.NO;
} else if (selectedRows[i] == closedRows[i]) {
detailedSelection[i] = Selection.YES;
} else if (selectedRows[i] < closedRows[i]) {
detailedSelection[i] = Selection.PARTIAL;
}
}
return detailedSelection;
}
private Selection[] calculateColumnSelectionDetails() {
// Columns are already in right order, so just convert the selections to right format
Selection[] detailedSelection = new Selection[hcPlot.getDataset().getHeatMap().getColumnsCount()];
for (int i = 0; i < detailedSelection.length; i++) {
if (selected.contains(i)) {
detailedSelection[i] = Selection.YES;
} else {
detailedSelection[i] = Selection.NO;
}
}
return detailedSelection;
}
}
| false
| true
|
/**
 * Builds the hierarchical-clustering visualisation (heatmap + dendrogram) for the
 * given cluster-tree dataset. Has two interleaved modes: normal (genes clustered)
 * and reversed (chips clustered, detected from "chip." appearing in the tree).
 *
 * Fix: treeToId must be sized by the number of tree leaves (dataCount) — rows in
 * normal mode but COLUMNS in reversed mode. The previous code always used
 * rowCount, which is wrong when chips were clustered: readTree() then writes
 * leaf ids at indexes up to columnCount-1 into a list sized rowCount.
 *
 * @throws MicroarrayException if parsing or visualisation generation fails
 */
public JComponent getVisualisation(DataBean data) throws MicroarrayException {
	try {

		// First find a dataset to which user's selections are connected to.
		// There is no point to connect selections to cluster tree data because it's not possible to visualise it
		// with other visualisation methods. The parent that contains the expression values is used instead.
		List<DataBean> selectionBeans = data.traverseLinks(new Link[] { Link.DERIVATION }, Traversal.DIRECT);

		// First one is the correct one
		if (selectionBeans.size() > 1) {
			selectionBean = selectionBeans.get(1);
		}

		if (selectionBean == null) {
			throw new ErrorReportAsException("Source dataset not found", "Hierarchical clustering " + "needs its source dataset.", " Select both HC and its source dataset by keeping \n" + "Ctrl key pressed and right click with mouse over one of them to create \n" + "derivation link from the original dataset to \n" + "clustered one.");
		}

		// Connect selections to correct dataset
		TableAnnotationProvider annotationProvider = new TableAnnotationProvider(selectionBean);

		// Create heatmap
		QueryResult heatMapFeature = data.queryFeatures("/clusters/hierarchical/heatmap");
		Table heatMapDataIterator = heatMapFeature.asTable();

		// Count heatmap rows
		int rowCount = 0;
		while (heatMapDataIterator.nextRow()) {
			rowCount++;
		}

		// Count columns that contain expression values
		LinkedList<String> columns = new LinkedList<String>();
		for (String columnName : heatMapDataIterator.getColumnNames()) {
			if (columnName.startsWith("chip.")) {
				columns.add(columnName);
			} else {
				logger.debug("Column skipped in HC: " + columnName);
			}
		}
		int columnCount = columns.size();

		// Parse HC tree and check which way we have clustered
		String hcTree = data.queryFeatures("/clusters/hierarchical/tree").asStrings().iterator().next();
		ClusterBranchNode tree = new ClusterParser(hcTree).getTree();
		this.reversed = hcTree.contains("chip.");

		// Adjust gene count for sampling
		HeatMap heatMap = null;
		// Number of clustered elements, i.e. tree leaves: genes (rows) in
		// normal mode, chips (columns) in reversed mode
		int dataCount;
		if (!reversed) {
			rowCount = tree.getLeafCount(); // heatmap has more genes than tree (sampling done), correct for it
			heatMap = new HeatMap("Heatmap", rowCount, columnCount);
			dataCount = rowCount;
		} else {
			heatMap = new HeatMap("Heatmap", columnCount, rowCount);
			dataCount = columnCount;
		}

		// Go through the tree to find its biggest height
		int initialHeight = getTreeHeight(tree);

		// Read the tree and fill the treeToId map (sized by leaf count, see above)
		List<String> treeToId = new ArrayList<String>();
		treeToId.addAll(Collections.nCopies(dataCount, (String) null));
		HCTreeNode root = readTree(tree, 0, initialHeight, treeToId);

		orders = new OrderSuperviser();
		orders.setTreeToId(treeToId);

		Table heatMapData = data.queryFeatures("/clusters/hierarchical/heatmap").asTable();

		List<Integer> treeToBean = new ArrayList<Integer>();
		treeToBean.addAll(Collections.nCopies(rowCount, -1));

		int row = -1; // This is increased to 0 in the beginning of the
		// loop
		int originalRow = 0;

		while (heatMapData.nextRow()) {
			if (!reversed) {
				// Find the row number in heatMap corresponding the name of
				// this row
				String key = translate(heatMapData.getStringValue(" "));
				if (orders.idToTree(key) != -1) { // if the id is found
					row = orders.idToTree(key);
					treeToBean.set(row, originalRow);
					originalRow++;
				} else {
					continue;
				}
				logger.debug("Adding a new row to heatmap, name: " + heatMapData.getStringValue(" ") + "\tto row: " + row);
			} else {
				// reversed row is a column, just use the order from the
				// iteration
				row++;
			}

			String geneName = heatMapData.getStringValue(" ");
			geneName = annotationProvider.getAnnotatedRowname(geneName);

			if (!reversed) {
				heatMap.setRowName(row, geneName);
			} else {
				heatMap.setColumnName(row, geneName);
			}

			int i = -1;
			for (String columnName : columns) {
				if (!reversed) {
					// column index, just use the order from the iteration
					i++;
				} else {
					i = orders.idToTree(columnName);
					logger.debug("Adding a new row to heatmap (reversed), name: " + columnName + "\tto row: " + i);
				}

				if (!reversed) {
					heatMap.update(row, i, heatMapData.getFloatValue(columnName));
				} else {
					heatMap.update(i, row, heatMapData.getFloatValue(columnName));
				}
			}
		}
		orders.setTreeToBean(treeToBean);

		// Set column names (row names if reversed)
		int i = -1; // increased once before action
		for (String columnName : columns) {
			String sampleName = columnName.substring("chip.".length());
			String realName = data.queryFeatures("/phenodata/linked/describe/" + sampleName).asString();

			if (!reversed) {
				// column index, just use the order from the iteration
				i++;
			} else {
				i = orders.idToTree(columnName);
				logger.debug("Adding a new row to heatmap (reversed), name: " + columnName + "\tto row: " + i);
			}

			if (!reversed) {
				heatMap.setColumnName(i, realName);
			} else {
				heatMap.setRowName(i, realName);
			}
		}

		HCDataset dataset = new HCDataset(heatMap, root, null);

		// create the chart...
		boolean tooltips = true;
		JFreeChart chart = BioChartFactory.createHCChart("Hierarchical Clustering", // chart
				// title
				dataset, // data
				tooltips, // tooltips?
				false // URLs?
		);

		// set special tooltips to hcChart
		if (chart.getPlot() instanceof HCPlot) {
			HCPlot hcPlot = (HCPlot) chart.getPlot();
			this.hcPlot = hcPlot;
			orders.setPlot(hcPlot);

			// Keep selections in sync when the user opens/closes tree branches
			this.hcPlot.addChangeListener(new PlotChangeListener() {
				public void plotChanged(PlotChangeEvent event) {
					if (event instanceof ClusteringTreeChangeEvent) {
						HierarchicalClustering.this.orders.updateVisibleIndexes();
						HierarchicalClustering.this.updateSelectionsFromApplication(false);
					}
				}
			});

			// Set tooltips
			if (tooltips) {
				hcPlot.setToolTipGenerator(new MicroarrayHCToolTipGenerator());
			}

			// Colors
			double min = Heatmap.getMinValue(dataset.getHeatMap());
			double max = Heatmap.getMaxValue(dataset.getHeatMap());
			GradientColorPalette colors = new GradientColorPalette(new double[] { min, max }, new Color[] { Color.BLUE, Color.BLACK, Color.RED });
			hcPlot.setColoring(colors);
		}

		chart.setTitle((TextTitle) null);

		selectableChartPanel = new SelectableChartPanel(chart, this, false);
		selectableChartPanel.getChartPanel().addChartMouseListener((HCPlot) chart.getPlot());
		updateSelectionsFromApplication(false);
		application.addClientEventListener(this);

		// Preferred size for the unscaled (scrollable) mode
		int blockSize = 10;
		int width = (int) (heatMap.getColumnsCount() * blockSize + hcPlot.getRowTreeSize() + hcPlot.getRowNamesSize() + hcPlot.getLeftMarginSize() + hcPlot.getRightMarginSize());
		// Column tree not visible
		int height = (int) (heatMap.getRowCount() * blockSize + hcPlot.getColumnNamesSize() + hcPlot.getTopMarginSize() + hcPlot.getBottomMarginSize());
		preferredSize = new Dimension(width, height);

		zoomChangerPanel = new JPanel(new BorderLayout());
		spaceFiller = new JPanel();
		((FlowLayout) spaceFiller.getLayout()).setAlignment(FlowLayout.LEFT);
		spaceFiller.setBackground(Color.white);
		scroller = new JScrollPane(spaceFiller);

		setScaledMode(false);
		return zoomChangerPanel;

	} catch (Exception e) {
		// these are very tricky, mostly caused by bad data
		logger.error(e); // log actual cause
		throw new ErrorReportAsException("Hierarchical clustering cannot be shown.", "The problem is probably caused by unsupported data, such as gene names that have illegal characters in them.", e);
	}
}
|
/**
 * Builds the hierarchical-clustering visualisation (heatmap + dendrogram) for
 * the given cluster-tree dataset.
 *
 * Has two interleaved modes: normal (genes clustered) and reversed (chips
 * clustered, detected from "chip." appearing in the tree text). Both modes are
 * implemented in turns of a couple of rows each, which makes the structure
 * fragile — be careful when editing.
 *
 * @param data the cluster-tree data bean ("application/x-treeview")
 * @return the root panel of the visualisation
 * @throws MicroarrayException if parsing or visualisation generation fails
 */
public JComponent getVisualisation(DataBean data) throws MicroarrayException {
	try {
		// First find a dataset to which user's selections are connected to.
		// There is no point to connect selections to cluster tree data because it's not possible to visualise it
		// with other visualisation methods. The parent that contains the expression values is used instead.
		List<DataBean> selectionBeans = data.traverseLinks(new Link[] { Link.DERIVATION }, Traversal.DIRECT);
		// First one is the correct one
		// NOTE(review): the comment says "first" but index 1 is taken — presumably
		// index 0 is the tree bean itself; confirm against traverseLinks ordering.
		if (selectionBeans.size() > 1) {
			selectionBean = selectionBeans.get(1);
		}
		if (selectionBean == null) {
			throw new ErrorReportAsException("Source dataset not found", "Hierarchical clustering " + "needs its source dataset.", " Select both HC and its source dataset by keeping \n" + "Ctrl key pressed and right click with mouse over one of them to create \n" + "derivation link from the original dataset to \n" + "clustered one.");
		}
		// Connect selections to correct dataset
		TableAnnotationProvider annotationProvider = new TableAnnotationProvider(selectionBean);
		// Create heatmap
		QueryResult heatMapFeature = data.queryFeatures("/clusters/hierarchical/heatmap");
		Table heatMapDataIterator = heatMapFeature.asTable();
		// Count heatmap rows
		int rowCount = 0;
		while (heatMapDataIterator.nextRow()) {
			rowCount++;
		}
		// Count columns that contain expression values
		LinkedList<String> columns = new LinkedList<String>();
		for (String columnName : heatMapDataIterator.getColumnNames()) {
			if (columnName.startsWith("chip.")) {
				columns.add(columnName);
			} else {
				logger.debug("Column skipped in HC: " + columnName);
			}
		}
		int columnCount = columns.size();
		// Parse HC tree and check which way we have clustered
		String hcTree = data.queryFeatures("/clusters/hierarchical/tree").asStrings().iterator().next();
		ClusterBranchNode tree = new ClusterParser(hcTree).getTree();
		this.reversed = hcTree.contains("chip.");
		// Adjust gene count for sampling
		HeatMap heatMap = null;
		// Number of clustered elements, i.e. tree leaves: genes (rows) in
		// normal mode, chips (columns) in reversed mode
		int dataCount;
		if (!reversed) {
			rowCount = tree.getLeafCount(); // heatmap has more genes than tree (sampling done), correct for it
			heatMap = new HeatMap("Heatmap", rowCount, columnCount);
			dataCount = rowCount;
		} else {
			heatMap = new HeatMap("Heatmap", columnCount, rowCount);
			dataCount = columnCount;
		}
		// Go through the tree to find its biggest height
		int initialHeight = getTreeHeight(tree);
		// Read the tree and fill the treeToId map (sized by leaf count, not
		// rowCount — in reversed mode the leaves are columns)
		List<String> treeToId = new ArrayList<String>();
		treeToId.addAll(Collections.nCopies(dataCount, (String) null));
		HCTreeNode root = readTree(tree, 0, initialHeight, treeToId);
		orders = new OrderSuperviser();
		orders.setTreeToId(treeToId);
		// Re-query: the first Table was exhausted by the row-counting loop above
		Table heatMapData = data.queryFeatures("/clusters/hierarchical/heatmap").asTable();
		List<Integer> treeToBean = new ArrayList<Integer>();
		treeToBean.addAll(Collections.nCopies(rowCount, -1));
		int row = -1; // This is increased to 0 in the beginning of the
		// loop
		int originalRow = 0;
		while (heatMapData.nextRow()) {
			if (!reversed) {
				// Find the row number in heatMap corresponding the name of
				// this row
				String key = translate(heatMapData.getStringValue(" "));
				if (orders.idToTree(key) != -1) { // if the id is found
					row = orders.idToTree(key);
					treeToBean.set(row, originalRow);
					originalRow++;
				} else {
					// gene not in the (possibly sampled) tree: skip the data row
					continue;
				}
				logger.debug("Adding a new row to heatmap, name: " + heatMapData.getStringValue(" ") + "\tto row: " + row);
			} else {
				// reversed row is a column, just use the order from the
				// iteration
				row++;
			}
			String geneName = heatMapData.getStringValue(" ");
			geneName = annotationProvider.getAnnotatedRowname(geneName);
			if (!reversed) {
				heatMap.setRowName(row, geneName);
			} else {
				heatMap.setColumnName(row, geneName);
			}
			// Copy this data row's expression values into the heatmap
			int i = -1;
			for (String columnName : columns) {
				if (!reversed) {
					// column index, just use the order from the iteration
					i++;
				} else {
					i = orders.idToTree(columnName);
					logger.debug("Adding a new row to heatmap (reversed), name: " + columnName + "\tto row: " + i);
				}
				if (!reversed) {
					heatMap.update(row, i, heatMapData.getFloatValue(columnName));
				} else {
					heatMap.update(i, row, heatMapData.getFloatValue(columnName));
				}
			}
		}
		orders.setTreeToBean(treeToBean);
		// Set column names (row names if reversed)
		int i = -1; // increased once before action
		for (String columnName : columns) {
			String sampleName = columnName.substring("chip.".length());
			String realName = data.queryFeatures("/phenodata/linked/describe/" + sampleName).asString();
			if (!reversed) {
				// column index, just use the order from the iteration
				i++;
			} else {
				i = orders.idToTree(columnName);
				logger.debug("Adding a new row to heatmap (reversed), name: " + columnName + "\tto row: " + i);
			}
			if (!reversed) {
				heatMap.setColumnName(i, realName);
			} else {
				heatMap.setRowName(i, realName);
			}
		}
		HCDataset dataset = new HCDataset(heatMap, root, null);
		// create the chart...
		boolean tooltips = true;
		JFreeChart chart = BioChartFactory.createHCChart("Hierarchical Clustering", // chart
		// title
		dataset, // data
		tooltips, // tooltips?
		false // URLs?
		);
		// set special tooltips to hcChart
		if (chart.getPlot() instanceof HCPlot) {
			HCPlot hcPlot = (HCPlot) chart.getPlot();
			this.hcPlot = hcPlot;
			orders.setPlot(hcPlot);
			// Keep selections in sync when the user opens/closes tree branches
			this.hcPlot.addChangeListener(new PlotChangeListener() {
				public void plotChanged(PlotChangeEvent event) {
					if (event instanceof ClusteringTreeChangeEvent) {
						HierarchicalClustering.this.orders.updateVisibleIndexes();
						HierarchicalClustering.this.updateSelectionsFromApplication(false);
					}
				}
			});
			// Set tooltips
			if (tooltips) {
				hcPlot.setToolTipGenerator(new MicroarrayHCToolTipGenerator());
			}
			// Colors
			double min = Heatmap.getMinValue(dataset.getHeatMap());
			double max = Heatmap.getMaxValue(dataset.getHeatMap());
			GradientColorPalette colors = new GradientColorPalette(new double[] { min, max }, new Color[] { Color.BLUE, Color.BLACK, Color.RED });
			hcPlot.setColoring(colors);
		}
		chart.setTitle((TextTitle) null);
		selectableChartPanel = new SelectableChartPanel(chart, this, false);
		selectableChartPanel.getChartPanel().addChartMouseListener((HCPlot) chart.getPlot());
		updateSelectionsFromApplication(false);
		application.addClientEventListener(this);
		// Preferred size for the unscaled (scrollable) mode
		int blockSize = 10;
		int width = (int) (heatMap.getColumnsCount() * blockSize + hcPlot.getRowTreeSize() + hcPlot.getRowNamesSize() + hcPlot.getLeftMarginSize() + hcPlot.getRightMarginSize());
		// Column tree not visible
		int height = (int) (heatMap.getRowCount() * blockSize + hcPlot.getColumnNamesSize() + hcPlot.getTopMarginSize() + hcPlot.getBottomMarginSize());
		preferredSize = new Dimension(width, height);
		zoomChangerPanel = new JPanel(new BorderLayout());
		spaceFiller = new JPanel();
		((FlowLayout) spaceFiller.getLayout()).setAlignment(FlowLayout.LEFT);
		spaceFiller.setBackground(Color.white);
		scroller = new JScrollPane(spaceFiller);
		setScaledMode(false);
		return zoomChangerPanel;
	} catch (Exception e) {
		// these are very tricky, mostly caused by bad data
		logger.error(e); // log actual cause
		throw new ErrorReportAsException("Hierarchical clustering cannot be shown.", "The problem is probably caused by unsupported data, such as gene names that have illegal characters in them.", e);
	}
}
|
diff --git a/src/main/java/org/tongji/mahoutplatform/recommender/evaluation/AbstractKFoldCrossRecommenderEvaluator.java b/src/main/java/org/tongji/mahoutplatform/recommender/evaluation/AbstractKFoldCrossRecommenderEvaluator.java
index ae1d5a1..7771236 100644
--- a/src/main/java/org/tongji/mahoutplatform/recommender/evaluation/AbstractKFoldCrossRecommenderEvaluator.java
+++ b/src/main/java/org/tongji/mahoutplatform/recommender/evaluation/AbstractKFoldCrossRecommenderEvaluator.java
@@ -1,452 +1,452 @@
package org.tongji.mahoutplatform.recommender.evaluation;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.mahout.cf.taste.common.NoSuchItemException;
import org.apache.mahout.cf.taste.common.NoSuchUserException;
import org.apache.mahout.cf.taste.common.TasteException;
import org.apache.mahout.cf.taste.eval.DataModelBuilder;
import org.apache.mahout.cf.taste.eval.RecommenderBuilder;
import org.apache.mahout.cf.taste.impl.common.FastByIDMap;
import org.apache.mahout.cf.taste.impl.common.FullRunningAverageAndStdDev;
import org.apache.mahout.cf.taste.impl.common.RunningAverageAndStdDev;
import org.apache.mahout.cf.taste.impl.model.GenericDataModel;
import org.apache.mahout.cf.taste.impl.model.GenericUserPreferenceArray;
import org.apache.mahout.cf.taste.model.DataModel;
import org.apache.mahout.cf.taste.model.Preference;
import org.apache.mahout.cf.taste.model.PreferenceArray;
import org.apache.mahout.cf.taste.recommender.Recommender;
import org.apache.mahout.common.RandomUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tongji.mahoutplatform.recommender.data.KFoldCrossFileDataModel;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
public abstract class AbstractKFoldCrossRecommenderEvaluator {
private static final Logger log = LoggerFactory.getLogger(AbstractKFoldCrossRecommenderEvaluator.class);
private final Random random;
private float maxPreference;
private float minPreference;
private int kFold;
public AbstractKFoldCrossRecommenderEvaluator() {
random = RandomUtils.getRandom();
maxPreference = Float.NaN;
minPreference = Float.NaN;
}
public final float getMaxPreference() {
return maxPreference;
}
public final void setMaxPreference(float maxPreference) {
this.maxPreference = maxPreference;
}
public final float getMinPreference() {
return minPreference;
}
public final void setMinPreference(float minPreference) {
this.minPreference = minPreference;
}
public double evaluate(RecommenderBuilder recommenderBuilder,
DataModelBuilder dataModelBuilder,
DataModel dataModel,
int kFold) throws TasteException{
return this.evaluate(recommenderBuilder, dataModelBuilder, dataModel, kFold, 1.0);
}
public double evaluate(RecommenderBuilder recommenderBuilder,
DataModelBuilder dataModelBuilder, DataModel dataModel, int kFold,
double evaluationPercentage) throws TasteException {
Preconditions.checkNotNull(recommenderBuilder);
Preconditions.checkNotNull(dataModel);
Preconditions.checkArgument(kFold > 0, "Invalid kFold: " + kFold);
Preconditions.checkArgument(evaluationPercentage >= 0.0
&& evaluationPercentage <= 1.0,
"Invalid evaluationPercentage: " + evaluationPercentage);
this.kFold = kFold;
log.info("Beginning evaluation using {} FoldCross of {}", kFold,
dataModel);
int numPrefs = ((KFoldCrossFileDataModel) dataModel).getNumPrefs();
int eachFoldNumPrefs = numPrefs / kFold;
ArrayList<Preference> allPrefs = (ArrayList<Preference>) ((KFoldCrossFileDataModel) dataModel)
.getAllPrefs();
int numUsers = dataModel.getNumUsers();
List<FastByIDMap<PreferenceArray>> allTrainingPrefs = new ArrayList<FastByIDMap<PreferenceArray>>();
for (int i = 0; i < kFold; i++) {
allTrainingPrefs.add(new FastByIDMap<PreferenceArray>(
1 + (int) (evaluationPercentage * numUsers)));
}
FastByIDMap<PreferenceArray> testPrefs = null;
int randomNums[] = generateRandomNums(numPrefs);
for (int i = 0; i < kFold; i++) {
for (int j = i * eachFoldNumPrefs; j < ((i + 1) * eachFoldNumPrefs); j++) {
Preference pref = allPrefs.get(randomNums[j]);
long userID = pref.getUserID();
long itemID = pref.getItemID();
if (allTrainingPrefs.get(i).containsKey(userID)) {
PreferenceArray prefArray = allTrainingPrefs.get(i).get(
userID);
PreferenceArray newPrefArray = new GenericUserPreferenceArray(
prefArray.length() + 1);
for (int k = 0; k < prefArray.length(); k++) {
newPrefArray.setItemID(k, prefArray.getItemID(k));
newPrefArray.setValue(k, prefArray.getValue(k));
}
newPrefArray.setUserID(0, userID);
newPrefArray.setItemID(prefArray.length(), itemID);
newPrefArray.setValue(prefArray.length(), pref.getValue());
allTrainingPrefs.get(i).remove(userID);
allTrainingPrefs.get(i).put(userID, newPrefArray);
} else {
PreferenceArray prefArray = new GenericUserPreferenceArray(
1);
prefArray.setUserID(0, userID);
prefArray.setItemID(0, itemID);
prefArray.setValue(0, pref.getValue());
allTrainingPrefs.get(i).put(userID, prefArray);
}
}
}
- for(int i = (kFold - 1) * eachFoldNumPrefs; i < numPrefs; i++){
+ for(int i = (kFold * eachFoldNumPrefs); i < numPrefs; i++){
Preference pref = allPrefs.get(randomNums[i]);
long userID = pref.getUserID();
long itemID = pref.getItemID();
FastByIDMap<PreferenceArray> trainingPrefs = allTrainingPrefs.get(kFold - 1);
if(trainingPrefs.containsKey(userID)){
PreferenceArray prefArray = trainingPrefs.get(userID);
PreferenceArray newPrefArray = new GenericUserPreferenceArray(prefArray.length() + 1);
for(int k = 0; k < prefArray.length(); k++){
newPrefArray.setItemID(k, prefArray.getItemID(k));
newPrefArray.setValue(k, prefArray.getValue(k));
}
newPrefArray.setUserID(0, userID);
newPrefArray.setItemID(prefArray.length(), itemID);
newPrefArray.setValue(prefArray.length(), pref.getValue());
trainingPrefs.remove(userID);
trainingPrefs.put(userID, newPrefArray);
}else{
PreferenceArray prefArray = new GenericUserPreferenceArray(1);
prefArray.setUserID(0, userID);
prefArray.setItemID(0, itemID);
prefArray.setValue(0, pref.getValue());
trainingPrefs.put(userID, prefArray);
}
}
double result = 0;
for(int i = 0; i < kFold; i++){
testPrefs = allTrainingPrefs.get(i);
FastByIDMap<PreferenceArray> trainingPrefs = new FastByIDMap<PreferenceArray>(
1 + (int) (evaluationPercentage * numUsers));
for(int j = 0; j < kFold; j++){
if(j != i){
FastByIDMap<PreferenceArray> trainingPrefsPart = allTrainingPrefs.get(j);
for(Map.Entry<Long, PreferenceArray> entry : trainingPrefsPart.entrySet()){
Long userID = entry.getKey();
PreferenceArray prefArray = entry.getValue();
if(trainingPrefs.containsKey(userID)){
PreferenceArray originalPrefArray = trainingPrefs.get(userID);
PreferenceArray newPrefArray = new GenericUserPreferenceArray(originalPrefArray.length() + prefArray.length());
newPrefArray.setUserID(0, userID);
for(int k = 0; k < originalPrefArray.length(); k++){
newPrefArray.setItemID(k, originalPrefArray.getItemID(k));
newPrefArray.setValue(k, originalPrefArray.getValue(k));
}
for(int k = originalPrefArray.length(); k < (prefArray.length() + originalPrefArray.length()); k++){
newPrefArray.setItemID(k, prefArray.getItemID(k - originalPrefArray.length()));
newPrefArray.setValue(k, prefArray.getValue(k - originalPrefArray.length()));
}
trainingPrefs.remove(userID);
trainingPrefs.put(userID, newPrefArray);
}else{
trainingPrefs.put(userID, prefArray);
}
}
}
}
DataModel trainingModel = dataModelBuilder == null ? new GenericDataModel(trainingPrefs)
: dataModelBuilder.buildDataModel(trainingPrefs);
Recommender recommender = recommenderBuilder.buildRecommender(trainingModel);
result += getEvaluation(testPrefs, recommender);
}
log.info("Evaluation result: {}", result / kFold);
return result / kFold;
}
/*protected double evaluate(RecommenderBuilder recommenderBuilder,
DataModelBuilder dataModelBuilder,
DataModel dataModel,
double trainingPercentage,
double evaluationPercentage) throws TasteException {
Preconditions.checkNotNull(recommenderBuilder);
Preconditions.checkNotNull(dataModel);
Preconditions.checkArgument(trainingPercentage >= 0.0 && trainingPercentage <= 1.0,
"Invalid trainingPercentage: " + trainingPercentage);
Preconditions.checkArgument(evaluationPercentage >= 0.0 && evaluationPercentage <= 1.0,
"Invalid evaluationPercentage: " + evaluationPercentage);
log.info("Beginning evaluation using {} of {}", trainingPercentage, dataModel);
int numPrefs = ((ImproveFileDataModel)dataModel).getNumPrefs();
int eachFoldNumPrefs = numPrefs / kFold;
ArrayList<Preference> allPrefs = (ArrayList<Preference>)((ImproveFileDataModel)dataModel).getAllPrefs();
int numUsers = dataModel.getNumUsers();
List<FastByIDMap<PreferenceArray>> allTrainingPrefs = new ArrayList<FastByIDMap<PreferenceArray>>();
for(int i = 0; i < kFold; i++){
allTrainingPrefs.add(new FastByIDMap<PreferenceArray>(1 + (int) (evaluationPercentage * numUsers)));
}
FastByIDMap<PreferenceArray> testPrefs = null;
int randomNums[] = generateRandomNums(numPrefs);
for(int i = 0; i < kFold; i++){
for(int j = i * eachFoldNumPrefs; j < ((i + 1) * eachFoldNumPrefs); j++){
Preference pref = allPrefs.get(randomNums[j]);
long userID = pref.getUserID();
long itemID = pref.getItemID();
if(allTrainingPrefs.get(i).containsKey(userID)){
PreferenceArray prefArray = allTrainingPrefs.get(i).get(userID);
PreferenceArray newPrefArray = new GenericUserPreferenceArray(prefArray.length() + 1);
for(int k = 0; k < prefArray.length(); k++){
newPrefArray.setItemID(k, prefArray.getItemID(k));
newPrefArray.setValue(k, prefArray.getValue(k));
}
newPrefArray.setUserID(0, userID);
newPrefArray.setItemID(prefArray.length(), itemID);
newPrefArray.setValue(prefArray.length(), pref.getValue());
allTrainingPrefs.get(i).remove(userID);
allTrainingPrefs.get(i).put(userID, newPrefArray);
}else{
PreferenceArray prefArray = new GenericUserPreferenceArray(1);
prefArray.setUserID(0, userID);
prefArray.setItemID(0, itemID);
prefArray.setValue(0, pref.getValue());
allTrainingPrefs.get(i).put(userID, prefArray);
}
}
}
for(int i = (kFold - 1) * eachFoldNumPrefs; i < numPrefs; i++){
Preference pref = allPrefs.get(randomNums[i]);
long userID = pref.getUserID();
long itemID = pref.getItemID();
FastByIDMap<PreferenceArray> trainingPrefs = allTrainingPrefs.get(kFold - 1);
if(trainingPrefs.containsKey(userID)){
PreferenceArray prefArray = trainingPrefs.get(userID);
PreferenceArray newPrefArray = new GenericUserPreferenceArray(prefArray.length() + 1);
for(int k = 0; k < prefArray.length(); k++){
newPrefArray.setItemID(k, prefArray.getItemID(k));
newPrefArray.setValue(k, prefArray.getValue(k));
}
newPrefArray.setUserID(0, userID);
newPrefArray.setItemID(prefArray.length(), itemID);
newPrefArray.setValue(prefArray.length(), pref.getValue());
trainingPrefs.remove(userID);
trainingPrefs.put(userID, newPrefArray);
}else{
PreferenceArray prefArray = new GenericUserPreferenceArray(1);
prefArray.setUserID(0, userID);
prefArray.setItemID(0, itemID);
prefArray.setValue(0, pref.getValue());
trainingPrefs.put(userID, prefArray);
}
}
double result = 0;
for(int i = 0; i < kFold; i++){
testPrefs = allTrainingPrefs.get(i);
FastByIDMap<PreferenceArray> trainingPrefs = new FastByIDMap<PreferenceArray>(
1 + (int) (evaluationPercentage * numUsers));
for(int j = 0; j < kFold; j++){
if(j != i){
FastByIDMap<PreferenceArray> trainingPrefsPart = allTrainingPrefs.get(j);
for(Map.Entry<Long, PreferenceArray> entry : trainingPrefsPart.entrySet()){
Long userID = entry.getKey();
PreferenceArray prefArray = entry.getValue();
if(trainingPrefs.containsKey(userID)){
PreferenceArray originalPrefArray = trainingPrefs.get(userID);
PreferenceArray newPrefArray = new GenericUserPreferenceArray(originalPrefArray.length() + prefArray.length());
newPrefArray.setUserID(0, userID);
for(int k = 0; k < originalPrefArray.length(); k++){
newPrefArray.setItemID(k, originalPrefArray.getItemID(k));
newPrefArray.setValue(k, originalPrefArray.getValue(k));
}
for(int k = originalPrefArray.length(); k < (prefArray.length() + originalPrefArray.length()); k++){
newPrefArray.setItemID(k, prefArray.getItemID(k - originalPrefArray.length()));
newPrefArray.setValue(k, prefArray.getValue(k - originalPrefArray.length()));
}
trainingPrefs.remove(userID);
trainingPrefs.put(userID, newPrefArray);
}else{
trainingPrefs.put(userID, prefArray);
}
}
}
}
DataModel trainingModel = dataModelBuilder == null ? new GenericDataModel(trainingPrefs)
: dataModelBuilder.buildDataModel(trainingPrefs);
Recommender recommender = recommenderBuilder.buildRecommender(trainingModel);
result += getEvaluation(testPrefs, recommender);
log.info("Evaluation result: {}", result);
}
return result / kFold;
}*/
private int[] generateRandomNums(int num){
Object[] randomNumsTemp = null;
HashSet<Integer> hashSet = new HashSet<Integer>();
while(hashSet.size() < num){
hashSet.add(random.nextInt(num));
}
randomNumsTemp = hashSet.toArray();
int randomNums[] = new int[num];
for(int i = 0; i < num; i++){
randomNums[i] = Integer.parseInt(String.valueOf(randomNumsTemp[i]));
}
return randomNums;
}
private float capEstimatedPreference(float estimate) {
if (estimate > maxPreference) {
return maxPreference;
}
if (estimate < minPreference) {
return minPreference;
}
return estimate;
}
private double getEvaluation(FastByIDMap<PreferenceArray> testPrefs, Recommender recommender)
throws TasteException {
reset();
Collection<Callable<Void>> estimateCallables = Lists.newArrayList();
AtomicInteger noEstimateCounter = new AtomicInteger();
for (Map.Entry<Long,PreferenceArray> entry : testPrefs.entrySet()) {
estimateCallables.add(
new PreferenceEstimateCallable(recommender, entry.getKey(), entry.getValue(), noEstimateCounter));
}
log.info("Beginning evaluation of {} users", estimateCallables.size());
RunningAverageAndStdDev timing = new FullRunningAverageAndStdDev();
execute(estimateCallables, noEstimateCounter, timing);
return computeFinalEvaluation();
}
protected static void execute(Collection<Callable<Void>> callables,
AtomicInteger noEstimateCounter,
RunningAverageAndStdDev timing) throws TasteException {
Collection<Callable<Void>> wrappedCallables = wrapWithStatsCallables(callables, noEstimateCounter, timing);
int numProcessors = Runtime.getRuntime().availableProcessors();
ExecutorService executor = Executors.newFixedThreadPool(numProcessors);
log.info("Starting timing of {} tasks in {} threads", wrappedCallables.size(), numProcessors);
try {
List<Future<Void>> futures = executor.invokeAll(wrappedCallables);
// Go look for exceptions here, really
for (Future<Void> future : futures) {
future.get();
}
} catch (InterruptedException ie) {
throw new TasteException(ie);
} catch (ExecutionException ee) {
throw new TasteException(ee.getCause());
}
executor.shutdown();
}
private static Collection<Callable<Void>> wrapWithStatsCallables(Iterable<Callable<Void>> callables,
AtomicInteger noEstimateCounter,
RunningAverageAndStdDev timing) {
Collection<Callable<Void>> wrapped = Lists.newArrayList();
int count = 0;
for (Callable<Void> callable : callables) {
boolean logStats = count++ % 1000 == 0; // log every 1000 or so iterations
wrapped.add(new StatsCallable(callable, logStats, timing, noEstimateCounter));
}
return wrapped;
}
protected abstract void reset();
protected abstract void processOneEstimate(float estimatedPreference, Preference realPref);
protected abstract double computeFinalEvaluation();
public int getkFold() {
return kFold;
}
public void setkFold(int kFold) {
this.kFold = kFold;
}
public final class PreferenceEstimateCallable implements Callable<Void> {
private final Recommender recommender;
private final long testUserID;
private final PreferenceArray prefs;
private final AtomicInteger noEstimateCounter;
public PreferenceEstimateCallable(Recommender recommender,
long testUserID,
PreferenceArray prefs,
AtomicInteger noEstimateCounter) {
this.recommender = recommender;
this.testUserID = testUserID;
this.prefs = prefs;
this.noEstimateCounter = noEstimateCounter;
}
public Void call() throws TasteException {
for (Preference realPref : prefs) {
float estimatedPreference = Float.NaN;
try {
estimatedPreference = recommender.estimatePreference(testUserID, realPref.getItemID());
} catch (NoSuchUserException nsue) {
// It's possible that an item exists in the test data but not training data in which case
// NSEE will be thrown. Just ignore it and move on.
log.info("User exists in test data but not training data: {}", testUserID);
} catch (NoSuchItemException nsie) {
log.info("Item exists in test data but not training data: {}", realPref.getItemID());
}
if (Float.isNaN(estimatedPreference)) {
noEstimateCounter.incrementAndGet();
} else {
estimatedPreference = capEstimatedPreference(estimatedPreference);
processOneEstimate(estimatedPreference, realPref);
}
}
return null;
}
}
}
| true
| true
|
public double evaluate(RecommenderBuilder recommenderBuilder,
DataModelBuilder dataModelBuilder, DataModel dataModel, int kFold,
double evaluationPercentage) throws TasteException {
Preconditions.checkNotNull(recommenderBuilder);
Preconditions.checkNotNull(dataModel);
Preconditions.checkArgument(kFold > 0, "Invalid kFold: " + kFold);
Preconditions.checkArgument(evaluationPercentage >= 0.0
&& evaluationPercentage <= 1.0,
"Invalid evaluationPercentage: " + evaluationPercentage);
this.kFold = kFold;
log.info("Beginning evaluation using {} FoldCross of {}", kFold,
dataModel);
int numPrefs = ((KFoldCrossFileDataModel) dataModel).getNumPrefs();
int eachFoldNumPrefs = numPrefs / kFold;
ArrayList<Preference> allPrefs = (ArrayList<Preference>) ((KFoldCrossFileDataModel) dataModel)
.getAllPrefs();
int numUsers = dataModel.getNumUsers();
List<FastByIDMap<PreferenceArray>> allTrainingPrefs = new ArrayList<FastByIDMap<PreferenceArray>>();
for (int i = 0; i < kFold; i++) {
allTrainingPrefs.add(new FastByIDMap<PreferenceArray>(
1 + (int) (evaluationPercentage * numUsers)));
}
FastByIDMap<PreferenceArray> testPrefs = null;
int randomNums[] = generateRandomNums(numPrefs);
for (int i = 0; i < kFold; i++) {
for (int j = i * eachFoldNumPrefs; j < ((i + 1) * eachFoldNumPrefs); j++) {
Preference pref = allPrefs.get(randomNums[j]);
long userID = pref.getUserID();
long itemID = pref.getItemID();
if (allTrainingPrefs.get(i).containsKey(userID)) {
PreferenceArray prefArray = allTrainingPrefs.get(i).get(
userID);
PreferenceArray newPrefArray = new GenericUserPreferenceArray(
prefArray.length() + 1);
for (int k = 0; k < prefArray.length(); k++) {
newPrefArray.setItemID(k, prefArray.getItemID(k));
newPrefArray.setValue(k, prefArray.getValue(k));
}
newPrefArray.setUserID(0, userID);
newPrefArray.setItemID(prefArray.length(), itemID);
newPrefArray.setValue(prefArray.length(), pref.getValue());
allTrainingPrefs.get(i).remove(userID);
allTrainingPrefs.get(i).put(userID, newPrefArray);
} else {
PreferenceArray prefArray = new GenericUserPreferenceArray(
1);
prefArray.setUserID(0, userID);
prefArray.setItemID(0, itemID);
prefArray.setValue(0, pref.getValue());
allTrainingPrefs.get(i).put(userID, prefArray);
}
}
}
for(int i = (kFold - 1) * eachFoldNumPrefs; i < numPrefs; i++){
Preference pref = allPrefs.get(randomNums[i]);
long userID = pref.getUserID();
long itemID = pref.getItemID();
FastByIDMap<PreferenceArray> trainingPrefs = allTrainingPrefs.get(kFold - 1);
if(trainingPrefs.containsKey(userID)){
PreferenceArray prefArray = trainingPrefs.get(userID);
PreferenceArray newPrefArray = new GenericUserPreferenceArray(prefArray.length() + 1);
for(int k = 0; k < prefArray.length(); k++){
newPrefArray.setItemID(k, prefArray.getItemID(k));
newPrefArray.setValue(k, prefArray.getValue(k));
}
newPrefArray.setUserID(0, userID);
newPrefArray.setItemID(prefArray.length(), itemID);
newPrefArray.setValue(prefArray.length(), pref.getValue());
trainingPrefs.remove(userID);
trainingPrefs.put(userID, newPrefArray);
}else{
PreferenceArray prefArray = new GenericUserPreferenceArray(1);
prefArray.setUserID(0, userID);
prefArray.setItemID(0, itemID);
prefArray.setValue(0, pref.getValue());
trainingPrefs.put(userID, prefArray);
}
}
double result = 0;
for(int i = 0; i < kFold; i++){
testPrefs = allTrainingPrefs.get(i);
FastByIDMap<PreferenceArray> trainingPrefs = new FastByIDMap<PreferenceArray>(
1 + (int) (evaluationPercentage * numUsers));
for(int j = 0; j < kFold; j++){
if(j != i){
FastByIDMap<PreferenceArray> trainingPrefsPart = allTrainingPrefs.get(j);
for(Map.Entry<Long, PreferenceArray> entry : trainingPrefsPart.entrySet()){
Long userID = entry.getKey();
PreferenceArray prefArray = entry.getValue();
if(trainingPrefs.containsKey(userID)){
PreferenceArray originalPrefArray = trainingPrefs.get(userID);
PreferenceArray newPrefArray = new GenericUserPreferenceArray(originalPrefArray.length() + prefArray.length());
newPrefArray.setUserID(0, userID);
for(int k = 0; k < originalPrefArray.length(); k++){
newPrefArray.setItemID(k, originalPrefArray.getItemID(k));
newPrefArray.setValue(k, originalPrefArray.getValue(k));
}
for(int k = originalPrefArray.length(); k < (prefArray.length() + originalPrefArray.length()); k++){
newPrefArray.setItemID(k, prefArray.getItemID(k - originalPrefArray.length()));
newPrefArray.setValue(k, prefArray.getValue(k - originalPrefArray.length()));
}
trainingPrefs.remove(userID);
trainingPrefs.put(userID, newPrefArray);
}else{
trainingPrefs.put(userID, prefArray);
}
}
}
}
DataModel trainingModel = dataModelBuilder == null ? new GenericDataModel(trainingPrefs)
: dataModelBuilder.buildDataModel(trainingPrefs);
Recommender recommender = recommenderBuilder.buildRecommender(trainingModel);
result += getEvaluation(testPrefs, recommender);
}
log.info("Evaluation result: {}", result / kFold);
return result / kFold;
}
|
public double evaluate(RecommenderBuilder recommenderBuilder,
DataModelBuilder dataModelBuilder, DataModel dataModel, int kFold,
double evaluationPercentage) throws TasteException {
Preconditions.checkNotNull(recommenderBuilder);
Preconditions.checkNotNull(dataModel);
Preconditions.checkArgument(kFold > 0, "Invalid kFold: " + kFold);
Preconditions.checkArgument(evaluationPercentage >= 0.0
&& evaluationPercentage <= 1.0,
"Invalid evaluationPercentage: " + evaluationPercentage);
this.kFold = kFold;
log.info("Beginning evaluation using {} FoldCross of {}", kFold,
dataModel);
int numPrefs = ((KFoldCrossFileDataModel) dataModel).getNumPrefs();
int eachFoldNumPrefs = numPrefs / kFold;
ArrayList<Preference> allPrefs = (ArrayList<Preference>) ((KFoldCrossFileDataModel) dataModel)
.getAllPrefs();
int numUsers = dataModel.getNumUsers();
List<FastByIDMap<PreferenceArray>> allTrainingPrefs = new ArrayList<FastByIDMap<PreferenceArray>>();
for (int i = 0; i < kFold; i++) {
allTrainingPrefs.add(new FastByIDMap<PreferenceArray>(
1 + (int) (evaluationPercentage * numUsers)));
}
FastByIDMap<PreferenceArray> testPrefs = null;
int randomNums[] = generateRandomNums(numPrefs);
for (int i = 0; i < kFold; i++) {
for (int j = i * eachFoldNumPrefs; j < ((i + 1) * eachFoldNumPrefs); j++) {
Preference pref = allPrefs.get(randomNums[j]);
long userID = pref.getUserID();
long itemID = pref.getItemID();
if (allTrainingPrefs.get(i).containsKey(userID)) {
PreferenceArray prefArray = allTrainingPrefs.get(i).get(
userID);
PreferenceArray newPrefArray = new GenericUserPreferenceArray(
prefArray.length() + 1);
for (int k = 0; k < prefArray.length(); k++) {
newPrefArray.setItemID(k, prefArray.getItemID(k));
newPrefArray.setValue(k, prefArray.getValue(k));
}
newPrefArray.setUserID(0, userID);
newPrefArray.setItemID(prefArray.length(), itemID);
newPrefArray.setValue(prefArray.length(), pref.getValue());
allTrainingPrefs.get(i).remove(userID);
allTrainingPrefs.get(i).put(userID, newPrefArray);
} else {
PreferenceArray prefArray = new GenericUserPreferenceArray(
1);
prefArray.setUserID(0, userID);
prefArray.setItemID(0, itemID);
prefArray.setValue(0, pref.getValue());
allTrainingPrefs.get(i).put(userID, prefArray);
}
}
}
for(int i = (kFold * eachFoldNumPrefs); i < numPrefs; i++){
Preference pref = allPrefs.get(randomNums[i]);
long userID = pref.getUserID();
long itemID = pref.getItemID();
FastByIDMap<PreferenceArray> trainingPrefs = allTrainingPrefs.get(kFold - 1);
if(trainingPrefs.containsKey(userID)){
PreferenceArray prefArray = trainingPrefs.get(userID);
PreferenceArray newPrefArray = new GenericUserPreferenceArray(prefArray.length() + 1);
for(int k = 0; k < prefArray.length(); k++){
newPrefArray.setItemID(k, prefArray.getItemID(k));
newPrefArray.setValue(k, prefArray.getValue(k));
}
newPrefArray.setUserID(0, userID);
newPrefArray.setItemID(prefArray.length(), itemID);
newPrefArray.setValue(prefArray.length(), pref.getValue());
trainingPrefs.remove(userID);
trainingPrefs.put(userID, newPrefArray);
}else{
PreferenceArray prefArray = new GenericUserPreferenceArray(1);
prefArray.setUserID(0, userID);
prefArray.setItemID(0, itemID);
prefArray.setValue(0, pref.getValue());
trainingPrefs.put(userID, prefArray);
}
}
double result = 0;
for(int i = 0; i < kFold; i++){
testPrefs = allTrainingPrefs.get(i);
FastByIDMap<PreferenceArray> trainingPrefs = new FastByIDMap<PreferenceArray>(
1 + (int) (evaluationPercentage * numUsers));
for(int j = 0; j < kFold; j++){
if(j != i){
FastByIDMap<PreferenceArray> trainingPrefsPart = allTrainingPrefs.get(j);
for(Map.Entry<Long, PreferenceArray> entry : trainingPrefsPart.entrySet()){
Long userID = entry.getKey();
PreferenceArray prefArray = entry.getValue();
if(trainingPrefs.containsKey(userID)){
PreferenceArray originalPrefArray = trainingPrefs.get(userID);
PreferenceArray newPrefArray = new GenericUserPreferenceArray(originalPrefArray.length() + prefArray.length());
newPrefArray.setUserID(0, userID);
for(int k = 0; k < originalPrefArray.length(); k++){
newPrefArray.setItemID(k, originalPrefArray.getItemID(k));
newPrefArray.setValue(k, originalPrefArray.getValue(k));
}
for(int k = originalPrefArray.length(); k < (prefArray.length() + originalPrefArray.length()); k++){
newPrefArray.setItemID(k, prefArray.getItemID(k - originalPrefArray.length()));
newPrefArray.setValue(k, prefArray.getValue(k - originalPrefArray.length()));
}
trainingPrefs.remove(userID);
trainingPrefs.put(userID, newPrefArray);
}else{
trainingPrefs.put(userID, prefArray);
}
}
}
}
DataModel trainingModel = dataModelBuilder == null ? new GenericDataModel(trainingPrefs)
: dataModelBuilder.buildDataModel(trainingPrefs);
Recommender recommender = recommenderBuilder.buildRecommender(trainingModel);
result += getEvaluation(testPrefs, recommender);
}
log.info("Evaluation result: {}", result / kFold);
return result / kFold;
}
|
diff --git a/src/org/protege/editor/owl/ui/metrics/DLNameKeyPanel.java b/src/org/protege/editor/owl/ui/metrics/DLNameKeyPanel.java
index febab9f0..c0743faa 100644
--- a/src/org/protege/editor/owl/ui/metrics/DLNameKeyPanel.java
+++ b/src/org/protege/editor/owl/ui/metrics/DLNameKeyPanel.java
@@ -1,155 +1,155 @@
package org.protege.editor.owl.ui.metrics;
import org.protege.editor.core.ui.util.ComponentFactory;
import org.protege.editor.owl.ui.OWLIcons;
import javax.swing.*;
import java.awt.*;
import java.util.ArrayList;
import java.util.List;
/*
* Copyright (C) 2007, University of Manchester
*
* Modifications to the initial code base are copyright of their
* respective authors, or their employers as appropriate. Authorship
* of the modifications may be determined from the ChangeLog placed at
* the end of this file.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* Author: Matthew Horridge<br>
* The University Of Manchester<br>
* Medical Informatics Group<br>
* Date: 03-Oct-2006<br><br>
* <p/>
* matthew.horridge@cs.man.ac.uk<br>
* www.cs.man.ac.uk/~horridgm<br><br>
*/
public class DLNameKeyPanel extends JPanel {
public DLNameKeyPanel() {
List<NameObject> box = new ArrayList<NameObject>();
addExplanation(OWLIcons.getIcon("AL.png"),
- "Attributive language. This is the base langauge which allows:" + "<ul><li>Atomic negation (negation of concepts that do not appear on the left hand side of axioms)</li>" + "<li>Concept intersection</li>" + "<li>Universal restrictions</li>" + "<li>Limited existential quatification (restrictions that only have fillers " + "of Thing)</li></ul>",
+ "Attributive language. This is the base language which allows:" + "<ul><li>Atomic negation (negation of concepts that do not appear on the left hand side of axioms)</li>" + "<li>Concept intersection</li>" + "<li>Universal restrictions</li>" + "<li>Limited existential quatification (restrictions that only have fillers " + "of Thing)</li></ul>",
box);
// addExplanation(OWLIcons.getIcon("F.png"), "Attributive language", box);
addExplanation(OWLIcons.getIcon("FLM.png"),
"A sub-langauge of AL, which is obtained by disallowing atomic negation",
box);
addExplanation(OWLIcons.getIcon("FLO.png"),
"A sub-language of FLo, which is obtained by disallowing limited existential quantification",
box);
addExplanation(OWLIcons.getIcon("C.png"), "Complex concept negation", box);
addExplanation(OWLIcons.getIcon("S.png"), "An abbreviation for AL and C with transitive properties", box);
addExplanation(OWLIcons.getIcon("H.png"), "Role hierarchy (subproperties - rdfs:subPropertyOf)", box);
addExplanation(OWLIcons.getIcon("O.png"),
"Nominals. (Enumerated classes or object value restrictions - owl:oneOf, owl:hasValue)",
box);
addExplanation(OWLIcons.getIcon("I.png"), "Inverse properties", box);
addExplanation(OWLIcons.getIcon("N.png"),
"Cardinality restrictions (owl:Cardinality, owl:minCardianlity, owl:maxCardinality)",
box);
addExplanation(OWLIcons.getIcon("Q.png"), "Qualified cardinality restrictions (available in OWL 1.1)", box);
addExplanation(OWLIcons.getIcon("F.png"), "Functional properties", box);
addExplanation(OWLIcons.getIcon("E.png"),
"Full existential quantification (Existential restrictions that have fillers other that owl:Thing)",
box);
addExplanation(OWLIcons.getIcon("U.png"), "Concept union", box);
addExplanation(OWLIcons.getIcon("Datatype.png"), "Use of datatype properties, data values or datatypes", box);
setLayout(new BorderLayout());
JList l = new JList(box.toArray());
l.setCellRenderer(new DefaultListCellRenderer() {
public Component getListCellRendererComponent(JList list, Object value, int index, boolean isSelected,
boolean cellHasFocus) {
JLabel label = (JLabel) super.getListCellRendererComponent(list,
value,
index,
isSelected,
cellHasFocus);
NameObject nameObject = (NameObject) value;
label.setIcon(nameObject.getIcon());
label.setText(nameObject.getDesc());
label.setFont(new Font(label.getFont().getName(), Font.PLAIN, 12));
label.setBorder(BorderFactory.createCompoundBorder(BorderFactory.createMatteBorder(0,
0,
1,
0,
Color.LIGHT_GRAY),
BorderFactory.createEmptyBorder(5, 2, 5, 2)));
return label;
}
});
l.setBackground(Color.WHITE);
JScrollPane sp = ComponentFactory.createScrollPane(l);
add(sp, BorderLayout.CENTER);
}
private class NameObject {
private Icon icon;
private String desc;
public NameObject(final Icon icon, String desc) {
this.icon = new Icon() {
public void paintIcon(Component c, Graphics g, int x, int y) {
icon.paintIcon(c, g, x, y);
}
public int getIconWidth() {
return 80;
}
public int getIconHeight() {
return icon.getIconHeight();
}
};
this.desc = "<html><body>" + desc + "</body></html>";
}
public Icon getIcon() {
return icon;
}
public String getDesc() {
return desc;
}
}
private void addExplanation(Icon icon, String description, List<NameObject> list) {
list.add(new NameObject(icon, description));
}
public static void main(String[] args) {
JFrame f = new JFrame();
f.setContentPane(new DLNameKeyPanel());
f.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
f.setVisible(true);
}
}
| true
| true
|
public DLNameKeyPanel() {
List<NameObject> box = new ArrayList<NameObject>();
addExplanation(OWLIcons.getIcon("AL.png"),
"Attributive language. This is the base langauge which allows:" + "<ul><li>Atomic negation (negation of concepts that do not appear on the left hand side of axioms)</li>" + "<li>Concept intersection</li>" + "<li>Universal restrictions</li>" + "<li>Limited existential quatification (restrictions that only have fillers " + "of Thing)</li></ul>",
box);
// addExplanation(OWLIcons.getIcon("F.png"), "Attributive language", box);
addExplanation(OWLIcons.getIcon("FLM.png"),
"A sub-langauge of AL, which is obtained by disallowing atomic negation",
box);
addExplanation(OWLIcons.getIcon("FLO.png"),
"A sub-language of FLo, which is obtained by disallowing limited existential quantification",
box);
addExplanation(OWLIcons.getIcon("C.png"), "Complex concept negation", box);
addExplanation(OWLIcons.getIcon("S.png"), "An abbreviation for AL and C with transitive properties", box);
addExplanation(OWLIcons.getIcon("H.png"), "Role hierarchy (subproperties - rdfs:subPropertyOf)", box);
addExplanation(OWLIcons.getIcon("O.png"),
"Nominals. (Enumerated classes or object value restrictions - owl:oneOf, owl:hasValue)",
box);
addExplanation(OWLIcons.getIcon("I.png"), "Inverse properties", box);
addExplanation(OWLIcons.getIcon("N.png"),
"Cardinality restrictions (owl:Cardinality, owl:minCardianlity, owl:maxCardinality)",
box);
addExplanation(OWLIcons.getIcon("Q.png"), "Qualified cardinality restrictions (available in OWL 1.1)", box);
addExplanation(OWLIcons.getIcon("F.png"), "Functional properties", box);
addExplanation(OWLIcons.getIcon("E.png"),
"Full existential quantification (Existential restrictions that have fillers other that owl:Thing)",
box);
addExplanation(OWLIcons.getIcon("U.png"), "Concept union", box);
addExplanation(OWLIcons.getIcon("Datatype.png"), "Use of datatype properties, data values or datatypes", box);
setLayout(new BorderLayout());
JList l = new JList(box.toArray());
l.setCellRenderer(new DefaultListCellRenderer() {
public Component getListCellRendererComponent(JList list, Object value, int index, boolean isSelected,
boolean cellHasFocus) {
JLabel label = (JLabel) super.getListCellRendererComponent(list,
value,
index,
isSelected,
cellHasFocus);
NameObject nameObject = (NameObject) value;
label.setIcon(nameObject.getIcon());
label.setText(nameObject.getDesc());
label.setFont(new Font(label.getFont().getName(), Font.PLAIN, 12));
label.setBorder(BorderFactory.createCompoundBorder(BorderFactory.createMatteBorder(0,
0,
1,
0,
Color.LIGHT_GRAY),
BorderFactory.createEmptyBorder(5, 2, 5, 2)));
return label;
}
});
l.setBackground(Color.WHITE);
JScrollPane sp = ComponentFactory.createScrollPane(l);
add(sp, BorderLayout.CENTER);
}
|
/**
 * Builds the Description Logic "name key" panel: a read-only list that maps each
 * DL letter icon (AL, C, S, H, O, I, N, Q, F, E, U, Datatype) to a short
 * explanation of the expressivity it denotes.
 *
 * Fixes user-visible typos in the explanation strings
 * ("langauge", "quatification", "minCardianlity", "other that").
 */
public DLNameKeyPanel() {
    List<NameObject> box = new ArrayList<NameObject>();
    addExplanation(OWLIcons.getIcon("AL.png"),
                   "Attributive language. This is the base language which allows:"
                   + "<ul><li>Atomic negation (negation of concepts that do not appear on the left hand side of axioms)</li>"
                   + "<li>Concept intersection</li>"
                   + "<li>Universal restrictions</li>"
                   + "<li>Limited existential quantification (restrictions that only have fillers of Thing)</li></ul>",
                   box);
    addExplanation(OWLIcons.getIcon("FLM.png"),
                   "A sub-language of AL, which is obtained by disallowing atomic negation",
                   box);
    addExplanation(OWLIcons.getIcon("FLO.png"),
                   "A sub-language of FLo, which is obtained by disallowing limited existential quantification",
                   box);
    addExplanation(OWLIcons.getIcon("C.png"), "Complex concept negation", box);
    addExplanation(OWLIcons.getIcon("S.png"), "An abbreviation for AL and C with transitive properties", box);
    addExplanation(OWLIcons.getIcon("H.png"), "Role hierarchy (subproperties - rdfs:subPropertyOf)", box);
    addExplanation(OWLIcons.getIcon("O.png"),
                   "Nominals. (Enumerated classes or object value restrictions - owl:oneOf, owl:hasValue)",
                   box);
    addExplanation(OWLIcons.getIcon("I.png"), "Inverse properties", box);
    addExplanation(OWLIcons.getIcon("N.png"),
                   "Cardinality restrictions (owl:Cardinality, owl:minCardinality, owl:maxCardinality)",
                   box);
    addExplanation(OWLIcons.getIcon("Q.png"), "Qualified cardinality restrictions (available in OWL 1.1)", box);
    addExplanation(OWLIcons.getIcon("F.png"), "Functional properties", box);
    addExplanation(OWLIcons.getIcon("E.png"),
                   "Full existential quantification (Existential restrictions that have fillers other than owl:Thing)",
                   box);
    addExplanation(OWLIcons.getIcon("U.png"), "Concept union", box);
    addExplanation(OWLIcons.getIcon("Datatype.png"), "Use of datatype properties, data values or datatypes", box);

    setLayout(new BorderLayout());
    JList l = new JList(box.toArray());
    // Render each entry as icon + description with a light separator line.
    l.setCellRenderer(new DefaultListCellRenderer() {
        @Override
        public Component getListCellRendererComponent(JList list, Object value, int index, boolean isSelected,
                                                      boolean cellHasFocus) {
            JLabel label = (JLabel) super.getListCellRendererComponent(list,
                                                                       value,
                                                                       index,
                                                                       isSelected,
                                                                       cellHasFocus);
            NameObject nameObject = (NameObject) value;
            label.setIcon(nameObject.getIcon());
            label.setText(nameObject.getDesc());
            label.setFont(new Font(label.getFont().getName(), Font.PLAIN, 12));
            label.setBorder(BorderFactory.createCompoundBorder(BorderFactory.createMatteBorder(0,
                                                                                               0,
                                                                                               1,
                                                                                               0,
                                                                                               Color.LIGHT_GRAY),
                                                               BorderFactory.createEmptyBorder(5, 2, 5, 2)));
            return label;
        }
    });
    l.setBackground(Color.WHITE);
    JScrollPane sp = ComponentFactory.createScrollPane(l);
    add(sp, BorderLayout.CENTER);
}
|
diff --git a/core/src/net/sf/openrocket/unit/Unit.java b/core/src/net/sf/openrocket/unit/Unit.java
index a0522c2b..75931d05 100644
--- a/core/src/net/sf/openrocket/unit/Unit.java
+++ b/core/src/net/sf/openrocket/unit/Unit.java
@@ -1,209 +1,209 @@
package net.sf.openrocket.unit;
import java.text.DecimalFormat;
import net.sf.openrocket.util.Chars;
public abstract class Unit {
/** No unit */
public static final Unit NOUNIT = new GeneralUnit(1, "" + Chars.ZWSP, 2);
protected final double multiplier; // meters = units * multiplier
protected final String unit;
/**
* Creates a new Unit with a given multiplier and unit name.
*
* Multiplier e.g. 1 in = 0.0254 meter
*
* @param multiplier The multiplier to use on the value, 1 this unit == multiplier SI units
* @param unit The unit's short form.
*/
public Unit(double multiplier, String unit) {
if (multiplier == 0)
throw new IllegalArgumentException("Unit has multiplier=0");
this.multiplier = multiplier;
this.unit = unit;
}
/**
* Converts from SI units to this unit. The default implementation simply divides by the
* multiplier.
*
* @param value Value in SI unit
* @return Value in these units
*/
public double toUnit(double value) {
return value / multiplier;
}
/**
* Convert from this type of units to SI units. The default implementation simply
* multiplies by the multiplier.
*
* @param value Value in these units
* @return Value in SI units
*/
public double fromUnit(double value) {
return value * multiplier;
}
/**
* Return the unit name.
*
* @return the unit.
*/
public String getUnit() {
return unit;
}
/**
* Whether the value and unit should be separated by a whitespace. This method
* returns true as most units have a space between the value and unit, but may be
* overridden.
*
* @return true if the value and unit should be separated
*/
public boolean hasSpace() {
return true;
}
@Override
public String toString() {
return unit;
}
// TODO: Should this use grouping separator ("#,##0.##")?
private static final DecimalFormat intFormat = new DecimalFormat("#");
private static final DecimalFormat decFormat = new DecimalFormat("0.0##");
private static final DecimalFormat expFormat = new DecimalFormat("0.00E0");
/**
* Format the given value (in SI units) to a string representation of the value in this
* units. An suitable amount of decimals for the unit are used in the representation.
* The unit is not appended to the numerical value.
*
* @param value Value in SI units.
* @return A string representation of the number in these units.
*/
public String toString(double value) {
double val = toUnit(value);
if (Math.abs(val) > 1E6) {
return expFormat.format(val);
}
if (Math.abs(val) >= 100) {
return intFormat.format(val);
}
if (Math.abs(val) <= 0.0005) {
return "0";
}
val = roundForDecimalFormat(val);
// Check for approximate integer
- if (Math.abs(val - Math.floor(val)) < 0.001) {
+ if (Math.abs(val - Math.floor(val)) < 0.0001) {
return intFormat.format(val);
}
return decFormat.format(val);
}
protected double roundForDecimalFormat(double val) {
double sign = Math.signum(val);
val = Math.abs(val);
double mul = 1.0;
while (val < 100) {
mul *= 10;
val *= 10;
}
val = Math.rint(val) / mul * sign;
return val;
}
/**
* Return a string with the specified value and unit. The value is converted into
* this unit. If <code>value</code> is NaN, returns "N/A" (not applicable).
*
* @param value the value to print in SI units.
* @return the value and unit, or "N/A".
*/
public String toStringUnit(double value) {
if (Double.isNaN(value))
return "N/A";
String s = toString(value);
if (hasSpace())
s += " ";
s += unit;
return s;
}
/**
* Creates a new Value object with the specified value and this unit.
*
* @param value the value to set.
* @return a new Value object.
*/
public Value toValue(double value) {
return new Value(value, this);
}
/**
* Round the value (in the current units) to a precision suitable for rough valuing
* (approximately 2 significant numbers).
*
* @param value Value in current units
* @return Rounded value.
*/
public abstract double round(double value);
/**
* Return the next rounded value after the given value.
* @param value Value in these units.
* @return The next suitable rounded value.
*/
public abstract double getNextValue(double value);
/**
* Return the previous rounded value before the given value.
* @param value Value in these units.
* @return The previous suitable rounded value.
*/
public abstract double getPreviousValue(double value);
//public abstract ArrayList<Tick> getTicks(double start, double end, double scale);
/**
* Return ticks in the range start - end (in current units). minor is the minimum
* distance between minor, non-notable ticks and major the minimum distance between
* major non-notable ticks. The values are in current units, i.e. no conversion is
* performed.
*/
public abstract Tick[] getTicks(double start, double end, double minor, double major);
/**
* Compares whether the two units are equal. Equality requires the unit classes,
* multiplier values and units to be equal.
*/
@Override
public boolean equals(Object other) {
if (other == null)
return false;
if (this.getClass() != other.getClass())
return false;
return ((this.multiplier == ((Unit) other).multiplier) && this.unit.equals(((Unit) other).unit));
}
@Override
public int hashCode() {
return this.getClass().hashCode() + this.unit.hashCode();
}
}
| true
| true
|
/**
 * Format the given value (in SI units) as a string in this unit, choosing a
 * formatter by magnitude: exponential above 1E6, integer at >= 100, "0" at or
 * below 0.0005, otherwise rounded decimal. The unit name is not appended.
 *
 * @param value Value in SI units.
 * @return A string representation of the number in these units.
 */
public String toString(double value) {
    double val = toUnit(value);

    if (Math.abs(val) > 1E6) {
        return expFormat.format(val);
    }
    if (Math.abs(val) >= 100) {
        return intFormat.format(val);
    }
    if (Math.abs(val) <= 0.0005) {
        return "0";
    }

    val = roundForDecimalFormat(val);
    // Check for approximate integer; threshold tightened from 0.001 to 0.0001
    // so values rounded to ~5 significant digits are not truncated to integers.
    if (Math.abs(val - Math.floor(val)) < 0.0001) {
        return intFormat.format(val);
    }
    return decFormat.format(val);
}
|
/**
 * Renders the given SI value as a number string in this unit. Very large
 * magnitudes use exponential notation, magnitudes of 100 or more use the
 * integer format, near-zero magnitudes collapse to "0", and everything else
 * is rounded and printed with decimals unless it is essentially an integer.
 *
 * @param value Value in SI units.
 * @return the formatted number (without the unit name).
 */
public String toString(double value) {
    final double converted = toUnit(value);
    final double magnitude = Math.abs(converted);

    if (magnitude > 1E6) {
        return expFormat.format(converted);
    }
    if (magnitude >= 100) {
        return intFormat.format(converted);
    }
    if (magnitude <= 0.0005) {
        return "0";
    }

    final double rounded = roundForDecimalFormat(converted);
    // Values within 0.0001 of an integer are printed without decimals.
    final boolean nearInteger = Math.abs(rounded - Math.floor(rounded)) < 0.0001;
    return nearInteger ? intFormat.format(rounded) : decFormat.format(rounded);
}
|
diff --git a/SARA/src/sara/GetHighlightServlet.java b/SARA/src/sara/GetHighlightServlet.java
index 301c2a0..eaaf494 100644
--- a/SARA/src/sara/GetHighlightServlet.java
+++ b/SARA/src/sara/GetHighlightServlet.java
@@ -1,22 +1,22 @@
package sara;
import sara.SARADocument;
import sara.Highlight;
import sara.Selection;
import sara.HighlightService;
import java.io.IOException;
import java.servlet.http.*;
public class GetHighlightServlet extends HttpServlet {
public void doPost(HttpServletRequest req, HttpServletResponse resp) throws IOException {
if(req.getParameter("document") != null) {
HighlightService hs = new HighlightService();
Iterator<Highlight> highlights = hs.listHighlights();
while(highlights.hasNext()) {
- out.print(highlight.next().toJson());
+ out.print(highlights.next().toJson());
}
}
}
}
| true
| true
|
/**
 * Handles POST: when a "document" parameter is present, writes each stored
 * highlight's JSON form to the response.
 *
 * Fixes: the loop called {@code highlight.next()} on a nonexistent variable
 * (the iterator is named {@code highlights}), and {@code out} was never
 * declared — it is now obtained from {@code resp.getWriter()}.
 *
 * @param req  the incoming request; only the "document" parameter is checked
 * @param resp the response to write JSON to
 * @throws IOException if the response writer cannot be obtained or written
 */
public void doPost(HttpServletRequest req, HttpServletResponse resp) throws IOException {
    if (req.getParameter("document") != null) {
        HighlightService hs = new HighlightService();
        Iterator<Highlight> highlights = hs.listHighlights();
        java.io.PrintWriter out = resp.getWriter();
        while (highlights.hasNext()) {
            out.print(highlights.next().toJson());
        }
    }
}
|
/**
 * Handles POST: when a "document" parameter is present, writes each stored
 * highlight's JSON form to the response.
 *
 * Fixes: {@code out} was used without ever being declared — it is now
 * obtained from {@code resp.getWriter()}.
 *
 * @param req  the incoming request; only the "document" parameter is checked
 * @param resp the response to write JSON to
 * @throws IOException if the response writer cannot be obtained or written
 */
public void doPost(HttpServletRequest req, HttpServletResponse resp) throws IOException {
    if (req.getParameter("document") != null) {
        HighlightService hs = new HighlightService();
        Iterator<Highlight> highlights = hs.listHighlights();
        java.io.PrintWriter out = resp.getWriter();
        while (highlights.hasNext()) {
            out.print(highlights.next().toJson());
        }
    }
}
|
diff --git a/src/brutes/server/db/DatasManager.java b/src/brutes/server/db/DatasManager.java
index 47b84bb..789ec5c 100644
--- a/src/brutes/server/db/DatasManager.java
+++ b/src/brutes/server/db/DatasManager.java
@@ -1,187 +1,187 @@
package brutes.server.db;
import brutes.server.ui;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.sql.*;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
*
* @author Thiktak
*/
public class DatasManager {
static private Connection con;
public static Connection getInstance(String type, String dbpath) throws IOException {
Class classType;
switch (type) {
case "sqlite":
try {
classType = Class.forName("org.sqlite.JDBC");
dbpath = "jdbc:sqlite:" + dbpath;
} catch (ClassNotFoundException e) {
throw new IOException(e);
}
break;
default:
throw new IOException(type + " SQL support not exists");
}
try {
DatasManager.con = DriverManager.getConnection(dbpath);
} catch (SQLException ex) {
Logger.getLogger(DatasManager.class.getName()).log(Level.SEVERE, null, ex);
}
return DatasManager.con;
}
public static void populate() throws IOException {
try {
Connection c = DatasManager.getInstance();
c.createStatement().executeUpdate("CREATE TABLE IF NOT EXISTS users (id INTEGER PRIMARY KEY AUTOINCREMENT, pseudo TEXT, password TEXT, token TEXT, date_created DATETIME DEFAULT current_timestamp)");
c.createStatement().executeUpdate("INSERT INTO users (pseudo, password) VALUES ('Bots', 'WTF')");
c.createStatement().executeUpdate("INSERT INTO users (pseudo, password) VALUES ('Thiktak', 'root1')");
c.createStatement().executeUpdate("INSERT INTO users (pseudo, password) VALUES ('Kirauks', 'root2')");
c.createStatement().executeUpdate("INSERT INTO users (pseudo, password) VALUES ('Bruno', 'mdp')");
- c.createStatement().executeUpdate("INSERT INTO users (pseudo, password) VALUES ('User', '')");
+ c.createStatement().executeUpdate("INSERT INTO users (pseudo, password) VALUES ('User', 'user')");
c.createStatement().executeUpdate("CREATE TABLE IF NOT EXISTS brutes (id INTEGER PRIMARY KEY AUTOINCREMENT, user_id INTEGER, name TEXT, level INTEGER, life INTEGER, strength INTEGER, speed INTEGER, image_id INTEGER, date_created DATETIME DEFAULT current_timestamp)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (1, 1, 'Rukia', 5, 34, 8, 13)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (1, 2, 'Skitt', 1, 10, 4, 3)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (1, 3, 'Tulipe', 3, 22, 19, 6)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (1, 4, 'Zazardify', 2, 16, 14, 3)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (1, 5, 'Gwenn', 3, 26, 5, 21)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (1, 6, 'Ruelle', 2, 16, 4, 10)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (1, 7, 'Sybelle', 3, 24, 10, 4)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (1, 8, 'Sheldon', 1, 10, 1, 6)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (1, 9, 'Hassen', 10, 67, 32, 17)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (1, 10, 'Krossork', 7, 48, 21, 11)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (2, 11, 'Thik', 1, 10, 3, 4)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (3, 12, 'Rauks', 1, 10, 4, 3)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (4, 13, 'Brubru', 1, 10, 5, 2)");
c.createStatement().executeUpdate("CREATE TABLE IF NOT EXISTS bonus (id INTEGER PRIMARY KEY AUTOINCREMENT, brute_id INTEGER, name TEXT, level INTEGER, life INTEGER, strength INTEGER, speed INTEGER, image_id INTEGER)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 1, 31, 'Mouton', 1, 10, 10, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 2, 32, 'Amulette', 1, 0, 0, 10)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 3, 33, 'Dagues Gha', 1, 0, 15, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 4, 34, 'Parchemin', 1, 0, 0, 10)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 4, 35, 'Arc Bricolo', 1, 0, 5, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 5, 36, 'Chien', 1, 0, 0, 10)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 6, 37, 'Troll', 1, 0, 5, 5)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 7, 38, 'Epouventail', 1, 20, 0, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 8, 39, 'Squelette', 1, 0, 10, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 9, 40, 'Excalibur', 1, 0, 20, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 10, 41, 'Faux de Sang', 1, 0, 15, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 42, 'Couteau', 1, 0, 5, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 43, 'Koala Rasta', 1, 0, 0, 5)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 44, 'Sceptre', 1, 0, 0, 10)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 45, 'Tortue Luth', 1, 15, 0, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 46, 'Loup', 1, 0, 10, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 47, 'Firechat', 1, 0, 10, 10)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 48, 'Martouïe', 1, 0, 10, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 49, 'Martlave', 1, 0, 20, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 50, 'Casque', 1, 30, 0, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 51, 'Grimoire', 1, 0, 0, 5)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 52, 'Vaudou', 1, 0, 0, 10)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 53, 'Vaudou', 1, 0, 10, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 54, 'Vaudou', 1, 10, 0, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 55, 'Mineur', 1, 0, 5, 5)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 56, 'Zebrarc', 1, 0, 15, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 57, 'Singe', 1, 0, 0, 10)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 58, 'Démon Loup', 1, 5, 5, 5)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 59, 'Mage', 1, 0, 0, 15)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 60, 'Roc enchanté', 1, 20, 10, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 61, 'Canidomme', 1, 0, 20, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 62, 'Myosotis', 1, 5, 0, 5)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 63, 'Scorpion', 1, 0, 15, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 64, 'Aragog', 1, 10, 0, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 65, 'Pelle', 1, 0, 10, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 66, 'Bâton de glace', 1, 0, 0, 10)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 67, 'Bâton de feu', 1, 0, 10, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 68, 'Peluche', 1, 15, 0, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 69, 'Epée', 1, 0, 10, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 70, 'Foxeur', 1, 25, 0, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 71, 'Ecureil', 1, 0, 0, 20)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 72, 'Lapin affamé', 1, 0, 15, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 73, 'Arbre', 1, 20, 0, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 74, 'Gros Lapin', 1, 10, 0, 5)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 75, 'Koala de Sang', 1, 20, 0, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 76, 'Sanglier', 1, 0, 15, 5)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 77, 'Carotte', 1, 30, 0, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 78, 'Crocodile', 1, 0, 10, 10)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 79, 'Epée double', 1, 0, 20, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 80, 'Tortue guerrière', 1, 10, 10, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 81, 'Pingu', 1, 10, 0, 5)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 82, 'Kipik', 1, 0, 15, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 83, 'Gelée verte', 1, 15, 0, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 84, 'Gelée rouge', 1, 0, 5, 5)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 85, 'Poupée', 1, 20, 0, 0)");
c.createStatement().executeUpdate("CREATE TABLE IF NOT EXISTS fights (id INTEGER PRIMARY KEY AUTOINCREMENT, brute_id1 INTEGER, brute_id2 INTEGER, winner_id INTEGER, date_created DATETIME DEFAULT current_timestamp)");
} catch (SQLException ex) {
Logger.getLogger(DatasManager.class.getName()).log(Level.SEVERE, null, ex);
}
}
public static Connection getInstance() throws IOException {
if (DatasManager.con == null) {
throw new IOException("No instance of dataManager");
}
return DatasManager.con;
}
public static ResultSet exec(String query) throws IOException, SQLException {
return DatasManager.getInstance().createStatement().executeQuery(query);
}
public static PreparedStatement prepare(String query) throws IOException, SQLException {
return DatasManager.getInstance().prepareStatement(query);
}
public static Statement getStatement() throws IOException, SQLException {
return DatasManager.getInstance().createStatement();
}
public static <T> void save(T obj) throws IOException {
try {
Class classObj = Class.forName(ui.getClassPath(DatasManager.class) + ".entity." + obj.getClass().getSimpleName() + "Entity");
Logger.getLogger(DatasManager.class.getName()).log(Level.INFO, "Call *.entity.{0}Entity::save", obj.getClass().getSimpleName());
classObj.getMethod("save", new Class[]{Connection.class, obj.getClass()}).invoke(null, DatasManager.getInstance(), obj);
} catch (ClassNotFoundException | NoSuchMethodException | InvocationTargetException | IllegalAccessException | IllegalArgumentException ex) {
Logger.getLogger(DatasManager.class.getName()).log(Level.SEVERE, null, ex);
}
}
public static <T> T insert(T obj) throws IOException {
try {
Class classObj = Class.forName(ui.getClassPath(DatasManager.class) + ".entity." + obj.getClass().getSimpleName() + "Entity");
Logger.getLogger(DatasManager.class.getName()).log(Level.INFO, "Call *.entity.{0}Entity::insert", obj.getClass().getSimpleName());
return (T) classObj.getMethod("insert", new Class[]{Connection.class, obj.getClass()}).invoke(null, DatasManager.getInstance(), obj);
} catch (ClassNotFoundException | NoSuchMethodException | InvocationTargetException | IllegalAccessException | IllegalArgumentException ex) {
Logger.getLogger(DatasManager.class.getName()).log(Level.SEVERE, null, ex);
}
return null;
}
public static <T> void delete(T obj) throws IOException {
try {
Class classObj = Class.forName(ui.getClassPath(DatasManager.class) + ".entity." + obj.getClass().getSimpleName() + "Entity");
Logger.getLogger(DatasManager.class.getName()).log(Level.INFO, "Call *.entity.{0}Entity::delete", obj.getClass().getSimpleName());
classObj.getMethod("delete", new Class[]{Connection.class, obj.getClass()}).invoke(null, DatasManager.getInstance(), obj);
} catch (ClassNotFoundException | NoSuchMethodException | InvocationTargetException | IllegalAccessException | IllegalArgumentException ex) {
Logger.getLogger(DatasManager.class.getName()).log(Level.SEVERE, null, ex);
}
}
}
| true
| true
|
public static void populate() throws IOException {
try {
Connection c = DatasManager.getInstance();
c.createStatement().executeUpdate("CREATE TABLE IF NOT EXISTS users (id INTEGER PRIMARY KEY AUTOINCREMENT, pseudo TEXT, password TEXT, token TEXT, date_created DATETIME DEFAULT current_timestamp)");
c.createStatement().executeUpdate("INSERT INTO users (pseudo, password) VALUES ('Bots', 'WTF')");
c.createStatement().executeUpdate("INSERT INTO users (pseudo, password) VALUES ('Thiktak', 'root1')");
c.createStatement().executeUpdate("INSERT INTO users (pseudo, password) VALUES ('Kirauks', 'root2')");
c.createStatement().executeUpdate("INSERT INTO users (pseudo, password) VALUES ('Bruno', 'mdp')");
c.createStatement().executeUpdate("INSERT INTO users (pseudo, password) VALUES ('User', '')");
c.createStatement().executeUpdate("CREATE TABLE IF NOT EXISTS brutes (id INTEGER PRIMARY KEY AUTOINCREMENT, user_id INTEGER, name TEXT, level INTEGER, life INTEGER, strength INTEGER, speed INTEGER, image_id INTEGER, date_created DATETIME DEFAULT current_timestamp)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (1, 1, 'Rukia', 5, 34, 8, 13)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (1, 2, 'Skitt', 1, 10, 4, 3)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (1, 3, 'Tulipe', 3, 22, 19, 6)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (1, 4, 'Zazardify', 2, 16, 14, 3)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (1, 5, 'Gwenn', 3, 26, 5, 21)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (1, 6, 'Ruelle', 2, 16, 4, 10)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (1, 7, 'Sybelle', 3, 24, 10, 4)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (1, 8, 'Sheldon', 1, 10, 1, 6)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (1, 9, 'Hassen', 10, 67, 32, 17)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (1, 10, 'Krossork', 7, 48, 21, 11)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (2, 11, 'Thik', 1, 10, 3, 4)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (3, 12, 'Rauks', 1, 10, 4, 3)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (4, 13, 'Brubru', 1, 10, 5, 2)");
c.createStatement().executeUpdate("CREATE TABLE IF NOT EXISTS bonus (id INTEGER PRIMARY KEY AUTOINCREMENT, brute_id INTEGER, name TEXT, level INTEGER, life INTEGER, strength INTEGER, speed INTEGER, image_id INTEGER)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 1, 31, 'Mouton', 1, 10, 10, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 2, 32, 'Amulette', 1, 0, 0, 10)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 3, 33, 'Dagues Gha', 1, 0, 15, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 4, 34, 'Parchemin', 1, 0, 0, 10)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 4, 35, 'Arc Bricolo', 1, 0, 5, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 5, 36, 'Chien', 1, 0, 0, 10)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 6, 37, 'Troll', 1, 0, 5, 5)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 7, 38, 'Epouventail', 1, 20, 0, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 8, 39, 'Squelette', 1, 0, 10, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 9, 40, 'Excalibur', 1, 0, 20, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 10, 41, 'Faux de Sang', 1, 0, 15, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 42, 'Couteau', 1, 0, 5, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 43, 'Koala Rasta', 1, 0, 0, 5)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 44, 'Sceptre', 1, 0, 0, 10)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 45, 'Tortue Luth', 1, 15, 0, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 46, 'Loup', 1, 0, 10, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 47, 'Firechat', 1, 0, 10, 10)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 48, 'Martouïe', 1, 0, 10, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 49, 'Martlave', 1, 0, 20, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 50, 'Casque', 1, 30, 0, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 51, 'Grimoire', 1, 0, 0, 5)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 52, 'Vaudou', 1, 0, 0, 10)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 53, 'Vaudou', 1, 0, 10, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 54, 'Vaudou', 1, 10, 0, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 55, 'Mineur', 1, 0, 5, 5)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 56, 'Zebrarc', 1, 0, 15, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 57, 'Singe', 1, 0, 0, 10)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 58, 'Démon Loup', 1, 5, 5, 5)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 59, 'Mage', 1, 0, 0, 15)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 60, 'Roc enchanté', 1, 20, 10, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 61, 'Canidomme', 1, 0, 20, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 62, 'Myosotis', 1, 5, 0, 5)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 63, 'Scorpion', 1, 0, 15, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 64, 'Aragog', 1, 10, 0, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 65, 'Pelle', 1, 0, 10, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 66, 'Bâton de glace', 1, 0, 0, 10)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 67, 'Bâton de feu', 1, 0, 10, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 68, 'Peluche', 1, 15, 0, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 69, 'Epée', 1, 0, 10, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 70, 'Foxeur', 1, 25, 0, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 71, 'Ecureil', 1, 0, 0, 20)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 72, 'Lapin affamé', 1, 0, 15, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 73, 'Arbre', 1, 20, 0, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 74, 'Gros Lapin', 1, 10, 0, 5)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 75, 'Koala de Sang', 1, 20, 0, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 76, 'Sanglier', 1, 0, 15, 5)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 77, 'Carotte', 1, 30, 0, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 78, 'Crocodile', 1, 0, 10, 10)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 79, 'Epée double', 1, 0, 20, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 80, 'Tortue guerrière', 1, 10, 10, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 81, 'Pingu', 1, 10, 0, 5)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 82, 'Kipik', 1, 0, 15, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 83, 'Gelée verte', 1, 15, 0, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 84, 'Gelée rouge', 1, 0, 5, 5)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 85, 'Poupée', 1, 20, 0, 0)");
c.createStatement().executeUpdate("CREATE TABLE IF NOT EXISTS fights (id INTEGER PRIMARY KEY AUTOINCREMENT, brute_id1 INTEGER, brute_id2 INTEGER, winner_id INTEGER, date_created DATETIME DEFAULT current_timestamp)");
} catch (SQLException ex) {
Logger.getLogger(DatasManager.class.getName()).log(Level.SEVERE, null, ex);
}
}
|
public static void populate() throws IOException {
try {
Connection c = DatasManager.getInstance();
c.createStatement().executeUpdate("CREATE TABLE IF NOT EXISTS users (id INTEGER PRIMARY KEY AUTOINCREMENT, pseudo TEXT, password TEXT, token TEXT, date_created DATETIME DEFAULT current_timestamp)");
c.createStatement().executeUpdate("INSERT INTO users (pseudo, password) VALUES ('Bots', 'WTF')");
c.createStatement().executeUpdate("INSERT INTO users (pseudo, password) VALUES ('Thiktak', 'root1')");
c.createStatement().executeUpdate("INSERT INTO users (pseudo, password) VALUES ('Kirauks', 'root2')");
c.createStatement().executeUpdate("INSERT INTO users (pseudo, password) VALUES ('Bruno', 'mdp')");
c.createStatement().executeUpdate("INSERT INTO users (pseudo, password) VALUES ('User', 'user')");
c.createStatement().executeUpdate("CREATE TABLE IF NOT EXISTS brutes (id INTEGER PRIMARY KEY AUTOINCREMENT, user_id INTEGER, name TEXT, level INTEGER, life INTEGER, strength INTEGER, speed INTEGER, image_id INTEGER, date_created DATETIME DEFAULT current_timestamp)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (1, 1, 'Rukia', 5, 34, 8, 13)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (1, 2, 'Skitt', 1, 10, 4, 3)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (1, 3, 'Tulipe', 3, 22, 19, 6)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (1, 4, 'Zazardify', 2, 16, 14, 3)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (1, 5, 'Gwenn', 3, 26, 5, 21)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (1, 6, 'Ruelle', 2, 16, 4, 10)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (1, 7, 'Sybelle', 3, 24, 10, 4)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (1, 8, 'Sheldon', 1, 10, 1, 6)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (1, 9, 'Hassen', 10, 67, 32, 17)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (1, 10, 'Krossork', 7, 48, 21, 11)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (2, 11, 'Thik', 1, 10, 3, 4)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (3, 12, 'Rauks', 1, 10, 4, 3)");
c.createStatement().executeUpdate("INSERT INTO brutes (user_id, image_id, name, level, life, strength, speed) VALUES (4, 13, 'Brubru', 1, 10, 5, 2)");
c.createStatement().executeUpdate("CREATE TABLE IF NOT EXISTS bonus (id INTEGER PRIMARY KEY AUTOINCREMENT, brute_id INTEGER, name TEXT, level INTEGER, life INTEGER, strength INTEGER, speed INTEGER, image_id INTEGER)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 1, 31, 'Mouton', 1, 10, 10, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 2, 32, 'Amulette', 1, 0, 0, 10)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 3, 33, 'Dagues Gha', 1, 0, 15, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 4, 34, 'Parchemin', 1, 0, 0, 10)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 4, 35, 'Arc Bricolo', 1, 0, 5, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 5, 36, 'Chien', 1, 0, 0, 10)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 6, 37, 'Troll', 1, 0, 5, 5)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 7, 38, 'Epouventail', 1, 20, 0, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 8, 39, 'Squelette', 1, 0, 10, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 9, 40, 'Excalibur', 1, 0, 20, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 10, 41, 'Faux de Sang', 1, 0, 15, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 42, 'Couteau', 1, 0, 5, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 43, 'Koala Rasta', 1, 0, 0, 5)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 44, 'Sceptre', 1, 0, 0, 10)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 45, 'Tortue Luth', 1, 15, 0, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 46, 'Loup', 1, 0, 10, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 47, 'Firechat', 1, 0, 10, 10)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 48, 'Martouïe', 1, 0, 10, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 49, 'Martlave', 1, 0, 20, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 50, 'Casque', 1, 30, 0, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 51, 'Grimoire', 1, 0, 0, 5)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 52, 'Vaudou', 1, 0, 0, 10)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 53, 'Vaudou', 1, 0, 10, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 54, 'Vaudou', 1, 10, 0, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 55, 'Mineur', 1, 0, 5, 5)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 56, 'Zebrarc', 1, 0, 15, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 57, 'Singe', 1, 0, 0, 10)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 58, 'Démon Loup', 1, 5, 5, 5)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 59, 'Mage', 1, 0, 0, 15)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 60, 'Roc enchanté', 1, 20, 10, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 61, 'Canidomme', 1, 0, 20, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 62, 'Myosotis', 1, 5, 0, 5)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 63, 'Scorpion', 1, 0, 15, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 64, 'Aragog', 1, 10, 0, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 65, 'Pelle', 1, 0, 10, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 66, 'Bâton de glace', 1, 0, 0, 10)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 67, 'Bâton de feu', 1, 0, 10, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 68, 'Peluche', 1, 15, 0, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 69, 'Epée', 1, 0, 10, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 70, 'Foxeur', 1, 25, 0, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 71, 'Ecureil', 1, 0, 0, 20)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 72, 'Lapin affamé', 1, 0, 15, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 73, 'Arbre', 1, 20, 0, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 74, 'Gros Lapin', 1, 10, 0, 5)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 75, 'Koala de Sang', 1, 20, 0, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 76, 'Sanglier', 1, 0, 15, 5)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 77, 'Carotte', 1, 30, 0, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 78, 'Crocodile', 1, 0, 10, 10)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 79, 'Epée double', 1, 0, 20, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 80, 'Tortue guerrière', 1, 10, 10, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 81, 'Pingu', 1, 10, 0, 5)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 82, 'Kipik', 1, 0, 15, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 83, 'Gelée verte', 1, 15, 0, 0)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 84, 'Gelée rouge', 1, 0, 5, 5)");
c.createStatement().executeUpdate("INSERT INTO bonus (brute_id, image_id, name, level, life, strength, speed) VALUES ( 0, 85, 'Poupée', 1, 20, 0, 0)");
c.createStatement().executeUpdate("CREATE TABLE IF NOT EXISTS fights (id INTEGER PRIMARY KEY AUTOINCREMENT, brute_id1 INTEGER, brute_id2 INTEGER, winner_id INTEGER, date_created DATETIME DEFAULT current_timestamp)");
} catch (SQLException ex) {
Logger.getLogger(DatasManager.class.getName()).log(Level.SEVERE, null, ex);
}
}
|
diff --git a/src/com/kkbox/toolkit/api/KKAPIRequest.java b/src/com/kkbox/toolkit/api/KKAPIRequest.java
index fa3cc94..51ad8e2 100644
--- a/src/com/kkbox/toolkit/api/KKAPIRequest.java
+++ b/src/com/kkbox/toolkit/api/KKAPIRequest.java
@@ -1,345 +1,346 @@
/* Copyright (C) 2013 KKBOX Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* KKAPIRequest
*/
package com.kkbox.toolkit.api;
import android.content.Context;
import android.net.ConnectivityManager;
import android.os.SystemClock;
import com.kkbox.toolkit.internal.api.KKAPIRequestListener;
import com.kkbox.toolkit.utils.KKDebug;
import com.kkbox.toolkit.utils.StringUtils;
import com.kkbox.toolkit.utils.UserTask;
import org.apache.http.Header;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.client.HttpClient;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.utils.URLEncodedUtils;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.entity.FileEntity;
import org.apache.http.entity.InputStreamEntity;
import org.apache.http.entity.StringEntity;
import org.apache.http.entity.mime.HttpMultipartMode;
import org.apache.http.entity.mime.MultipartEntity;
import org.apache.http.entity.mime.content.ContentBody;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.params.BasicHttpParams;
import org.apache.http.params.HttpConnectionParams;
import org.apache.http.protocol.HTTP;
import org.apache.http.util.EntityUtils;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.util.ArrayList;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
import javax.crypto.Cipher;
public class KKAPIRequest extends UserTask<Object, Void, Void> {
private KKAPIRequestListener listener;
private String getParams = "";
private final String url;
private HttpClient httpclient;
private boolean isNetworkError = false;
private boolean isHttpStatusError = false;
private int httpStatusCode = 0;
private ArrayList<NameValuePair> postParams;
private ArrayList<NameValuePair> headerParams;
private MultipartEntity multipartEntity;
private StringEntity stringEntity;
private FileEntity fileEntity;
private ByteArrayEntity byteArrayEntity;
private InputStreamEntity gzipStreamEntity;
private Cipher cipher = null;
private Context context = null;
private long reloadPeriod = -1;
public KKAPIRequest(String url, Cipher cipher, long reloadPeriod, Context context) {
this(url, cipher, 10000);
this.reloadPeriod = reloadPeriod;
this.context = context;
}
public KKAPIRequest(String url, Cipher cipher) {
this(url, cipher, 10000);
}
public KKAPIRequest(String url, Cipher cipher, int socketTimeout) {
BasicHttpParams params = new BasicHttpParams();
params.setIntParameter(HttpConnectionParams.CONNECTION_TIMEOUT, 10000);
params.setIntParameter(HttpConnectionParams.SO_TIMEOUT, socketTimeout);
httpclient = new DefaultHttpClient(params);
this.url = url;
this.cipher = cipher;
}
public void addGetParam(String key, String value) {
if (getParams == "") {
getParams = "?";
} else {
getParams += "&";
}
getParams += key + "=" + value;
}
public void addGetParam(String parameter) {
if (getParams == "") {
getParams = "?";
} else {
getParams += "&";
}
getParams += parameter;
}
public void addPostParam(String key, String value) {
if (postParams == null) {
postParams = new ArrayList<NameValuePair>();
}
postParams.add((new BasicNameValuePair(key, value)));
}
public void addHeaderParam(String key, String value) {
if (headerParams == null) {
headerParams = new ArrayList<NameValuePair>();
}
headerParams.add((new BasicNameValuePair(key, value)));
}
public void addMultiPartPostParam(String key, ContentBody contentBody) {
if (multipartEntity == null) {
multipartEntity = new MultipartEntity(HttpMultipartMode.BROWSER_COMPATIBLE);
}
multipartEntity.addPart(key, contentBody);
}
public void addStringPostParam(String data) {
try {
stringEntity = new StringEntity(data, HTTP.UTF_8);
} catch (Exception e) {};
}
public void addFilePostParam(String path) {
fileEntity = new FileEntity(new File(path), URLEncodedUtils.CONTENT_TYPE + HTTP.CHARSET_PARAM + HTTP.UTF_8);
}
public void addByteArrayPostParam(byte[] data) {
byteArrayEntity = new ByteArrayEntity(data);
byteArrayEntity.setContentType("application/octet-stream");
}
public void addGZIPPostParam(String key, String value) {
try {
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
ArrayList<NameValuePair> postParams = new ArrayList<NameValuePair>();
postParams.add((new BasicNameValuePair(key, value)));
GZIPOutputStream gZIPOutputStream = new GZIPOutputStream(byteArrayOutputStream);
gZIPOutputStream.write(EntityUtils.toByteArray(new UrlEncodedFormEntity(postParams, HTTP.UTF_8)));
gZIPOutputStream.close();
byte[] byteDataForGZIP = byteArrayOutputStream.toByteArray();
byteArrayOutputStream.close();
gzipStreamEntity = new InputStreamEntity(new ByteArrayInputStream(byteDataForGZIP), byteDataForGZIP.length);
gzipStreamEntity.setContentType("application/x-www-form-urlencoded");
gzipStreamEntity.setContentEncoding("gzip");
} catch (Exception e) {}
}
public void cancel() {
listener = null;
this.cancel(true);
}
@Override
public Void doInBackground(Object... params) {
int readLength;
final ByteArrayOutputStream data = new ByteArrayOutputStream();
final byte[] buffer = new byte[128];
listener = (KKAPIRequestListener) params[0];
int retryTimes = 0;
File cacheFile = null;
ConnectivityManager connectivityManager = null;
if (context != null) {
final File cacheDir = new File(context.getCacheDir().getAbsolutePath() + File.separator + "api");
if (!cacheDir.exists()) {
cacheDir.mkdir();
}
cacheFile = new File(cacheDir.getAbsolutePath() + File.separator + StringUtils.getMd5Hash(url + getParams));
connectivityManager = (ConnectivityManager) context.getSystemService(context.CONNECTIVITY_SERVICE);
}
if (context != null && reloadPeriod > 0 && cacheFile.exists()
&& ((System.currentTimeMillis() - cacheFile.lastModified() < reloadPeriod)
|| connectivityManager == null)) {
try {
InputStream inputStream = new FileInputStream(cacheFile);
while ((readLength = inputStream.read(buffer, 0, buffer.length)) != -1) {
data.write(buffer, 0, readLength);
}
data.flush();
} catch (IOException e) {
e.printStackTrace();
}
} else {
do {
try {
HttpResponse response;
if (postParams != null || multipartEntity != null || stringEntity != null || fileEntity != null
|| byteArrayEntity != null
|| gzipStreamEntity != null || (headerParams != null && postParams != null)) {
final HttpPost httppost = new HttpPost(url + getParams);
if (postParams != null) {
httppost.setEntity(new UrlEncodedFormEntity(postParams, HTTP.UTF_8));
}
if (multipartEntity != null) {
httppost.setEntity(multipartEntity);
}
if (stringEntity != null) {
httppost.setEntity(stringEntity);
}
if (fileEntity != null) {
httppost.setEntity(fileEntity);
}
if (byteArrayEntity != null) {
httppost.setEntity(byteArrayEntity);
}
if (gzipStreamEntity != null) {
httppost.setHeader("Accept-Encoding", "gzip");
httppost.setEntity(gzipStreamEntity);
}
if (headerParams != null) {
for (NameValuePair header : headerParams) {
httppost.setHeader(header.getName(), header.getValue());
}
}
response = httpclient.execute(httppost);
} else {
final HttpGet httpGet = new HttpGet(url + getParams);
if (headerParams != null) {
for (NameValuePair header : headerParams) {
httpGet.setHeader(header.getName(), header.getValue());
}
}
response = httpclient.execute(httpGet);
}
httpStatusCode = response.getStatusLine().getStatusCode();
switch (httpStatusCode) {
case 200:
case 201:
case 202:
case 203:
case 204:
case 205:
case 206:
case 207:
final InputStream is;
Header contentEncoding = response.getFirstHeader("Content-Encoding");
if (contentEncoding != null && contentEncoding.getValue().equalsIgnoreCase("gzip")) {
byte[] inputStreamBuffer = new byte[8192];
int length;
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
GZIPInputStream gZIPInputStream = new GZIPInputStream(new ByteArrayInputStream(
EntityUtils.toByteArray(response.getEntity())));
while ((length = gZIPInputStream.read(inputStreamBuffer)) >= 0) {
byteArrayOutputStream.write(inputStreamBuffer, 0, length);
}
is = new ByteArrayInputStream(byteArrayOutputStream.toByteArray());
gZIPInputStream.close();
byteArrayOutputStream.close();
} else {
is = response.getEntity().getContent();
}
while ((readLength = is.read(buffer, 0, buffer.length)) != -1) {
data.write(buffer, 0, readLength);
}
data.flush();
isNetworkError = false;
break;
case 404:
case 403:
case 400:
+ case 412:
isHttpStatusError = true;
isNetworkError = false;
break;
default:
KKDebug.w("connetion to " + url + getParams + " returns " + httpStatusCode);
retryTimes++;
isNetworkError = true;
SystemClock.sleep(1000);
break;
}
response.getEntity().consumeContent();
} catch (final Exception e) {
KKDebug.w("connetion to " + url + getParams + " failed!");
retryTimes++;
isNetworkError = true;
SystemClock.sleep(1000);
}
} while (isNetworkError && retryTimes < 3);
}
try {
if (!isNetworkError && !isHttpStatusError) {
if (listener != null) {
String jsonData;
if (cipher != null) {
jsonData = new String(cipher.doFinal(data.toByteArray()));
} else {
jsonData = data.toString();
}
listener.onPreComplete(jsonData);
if (context != null) {
try {
FileOutputStream fileOutputStream = new FileOutputStream(cacheFile);
OutputStreamWriter outputStreamWriter = new OutputStreamWriter(fileOutputStream);
outputStreamWriter.write(jsonData);
outputStreamWriter.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
} catch (Exception e) {}
return null;
}
@Override
public void onPostExecute(Void v) {
    // Deliver the outcome of the background request to the listener, if any.
    if (listener != null) {
        if (isHttpStatusError) {
            // Server answered with a non-retryable HTTP error status.
            listener.onHttpStatusError(httpStatusCode);
        } else if (isNetworkError) {
            // All retries were exhausted without a usable response.
            listener.onNetworkError();
        } else {
            listener.onComplete();
        }
    }
}
}
| true
| true
|
/**
 * Background worker for one API request.
 *
 * Serves the response from the on-disk cache when a fresh entry exists
 * (or connectivity information is unavailable); otherwise issues an HTTP
 * request — POST when any body entity or post parameters were supplied,
 * GET otherwise — retrying up to three times on network-level failures.
 * The raw response bytes accumulate in {@code data}; the outcome flags
 * ({@code isNetworkError}, {@code isHttpStatusError}, {@code httpStatusCode})
 * are consumed afterwards by {@code onPostExecute}.
 *
 * @param params params[0] is the {@code KKAPIRequestListener} to notify
 * @return always {@code null}; results are delivered through the listener
 */
public Void doInBackground(Object... params) {
    int readLength;
    final ByteArrayOutputStream data = new ByteArrayOutputStream();
    final byte[] buffer = new byte[128];
    listener = (KKAPIRequestListener) params[0];
    int retryTimes = 0;
    File cacheFile = null;
    ConnectivityManager connectivityManager = null;
    if (context != null) {
        // Responses are cached under <app cache dir>/api, keyed by an MD5
        // hash of the complete request URL (including GET parameters).
        final File cacheDir = new File(context.getCacheDir().getAbsolutePath() + File.separator + "api");
        if (!cacheDir.exists()) {
            cacheDir.mkdir();
        }
        cacheFile = new File(cacheDir.getAbsolutePath() + File.separator + StringUtils.getMd5Hash(url + getParams));
        connectivityManager = (ConnectivityManager) context.getSystemService(context.CONNECTIVITY_SERVICE);
    }
    // Use the cache when caching is enabled and the entry is younger than
    // reloadPeriod, or when connectivity information is unavailable.
    if (context != null && reloadPeriod > 0 && cacheFile.exists()
            && ((System.currentTimeMillis() - cacheFile.lastModified() < reloadPeriod)
            || connectivityManager == null)) {
        try {
            InputStream inputStream = new FileInputStream(cacheFile);
            try {
                while ((readLength = inputStream.read(buffer, 0, buffer.length)) != -1) {
                    data.write(buffer, 0, readLength);
                }
                data.flush();
            } finally {
                // FIX: the cache stream was previously never closed (fd leak).
                inputStream.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    } else {
        // Network path: retry up to 3 times on transient failures.
        do {
            try {
                HttpResponse response;
                // Any body-bearing entity or post parameters force a POST.
                // (The final '(headerParams != null && postParams != null)'
                // clause is redundant — postParams != null already triggers
                // this branch — but is kept to avoid changing the interface.)
                if (postParams != null || multipartEntity != null || stringEntity != null || fileEntity != null
                        || byteArrayEntity != null
                        || gzipStreamEntity != null || (headerParams != null && postParams != null)) {
                    final HttpPost httppost = new HttpPost(url + getParams);
                    if (postParams != null) {
                        httppost.setEntity(new UrlEncodedFormEntity(postParams, HTTP.UTF_8));
                    }
                    if (multipartEntity != null) {
                        httppost.setEntity(multipartEntity);
                    }
                    if (stringEntity != null) {
                        httppost.setEntity(stringEntity);
                    }
                    if (fileEntity != null) {
                        httppost.setEntity(fileEntity);
                    }
                    if (byteArrayEntity != null) {
                        httppost.setEntity(byteArrayEntity);
                    }
                    if (gzipStreamEntity != null) {
                        // Advertise gzip support when sending a gzip entity.
                        httppost.setHeader("Accept-Encoding", "gzip");
                        httppost.setEntity(gzipStreamEntity);
                    }
                    if (headerParams != null) {
                        for (NameValuePair header : headerParams) {
                            httppost.setHeader(header.getName(), header.getValue());
                        }
                    }
                    response = httpclient.execute(httppost);
                } else {
                    final HttpGet httpGet = new HttpGet(url + getParams);
                    if (headerParams != null) {
                        for (NameValuePair header : headerParams) {
                            httpGet.setHeader(header.getName(), header.getValue());
                        }
                    }
                    response = httpclient.execute(httpGet);
                }
                httpStatusCode = response.getStatusLine().getStatusCode();
                switch (httpStatusCode) {
                    // All 2xx success codes: read the body into 'data'.
                    case 200:
                    case 201:
                    case 202:
                    case 203:
                    case 204:
                    case 205:
                    case 206:
                    case 207:
                        final InputStream is;
                        // Transparently inflate gzip-encoded response bodies.
                        Header contentEncoding = response.getFirstHeader("Content-Encoding");
                        if (contentEncoding != null && contentEncoding.getValue().equalsIgnoreCase("gzip")) {
                            byte[] inputStreamBuffer = new byte[8192];
                            int length;
                            ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
                            GZIPInputStream gZIPInputStream = new GZIPInputStream(new ByteArrayInputStream(
                                    EntityUtils.toByteArray(response.getEntity())));
                            while ((length = gZIPInputStream.read(inputStreamBuffer)) >= 0) {
                                byteArrayOutputStream.write(inputStreamBuffer, 0, length);
                            }
                            is = new ByteArrayInputStream(byteArrayOutputStream.toByteArray());
                            gZIPInputStream.close();
                            byteArrayOutputStream.close();
                        } else {
                            is = response.getEntity().getContent();
                        }
                        while ((readLength = is.read(buffer, 0, buffer.length)) != -1) {
                            data.write(buffer, 0, readLength);
                        }
                        data.flush();
                        isNetworkError = false;
                        break;
                    // Client errors that must not be retried.
                    case 404:
                    case 403:
                    case 400:
                    // FIX: 412 (Precondition Failed) was missing here, so it
                    // fell into the default branch and was retried like a
                    // transient failure; the sibling copy of this method
                    // already classifies it as a non-retryable status error.
                    case 412:
                        isHttpStatusError = true;
                        isNetworkError = false;
                        break;
                    default:
                        // Anything else is treated as transient; retry after 1s.
                        KKDebug.w("connetion to " + url + getParams + " returns " + httpStatusCode);
                        retryTimes++;
                        isNetworkError = true;
                        SystemClock.sleep(1000);
                        break;
                }
                // Release the connection back to the pool.
                response.getEntity().consumeContent();
            } catch (final Exception e) {
                KKDebug.w("connetion to " + url + getParams + " failed!");
                retryTimes++;
                isNetworkError = true;
                SystemClock.sleep(1000);
            }
        } while (isNetworkError && retryTimes < 3);
    }
    try {
        if (!isNetworkError && !isHttpStatusError) {
            if (listener != null) {
                String jsonData;
                // Optionally decrypt the payload before handing it on.
                if (cipher != null) {
                    jsonData = new String(cipher.doFinal(data.toByteArray()));
                } else {
                    jsonData = data.toString();
                }
                listener.onPreComplete(jsonData);
                // Persist the (decrypted) payload to the cache file.
                if (context != null) {
                    try {
                        FileOutputStream fileOutputStream = new FileOutputStream(cacheFile);
                        OutputStreamWriter outputStreamWriter = new OutputStreamWriter(fileOutputStream);
                        outputStreamWriter.write(jsonData);
                        outputStreamWriter.close();
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
            }
        }
    } catch (Exception e) {}
    return null;
}
|
/**
 * Background worker for one API request.
 *
 * Serves the response from the on-disk cache when a fresh entry exists
 * (or connectivity information is unavailable); otherwise issues an HTTP
 * request — POST when any body entity or post parameters were supplied,
 * GET otherwise — retrying up to three times on network-level failures.
 * The raw response bytes accumulate in {@code data}; the outcome flags
 * ({@code isNetworkError}, {@code isHttpStatusError}, {@code httpStatusCode})
 * are consumed afterwards by {@code onPostExecute}.
 *
 * @param params params[0] is the {@code KKAPIRequestListener} to notify
 * @return always {@code null}; results are delivered through the listener
 */
public Void doInBackground(Object... params) {
    int readLength;
    final ByteArrayOutputStream data = new ByteArrayOutputStream();
    final byte[] buffer = new byte[128];
    listener = (KKAPIRequestListener) params[0];
    int retryTimes = 0;
    File cacheFile = null;
    ConnectivityManager connectivityManager = null;
    if (context != null) {
        // Responses are cached under <app cache dir>/api, keyed by an MD5
        // hash of the complete request URL (including GET parameters).
        final File cacheDir = new File(context.getCacheDir().getAbsolutePath() + File.separator + "api");
        if (!cacheDir.exists()) {
            cacheDir.mkdir();
        }
        cacheFile = new File(cacheDir.getAbsolutePath() + File.separator + StringUtils.getMd5Hash(url + getParams));
        connectivityManager = (ConnectivityManager) context.getSystemService(context.CONNECTIVITY_SERVICE);
    }
    // Use the cache when caching is enabled and the entry is younger than
    // reloadPeriod, or when connectivity information is unavailable.
    if (context != null && reloadPeriod > 0 && cacheFile.exists()
            && ((System.currentTimeMillis() - cacheFile.lastModified() < reloadPeriod)
            || connectivityManager == null)) {
        try {
            // NOTE(review): this FileInputStream is never closed — fd leak.
            InputStream inputStream = new FileInputStream(cacheFile);
            while ((readLength = inputStream.read(buffer, 0, buffer.length)) != -1) {
                data.write(buffer, 0, readLength);
            }
            data.flush();
        } catch (IOException e) {
            e.printStackTrace();
        }
    } else {
        // Network path: retry up to 3 times on transient failures.
        do {
            try {
                HttpResponse response;
                // Any body-bearing entity or post parameters force a POST.
                // NOTE(review): the final '(headerParams != null && postParams != null)'
                // clause is redundant — postParams != null already triggers this branch.
                if (postParams != null || multipartEntity != null || stringEntity != null || fileEntity != null
                        || byteArrayEntity != null
                        || gzipStreamEntity != null || (headerParams != null && postParams != null)) {
                    final HttpPost httppost = new HttpPost(url + getParams);
                    if (postParams != null) {
                        httppost.setEntity(new UrlEncodedFormEntity(postParams, HTTP.UTF_8));
                    }
                    if (multipartEntity != null) {
                        httppost.setEntity(multipartEntity);
                    }
                    if (stringEntity != null) {
                        httppost.setEntity(stringEntity);
                    }
                    if (fileEntity != null) {
                        httppost.setEntity(fileEntity);
                    }
                    if (byteArrayEntity != null) {
                        httppost.setEntity(byteArrayEntity);
                    }
                    if (gzipStreamEntity != null) {
                        // Advertise gzip support when sending a gzip entity.
                        httppost.setHeader("Accept-Encoding", "gzip");
                        httppost.setEntity(gzipStreamEntity);
                    }
                    if (headerParams != null) {
                        for (NameValuePair header : headerParams) {
                            httppost.setHeader(header.getName(), header.getValue());
                        }
                    }
                    response = httpclient.execute(httppost);
                } else {
                    final HttpGet httpGet = new HttpGet(url + getParams);
                    if (headerParams != null) {
                        for (NameValuePair header : headerParams) {
                            httpGet.setHeader(header.getName(), header.getValue());
                        }
                    }
                    response = httpclient.execute(httpGet);
                }
                httpStatusCode = response.getStatusLine().getStatusCode();
                switch (httpStatusCode) {
                    // All 2xx success codes: read the body into 'data'.
                    case 200:
                    case 201:
                    case 202:
                    case 203:
                    case 204:
                    case 205:
                    case 206:
                    case 207:
                        final InputStream is;
                        // Transparently inflate gzip-encoded response bodies.
                        Header contentEncoding = response.getFirstHeader("Content-Encoding");
                        if (contentEncoding != null && contentEncoding.getValue().equalsIgnoreCase("gzip")) {
                            byte[] inputStreamBuffer = new byte[8192];
                            int length;
                            ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
                            GZIPInputStream gZIPInputStream = new GZIPInputStream(new ByteArrayInputStream(
                                    EntityUtils.toByteArray(response.getEntity())));
                            while ((length = gZIPInputStream.read(inputStreamBuffer)) >= 0) {
                                byteArrayOutputStream.write(inputStreamBuffer, 0, length);
                            }
                            is = new ByteArrayInputStream(byteArrayOutputStream.toByteArray());
                            gZIPInputStream.close();
                            byteArrayOutputStream.close();
                        } else {
                            is = response.getEntity().getContent();
                        }
                        while ((readLength = is.read(buffer, 0, buffer.length)) != -1) {
                            data.write(buffer, 0, readLength);
                        }
                        data.flush();
                        isNetworkError = false;
                        break;
                    // Client errors that must not be retried (412 included).
                    case 404:
                    case 403:
                    case 400:
                    case 412:
                        isHttpStatusError = true;
                        isNetworkError = false;
                        break;
                    default:
                        // Anything else is treated as transient; retry after 1s.
                        KKDebug.w("connetion to " + url + getParams + " returns " + httpStatusCode);
                        retryTimes++;
                        isNetworkError = true;
                        SystemClock.sleep(1000);
                        break;
                }
                // Release the connection back to the pool.
                response.getEntity().consumeContent();
            } catch (final Exception e) {
                KKDebug.w("connetion to " + url + getParams + " failed!");
                retryTimes++;
                isNetworkError = true;
                SystemClock.sleep(1000);
            }
        } while (isNetworkError && retryTimes < 3);
    }
    try {
        if (!isNetworkError && !isHttpStatusError) {
            if (listener != null) {
                String jsonData;
                // Optionally decrypt the payload before handing it on.
                if (cipher != null) {
                    jsonData = new String(cipher.doFinal(data.toByteArray()));
                } else {
                    jsonData = data.toString();
                }
                listener.onPreComplete(jsonData);
                // Persist the (decrypted) payload to the cache file.
                if (context != null) {
                    try {
                        FileOutputStream fileOutputStream = new FileOutputStream(cacheFile);
                        OutputStreamWriter outputStreamWriter = new OutputStreamWriter(fileOutputStream);
                        outputStreamWriter.write(jsonData);
                        outputStreamWriter.close();
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
            }
        }
    } catch (Exception e) {}
    return null;
}
|
diff --git a/dspace-jspui/src/main/java/org/dspace/app/webui/servlet/FeedServlet.java b/dspace-jspui/src/main/java/org/dspace/app/webui/servlet/FeedServlet.java
index dc22d6211..c8e0b6c95 100644
--- a/dspace-jspui/src/main/java/org/dspace/app/webui/servlet/FeedServlet.java
+++ b/dspace-jspui/src/main/java/org/dspace/app/webui/servlet/FeedServlet.java
@@ -1,672 +1,672 @@
/*
* FeedServlet.java
*
* Version: $Revision$
*
* Date: $Date$
*
* Copyright (c) 2002-2005, Hewlett-Packard Company and Massachusetts
* Institute of Technology. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* - Neither the name of the Hewlett-Packard Company nor the name of the
* Massachusetts Institute of Technology nor the names of their
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
* OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
* TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
* USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
* DAMAGE.
*/
package org.dspace.app.webui.servlet;
import java.io.IOException;
import java.sql.SQLException;
import java.text.MessageFormat;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.ResourceBundle;
import java.util.StringTokenizer;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
import org.dspace.app.webui.util.JSPManager;
import org.dspace.authorize.AuthorizeException;
import org.dspace.browse.BrowseEngine;
import org.dspace.browse.BrowseException;
import org.dspace.browse.BrowseIndex;
import org.dspace.browse.BrowseInfo;
import org.dspace.browse.BrowserScope;
import org.dspace.sort.SortOption;
import org.dspace.sort.SortException;
import org.dspace.content.Bitstream;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DCDate;
import org.dspace.content.DCValue;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.handle.HandleManager;
import org.dspace.search.Harvest;
import com.sun.syndication.feed.rss.Channel;
import com.sun.syndication.feed.rss.Description;
import com.sun.syndication.feed.rss.Image;
import com.sun.syndication.feed.rss.TextInput;
import com.sun.syndication.io.FeedException;
import com.sun.syndication.io.WireFeedOutput;
/**
* Servlet for handling requests for a syndication feed. The Handle of the collection
* or community is extracted from the URL, e.g: <code>/feed/rss_1.0/1234/5678</code>.
* Currently supports only RSS feed formats.
*
* @author Ben Bosman, Richard Rodgers
* @version $Revision$
*/
public class FeedServlet extends DSpaceServlet
{
// key for site-wide feed
public static final String SITE_FEED_KEY = "site";
// one hour in milliseconds
private static final long HOUR_MSECS = 60 * 60 * 1000;
/** log4j category */
private static Logger log = Logger.getLogger(FeedServlet.class);
private String clazz = "org.dspace.app.webui.servlet.FeedServlet";
// are syndication feeds enabled?
private static boolean enabled = false;
// number of DSpace items per feed
private static int itemCount = 0;
// optional cache of feeds
private static Map feedCache = null;
// maximum size of cache - 0 means caching disabled
private static int cacheSize = 0;
// how many days to keep a feed in cache before checking currency
private static int cacheAge = 0;
// supported syndication formats
private static List formats = null;
// localized resource bundle
private static ResourceBundle labels = null;
//default fields to display in item description
private static String defaultDescriptionFields = "dc.title, dc.contributor.author, dc.contributor.editor, dc.description.abstract, dc.description";
// Read the site-wide enable flag once, when the servlet class is loaded.
static
{
    enabled = ConfigurationManager.getBooleanProperty("webui.feed.enable");
}
/**
 * Reads the remaining feed configuration (formats, item count, cache
 * size/age) — but only when syndication feeds are enabled site-wide.
 */
public void init()
{
    if (!enabled)
    {
        return;
    }
    // Comma-separated list of supported syndication formats.
    String formatProperty = ConfigurationManager.getProperty("webui.feed.formats");
    if (formatProperty != null)
    {
        formats = new ArrayList();
        for (String format : formatProperty.split(","))
        {
            formats.add(format);
        }
    }
    itemCount = ConfigurationManager.getIntProperty("webui.feed.items");
    cacheSize = ConfigurationManager.getIntProperty("webui.feed.cache.size");
    // A cache size of zero (or less) leaves caching disabled.
    if (cacheSize > 0)
    {
        feedCache = new HashMap();
        cacheAge = ConfigurationManager.getIntProperty("webui.feed.cache.age");
    }
}
/**
 * Handles a GET for a syndication feed. The request path is expected to
 * look like /&lt;feedType&gt;/&lt;handle&gt;; the special handle "site"
 * requests a site-wide feed. Serves the feed from the in-memory cache when
 * the cached copy is still fresh (or no items changed since it was built),
 * otherwise regenerates and re-caches it.
 */
protected void doDSGet(Context context, HttpServletRequest request,
        HttpServletResponse response) throws ServletException, IOException,
        SQLException, AuthorizeException
{
    String path = request.getPathInfo();
    String feedType = null;
    String handle = null;
    if (labels == null)
    {
        // Get access to the localized resource bundle.
        // NOTE(review): lazy init of a static from the first request's
        // Locale means that locale wins for all later users — confirm
        // this is intended.
        Locale locale = request.getLocale();
        labels = ResourceBundle.getBundle("Messages", locale);
    }
    if (path != null)
    {
        // substring(1) removes the initial '/'
        path = path.substring(1);
        int split = path.indexOf("/");
        if (split != -1)
        {
            feedType = path.substring(0, split);
            handle = path.substring(split + 1);
        }
    }
    DSpaceObject dso = null;
    // As long as this is not a site-wide feed, attempt to retrieve the
    // Collection or Community object.
    // FIX: constant-first equals — a malformed path leaves handle null,
    // and handle.equals(SITE_FEED_KEY) threw a NullPointerException.
    if (!SITE_FEED_KEY.equals(handle))
    {
        // Determine if handle is a valid reference
        dso = HandleManager.resolveToObject(context, handle);
    }
    if (!enabled || (dso != null &&
        (dso.getType() != Constants.COLLECTION && dso.getType() != Constants.COMMUNITY)))
    {
        log.info(LogManager.getHeader(context, "invalid_id", "path=" + path));
        JSPManager.showInvalidIDError(request, response, path, -1);
        return;
    }
    // Determine if requested format is supported
    if (feedType == null || !formats.contains(feedType))
    {
        log.info(LogManager.getHeader(context, "invalid_syndformat", "path=" + path));
        JSPManager.showInvalidIDError(request, response, path, -1);
        return;
    }
    // Lookup or generate the feed
    Channel channel = null;
    if (feedCache != null)
    {
        // Cache key is handle
        CacheFeed cFeed = (CacheFeed) feedCache.get(handle);
        if (cFeed != null) // cache hit, but...
        {
            boolean cacheFeedCurrent = false;
            // FIX: the freshness test was inverted ('<'), which served
            // expired feeds as-is and re-validated fresh ones on every
            // request — the opposite of what the comments describe. A feed
            // is current while its timestamp is within cacheAge hours.
            if (cFeed.timeStamp + (cacheAge * HOUR_MSECS) > System.currentTimeMillis())
            {
                cacheFeedCurrent = true;
            }
            // Expired — but if no items changed since the feed was created
            // or last checked, re-stamp it and keep using it.
            else if (!itemsChanged(context, dso, cFeed.timeStamp))
            {
                cFeed.timeStamp = System.currentTimeMillis();
                cacheFeedCurrent = true;
            }
            if (cacheFeedCurrent)
            {
                channel = cFeed.access();
            }
        }
    }
    // either not caching, not found in cache, or feed in cache not current
    if (channel == null)
    {
        channel = generateFeed(context, dso);
        if (feedCache != null)
        {
            cache(handle, new CacheFeed(channel));
        }
    }
    // set the feed to the requested type & return it
    channel.setFeedType(feedType);
    WireFeedOutput feedWriter = new WireFeedOutput();
    try
    {
        response.setContentType("text/xml; charset=UTF-8");
        feedWriter.output(channel, response.getWriter());
    }
    catch (FeedException fex)
    {
        throw new IOException(fex.getMessage());
    }
}
/**
 * Determines whether any item in the given scope changed since the supplied
 * timestamp, by asking the harvester for at most one hit in the window
 * [timeStamp, now].
 */
private boolean itemsChanged(Context context, DSpaceObject dso, long timeStamp)
    throws SQLException
{
    // Render both window endpoints as ISO 8601 dates; the first ten
    // characters of a DCDate string carry the date without the time.
    String windowStart = new DCDate(new Date(timeStamp)).toString().substring(0, 10);
    String windowEnd = new DCDate(new Date(System.currentTimeMillis())).toString().substring(0, 10);
    try
    {
        // A single harvested record is enough to prove a change occurred.
        return Harvest.harvest(context, dso, windowStart, windowEnd,
                0, 1, false, false, false).size() > 0;
    }
    catch (ParseException pe)
    {
        // Cannot happen: both date strings were generated just above.
        return false;
    }
}
/**
* Generate a syndication feed for a collection or community
* or community
*
* @param context the DSpace context object
*
* @param dso DSpace object - collection or community
*
* @return an object representing the feed
*/
private Channel generateFeed(Context context, DSpaceObject dso)
throws IOException, SQLException
{
try
{
// container-level elements
String dspaceUrl = ConfigurationManager.getProperty("dspace.url");
String type = null;
String description = null;
String title = null;
Bitstream logo = null;
// browse scope
// BrowseScope scope = new BrowseScope(context);
// new method of doing the browse:
String idx = ConfigurationManager.getProperty("recent.submissions.sort-option");
if (idx == null)
{
throw new IOException("There is no configuration supplied for: recent.submissions.sort-option");
}
BrowseIndex bix = BrowseIndex.getItemBrowseIndex();
if (bix == null)
{
throw new IOException("There is no browse index with the name: " + idx);
}
BrowserScope scope = new BrowserScope(context);
scope.setBrowseIndex(bix);
for (SortOption so : SortOption.getSortOptions())
{
if (so.getName().equals(idx))
scope.setSortBy(so.getNumber());
}
scope.setOrder(SortOption.DESCENDING);
// the feed
Channel channel = new Channel();
//Special Case: if DSpace Object passed in is null,
//generate a feed for the entire DSpace site!
if(dso == null)
{
channel.setTitle(ConfigurationManager.getProperty("dspace.name"));
channel.setLink(dspaceUrl);
channel.setDescription(labels.getString(clazz + ".general-feed.description"));
}
else //otherwise, this is a Collection or Community specific feed
{
if (dso.getType() == Constants.COLLECTION)
{
type = labels.getString(clazz + ".feed-type.collection");
Collection col = (Collection)dso;
description = col.getMetadata("short_description");
title = col.getMetadata("name");
logo = col.getLogo();
// scope.setScope(col);
scope.setBrowseContainer(col);
}
else if (dso.getType() == Constants.COMMUNITY)
{
type = labels.getString(clazz + ".feed-type.community");
Community comm = (Community)dso;
description = comm.getMetadata("short_description");
title = comm.getMetadata("name");
logo = comm.getLogo();
// scope.setScope(comm);
scope.setBrowseContainer(comm);
}
String objectUrl = ConfigurationManager.getBooleanProperty("webui.feed.localresolve")
? HandleManager.resolveToURL(context, dso.getHandle())
: HandleManager.getCanonicalForm(dso.getHandle());
// put in container-level data
channel.setDescription(description);
channel.setLink(objectUrl);
//build channel title by passing in type and title
String channelTitle = MessageFormat.format(labels.getString(clazz + ".feed.title"),
new Object[]{type, title});
channel.setTitle(channelTitle);
//if collection or community has a logo
if (logo != null)
{
// we use the path to the logo for this, the logo itself cannot
// be contained in the rdf. Not all RSS-viewers show this logo.
Image image = new Image();
image.setLink(objectUrl);
image.setTitle(labels.getString(clazz + ".logo.title"));
image.setUrl(dspaceUrl + "/retrieve/" + logo.getID());
channel.setImage(image);
}
}
// this is a direct link to the search-engine of dspace. It searches
// in the current collection. Since the current version of DSpace
// can't search within collections anymore, this works only in older
// version until this bug is fixed.
TextInput input = new TextInput();
input.setLink(dspaceUrl + "/simple-search");
input.setDescription( labels.getString(clazz + ".search.description") );
String searchTitle = "";
//if a "type" of feed was specified, build search title off that
if(type!=null)
{
searchTitle = MessageFormat.format(labels.getString(clazz + ".search.title"),
new Object[]{type});
}
else //otherwise, default to a more generic search title
{
searchTitle = labels.getString(clazz + ".search.title.default");
}
input.setTitle(searchTitle);
input.setName(labels.getString(clazz + ".search.name"));
channel.setTextInput(input);
// gather & add items to the feed.
scope.setResultsPerPage(itemCount);
BrowseEngine be = new BrowseEngine(context);
BrowseInfo bi = be.browseMini(scope);
Item[] results = bi.getItemResults(context);
List items = new ArrayList();
for (int i = 0; i < results.length; i++)
{
items.add(itemFromDSpaceItem(context, results[i]));
}
channel.setItems(items);
// If the description is null, replace it with an empty string
// to avoid a FeedException
if (channel.getDescription() == null)
channel.setDescription("");
return channel;
}
catch (SortException se)
{
log.error("caught exception: ", se);
- throw new IOException(se);
+ throw new IOException(se.getMessage());
}
catch (BrowseException e)
{
log.error("caught exception: ", e);
throw new IOException(e.getMessage());
}
}
/**
 * Builds a single syndication feed entry from the metadata of the given
 * DSpace item. The title and date come from configurable metadata fields
 * (defaulting to dc.title and dc.date.issued); the description is assembled
 * from the configured list of description fields.
 *
 * @param context DSpace context object
 * @param dspaceItem DSpace Item
 * @return an object representing a feed entry
 */
private com.sun.syndication.feed.rss.Item itemFromDSpaceItem(Context context,
                                                             Item dspaceItem)
    throws SQLException
{
    com.sun.syndication.feed.rss.Item rssItem =
        new com.sun.syndication.feed.rss.Item();
    // get the title and date fields (configurable, with DC defaults)
    String titleField = ConfigurationManager.getProperty("webui.feed.item.title");
    if (titleField == null)
    {
        titleField = "dc.title";
    }
    String dateField = ConfigurationManager.getProperty("webui.feed.item.date");
    if (dateField == null)
    {
        dateField = "dc.date.issued";
    }
    // Set item handle (local URL or canonical form, per configuration)
    String itHandle = ConfigurationManager.getBooleanProperty("webui.feed.localresolve")
        ? HandleManager.resolveToURL(context, dspaceItem.getHandle())
        : HandleManager.getCanonicalForm(dspaceItem.getHandle());
    rssItem.setLink(itHandle);
    // get first title; fall back to a localized "no title" label
    String title = null;
    try
    {
        title = dspaceItem.getMetadata(titleField)[0].value;
    }
    catch (ArrayIndexOutOfBoundsException e)
    {
        title = labels.getString(clazz + ".notitle");
    }
    rssItem.setTitle(title);
    // We put some metadata in the description field. This field is
    // displayed by most RSS viewers
    String descriptionFields = ConfigurationManager
            .getProperty("webui.feed.item.description");
    if (descriptionFields == null)
    {
        descriptionFields = defaultDescriptionFields;
    }
    // loop through all the metadata fields to put in the description
    StringBuffer descBuf = new StringBuffer();
    StringTokenizer st = new StringTokenizer(descriptionFields, ",");
    while (st.hasMoreTokens())
    {
        String field = st.nextToken().trim();
        boolean isDate = false;
        // A "(date)" suffix on the field name requests date rendering
        if (field.indexOf("(date)") > 0)
        {
            field = field.replaceAll("\\(date\\)", "");
            isDate = true;
        }
        // print out this field, along with its value(s)
        DCValue[] values = dspaceItem.getMetadata(field);
        if (values != null && values.length > 0)
        {
            // as long as there is already something in the description
            // buffer, print out a few line breaks before the next field
            if (descBuf.length() > 0)
            {
                descBuf.append("\n<br/>");
                descBuf.append("\n<br/>");
            }
            // Prefix with a localized field label when one is defined
            String fieldLabel = null;
            try
            {
                fieldLabel = labels.getString("metadata." + field);
            }
            catch (java.util.MissingResourceException e) {}
            if (fieldLabel != null && fieldLabel.length() > 0)
                descBuf.append(fieldLabel + ": ");
            // Semicolon-separate multiple values of the same field
            for (int i = 0; i < values.length; i++)
            {
                String fieldValue = values[i].value;
                if (isDate)
                    fieldValue = (new DCDate(fieldValue)).toString();
                descBuf.append(fieldValue);
                if (i < values.length - 1)
                {
                    descBuf.append("; ");
                }
            }
        }
    } // end while
    Description descrip = new Description();
    descrip.setValue(descBuf.toString());
    rssItem.setDescription(descrip);
    // set date field; items without one simply get no pubDate
    String dcDate = null;
    try
    {
        dcDate = dspaceItem.getMetadata(dateField)[0].value;
    }
    catch (ArrayIndexOutOfBoundsException e)
    {
    }
    if (dcDate != null)
    {
        rssItem.setPubDate((new DCDate(dcDate)).toDate());
    }
    return rssItem;
}
/************************************************
* private cache management classes and methods *
************************************************/
/**
 * Adds a feed to the cache. When the cache is already full, the entry with
 * the fewest hits is evicted first to make room, and a usage profile is
 * logged to help the administrator tune the cache settings.
 *
 * @param feedKey
 *            The cache key for the feed
 * @param newFeed
 *            The CacheFeed feed to be cached
 */
private static void cache(String feedKey, CacheFeed newFeed)
{
    // remove an older feed to make room if the cache is full
    if (feedCache.size() >= cacheSize)
    {
        int totalHits = 0;
        String evictKey = null;
        CacheFeed leastUsed = null;
        CacheFeed mostUsed = null;
        Iterator keys = feedCache.keySet().iterator();
        while (keys.hasNext())
        {
            String candidateKey = (String) keys.next();
            CacheFeed candidate = (CacheFeed) feedCache.get(candidateKey);
            if (evictKey == null)
            {
                // First entry seeds both extremes.
                evictKey = candidateKey;
                leastUsed = mostUsed = candidate;
            }
            else
            {
                if (candidate.hits < leastUsed.hits)
                {
                    evictKey = candidateKey;
                    leastUsed = candidate;
                }
                if (candidate.hits >= mostUsed.hits)
                {
                    mostUsed = candidate;
                }
            }
            totalHits += candidate.hits;
        }
        // log a profile of the cache to assist administrator in tuning it
        int avg = totalHits / feedCache.size();
        String logMsg = "feedCache() - size: " + feedCache.size() +
                        " Hits - total: " + totalHits + " avg: " + avg +
                        " max: " + mostUsed.hits + " min: " + leastUsed.hits;
        log.info(logMsg);
        // remove the minimum-hits entry
        feedCache.remove(evictKey);
    }
    // add feed to cache
    feedCache.put(feedKey, newFeed);
}
/**
 * Class to instrument accesses & currency of a given feed in cache.
 */
private class CacheFeed
{
    // timestamp of creation (reset on a successful currency re-check)
    public long timeStamp = 0L;
    // number of times this entry has been served from the cache
    public int hits = 0;
    // the cached feed itself
    private Channel feed = null;

    public CacheFeed(Channel feed)
    {
        this.feed = feed;
        timeStamp = System.currentTimeMillis();
    }

    // Record a cache hit and return the cached feed.
    public Channel access()
    {
        ++hits;
        return feed;
    }
}
}
| true
| true
|
/**
 * Generate a syndication feed for a collection or community.
 *
 * @param context the DSpace context object
 * @param dso DSpace object — collection or community, or null for a
 *            site-wide feed
 * @return an object representing the feed
 * @throws IOException if feed configuration is missing or browsing fails
 */
private Channel generateFeed(Context context, DSpaceObject dso)
    throws IOException, SQLException
{
    try
    {
        // container-level elements
        String dspaceUrl = ConfigurationManager.getProperty("dspace.url");
        String type = null;
        String description = null;
        String title = null;
        Bitstream logo = null;
        // Recent submissions are gathered through the browse system,
        // ordered by the configured sort option, most recent first.
        String idx = ConfigurationManager.getProperty("recent.submissions.sort-option");
        if (idx == null)
        {
            throw new IOException("There is no configuration supplied for: recent.submissions.sort-option");
        }
        BrowseIndex bix = BrowseIndex.getItemBrowseIndex();
        if (bix == null)
        {
            throw new IOException("There is no browse index with the name: " + idx);
        }
        BrowserScope scope = new BrowserScope(context);
        scope.setBrowseIndex(bix);
        for (SortOption so : SortOption.getSortOptions())
        {
            if (so.getName().equals(idx))
                scope.setSortBy(so.getNumber());
        }
        scope.setOrder(SortOption.DESCENDING);
        // the feed
        Channel channel = new Channel();
        // Special case: a null DSpace object means a site-wide feed.
        if (dso == null)
        {
            channel.setTitle(ConfigurationManager.getProperty("dspace.name"));
            channel.setLink(dspaceUrl);
            channel.setDescription(labels.getString(clazz + ".general-feed.description"));
        }
        else // otherwise, this is a Collection or Community specific feed
        {
            if (dso.getType() == Constants.COLLECTION)
            {
                type = labels.getString(clazz + ".feed-type.collection");
                Collection col = (Collection) dso;
                description = col.getMetadata("short_description");
                title = col.getMetadata("name");
                logo = col.getLogo();
                scope.setBrowseContainer(col);
            }
            else if (dso.getType() == Constants.COMMUNITY)
            {
                type = labels.getString(clazz + ".feed-type.community");
                Community comm = (Community) dso;
                description = comm.getMetadata("short_description");
                title = comm.getMetadata("name");
                logo = comm.getLogo();
                scope.setBrowseContainer(comm);
            }
            String objectUrl = ConfigurationManager.getBooleanProperty("webui.feed.localresolve")
                ? HandleManager.resolveToURL(context, dso.getHandle())
                : HandleManager.getCanonicalForm(dso.getHandle());
            // put in container-level data
            channel.setDescription(description);
            channel.setLink(objectUrl);
            // build channel title by passing in type and title
            String channelTitle = MessageFormat.format(labels.getString(clazz + ".feed.title"),
                                                       new Object[]{type, title});
            channel.setTitle(channelTitle);
            // if collection or community has a logo
            if (logo != null)
            {
                // we use the path to the logo for this, the logo itself cannot
                // be contained in the rdf. Not all RSS-viewers show this logo.
                Image image = new Image();
                image.setLink(objectUrl);
                image.setTitle(labels.getString(clazz + ".logo.title"));
                image.setUrl(dspaceUrl + "/retrieve/" + logo.getID());
                channel.setImage(image);
            }
        }
        // this is a direct link to the search-engine of dspace. It searches
        // in the current collection. Since the current version of DSpace
        // can't search within collections anymore, this works only in older
        // version until this bug is fixed.
        TextInput input = new TextInput();
        input.setLink(dspaceUrl + "/simple-search");
        input.setDescription(labels.getString(clazz + ".search.description"));
        String searchTitle = "";
        // if a "type" of feed was specified, build search title off that
        if (type != null)
        {
            searchTitle = MessageFormat.format(labels.getString(clazz + ".search.title"),
                                               new Object[]{type});
        }
        else // otherwise, default to a more generic search title
        {
            searchTitle = labels.getString(clazz + ".search.title.default");
        }
        input.setTitle(searchTitle);
        input.setName(labels.getString(clazz + ".search.name"));
        channel.setTextInput(input);
        // gather & add items to the feed.
        scope.setResultsPerPage(itemCount);
        BrowseEngine be = new BrowseEngine(context);
        BrowseInfo bi = be.browseMini(scope);
        Item[] results = bi.getItemResults(context);
        List items = new ArrayList();
        for (int i = 0; i < results.length; i++)
        {
            items.add(itemFromDSpaceItem(context, results[i]));
        }
        channel.setItems(items);
        // If the description is null, replace it with an empty string
        // to avoid a FeedException
        if (channel.getDescription() == null)
            channel.setDescription("");
        return channel;
    }
    catch (SortException se)
    {
        log.error("caught exception: ", se);
        // FIX: 'new IOException(se)' uses the IOException(Throwable)
        // constructor, which only exists from Java 6 on and is inconsistent
        // with the BrowseException handler below; use the message form.
        throw new IOException(se.getMessage());
    }
    catch (BrowseException e)
    {
        log.error("caught exception: ", e);
        throw new IOException(e.getMessage());
    }
}
|
private Channel generateFeed(Context context, DSpaceObject dso)
throws IOException, SQLException
{
try
{
// container-level elements
String dspaceUrl = ConfigurationManager.getProperty("dspace.url");
String type = null;
String description = null;
String title = null;
Bitstream logo = null;
// browse scope
// BrowseScope scope = new BrowseScope(context);
// new method of doing the browse:
String idx = ConfigurationManager.getProperty("recent.submissions.sort-option");
if (idx == null)
{
throw new IOException("There is no configuration supplied for: recent.submissions.sort-option");
}
BrowseIndex bix = BrowseIndex.getItemBrowseIndex();
if (bix == null)
{
throw new IOException("There is no browse index with the name: " + idx);
}
BrowserScope scope = new BrowserScope(context);
scope.setBrowseIndex(bix);
for (SortOption so : SortOption.getSortOptions())
{
if (so.getName().equals(idx))
scope.setSortBy(so.getNumber());
}
scope.setOrder(SortOption.DESCENDING);
// the feed
Channel channel = new Channel();
//Special Case: if DSpace Object passed in is null,
//generate a feed for the entire DSpace site!
if(dso == null)
{
channel.setTitle(ConfigurationManager.getProperty("dspace.name"));
channel.setLink(dspaceUrl);
channel.setDescription(labels.getString(clazz + ".general-feed.description"));
}
else //otherwise, this is a Collection or Community specific feed
{
if (dso.getType() == Constants.COLLECTION)
{
type = labels.getString(clazz + ".feed-type.collection");
Collection col = (Collection)dso;
description = col.getMetadata("short_description");
title = col.getMetadata("name");
logo = col.getLogo();
// scope.setScope(col);
scope.setBrowseContainer(col);
}
else if (dso.getType() == Constants.COMMUNITY)
{
type = labels.getString(clazz + ".feed-type.community");
Community comm = (Community)dso;
description = comm.getMetadata("short_description");
title = comm.getMetadata("name");
logo = comm.getLogo();
// scope.setScope(comm);
scope.setBrowseContainer(comm);
}
String objectUrl = ConfigurationManager.getBooleanProperty("webui.feed.localresolve")
? HandleManager.resolveToURL(context, dso.getHandle())
: HandleManager.getCanonicalForm(dso.getHandle());
// put in container-level data
channel.setDescription(description);
channel.setLink(objectUrl);
//build channel title by passing in type and title
String channelTitle = MessageFormat.format(labels.getString(clazz + ".feed.title"),
new Object[]{type, title});
channel.setTitle(channelTitle);
//if collection or community has a logo
if (logo != null)
{
// we use the path to the logo for this, the logo itself cannot
// be contained in the rdf. Not all RSS-viewers show this logo.
Image image = new Image();
image.setLink(objectUrl);
image.setTitle(labels.getString(clazz + ".logo.title"));
image.setUrl(dspaceUrl + "/retrieve/" + logo.getID());
channel.setImage(image);
}
}
// this is a direct link to the search-engine of dspace. It searches
// in the current collection. Since the current version of DSpace
// can't search within collections anymore, this works only in older
// version until this bug is fixed.
TextInput input = new TextInput();
input.setLink(dspaceUrl + "/simple-search");
input.setDescription( labels.getString(clazz + ".search.description") );
String searchTitle = "";
//if a "type" of feed was specified, build search title off that
if(type!=null)
{
searchTitle = MessageFormat.format(labels.getString(clazz + ".search.title"),
new Object[]{type});
}
else //otherwise, default to a more generic search title
{
searchTitle = labels.getString(clazz + ".search.title.default");
}
input.setTitle(searchTitle);
input.setName(labels.getString(clazz + ".search.name"));
channel.setTextInput(input);
// gather & add items to the feed.
scope.setResultsPerPage(itemCount);
BrowseEngine be = new BrowseEngine(context);
BrowseInfo bi = be.browseMini(scope);
Item[] results = bi.getItemResults(context);
List items = new ArrayList();
for (int i = 0; i < results.length; i++)
{
items.add(itemFromDSpaceItem(context, results[i]));
}
channel.setItems(items);
// If the description is null, replace it with an empty string
// to avoid a FeedException
if (channel.getDescription() == null)
channel.setDescription("");
return channel;
}
catch (SortException se)
{
log.error("caught exception: ", se);
throw new IOException(se.getMessage());
}
catch (BrowseException e)
{
log.error("caught exception: ", e);
throw new IOException(e.getMessage());
}
}
|
diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/geoPREST/Aligner.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/geoPREST/Aligner.java
index 55083a67a0..b821ab1b83 100644
--- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/geoPREST/Aligner.java
+++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/geoPREST/Aligner.java
@@ -1,337 +1,336 @@
//=============================================================================
//=== Copyright (C) 2001-2013 Food and Agriculture Organization of the
//=== United Nations (FAO-UN), United Nations World Food Programme (WFP)
//=== and United Nations Environment Programme (UNEP)
//===
//=== This program is free software; you can redistribute it and/or modify
//=== it under the terms of the GNU General Public License as published by
//=== the Free Software Foundation; either version 2 of the License, or (at
//=== your option) any later version.
//===
//=== This program is distributed in the hope that it will be useful, but
//=== WITHOUT ANY WARRANTY; without even the implied warranty of
//=== MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
//=== General Public License for more details.
//===
//=== You should have received a copy of the GNU General Public License
//=== along with this program; if not, write to the Free Software
//=== Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
//===
//=== Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2,
//=== Rome - Italy. email: geonetwork@osgeo.org
//==============================================================================
package org.fao.geonet.kernel.harvest.harvester.geoPREST;
import jeeves.interfaces.Logger;
import jeeves.resources.dbms.Dbms;
import jeeves.server.context.ServiceContext;
import jeeves.utils.Xml;
import jeeves.utils.XmlRequest;
import org.fao.geonet.GeonetContext;
import org.fao.geonet.constants.Geonet;
import org.fao.geonet.kernel.DataManager;
import org.fao.geonet.kernel.harvest.BaseAligner;
import org.fao.geonet.kernel.harvest.harvester.CategoryMapper;
import org.fao.geonet.kernel.harvest.harvester.GroupMapper;
import org.fao.geonet.kernel.harvest.harvester.HarvestError;
import org.fao.geonet.kernel.harvest.harvester.HarvestResult;
import org.fao.geonet.kernel.harvest.harvester.RecordInfo;
import org.fao.geonet.kernel.harvest.harvester.UUIDMapper;
import org.jdom.Element;
import java.net.URL;
import java.util.List;
import java.util.Set;
//=============================================================================
public class Aligner extends BaseAligner
{
//--------------------------------------------------------------------------
//---
//--- Constructor
//---
//--------------------------------------------------------------------------
public Aligner(Logger log, ServiceContext sc, Dbms dbms, GeoPRESTParams params) throws Exception {
this.log = log;
this.context = sc;
this.dbms = dbms;
this.params = params;
GeonetContext gc = (GeonetContext) context.getHandlerContext(Geonet.CONTEXT_NAME);
dataMan = gc.getBean(DataManager.class);
result = new HarvestResult();
//--- setup REST operation rest/document?id={uuid}
request = new XmlRequest(new URL(params.baseUrl+"/rest/document"));
}
//--------------------------------------------------------------------------
//---
//--- Alignment method
//---
//--------------------------------------------------------------------------
public HarvestResult align(Set<RecordInfo> records, List<HarvestError> errors) throws Exception { log.info("Start of alignment for : "+ params.name);
//-----------------------------------------------------------------------
//--- retrieve all local categories and groups
//--- retrieve harvested uuids for given harvesting node
localCateg = new CategoryMapper(dbms);
localGroups= new GroupMapper(dbms);
localUuids = new UUIDMapper(dbms, params.uuid);
dbms.commit();
//-----------------------------------------------------------------------
//--- remove old metadata
for (String uuid : localUuids.getUUIDs()) {
if (!exists(records, uuid)) {
String id = localUuids.getID(uuid);
if(log.isDebugEnabled())
log.debug(" - Removing old metadata with local id:"+ id);
dataMan.deleteMetadata(context, dbms, id);
dbms.commit();
result.locallyRemoved++;
}
}
//-----------------------------------------------------------------------
//--- insert/update new metadata
for (RecordInfo ri : records) {
try {
String id = dataMan.getMetadataId(dbms, ri.uuid);
if (id == null) addMetadata(ri);
else updateMetadata(ri, id);
result.totalMetadata++;
}catch (Throwable t) {
errors.add(new HarvestError(t, log));
log.error("Unable to process record from csw (" + this.params.name + ")");
log.error(" Record failed: " + ri.uuid);
}
}
log.info("End of alignment for : "+ params.name);
return result;
}
//--------------------------------------------------------------------------
//---
//--- Private methods : addMetadata
//---
//--------------------------------------------------------------------------
private void addMetadata(RecordInfo ri) throws Exception {
Element md = retrieveMetadata(ri.uuid);
if (md == null) return;
String schema = dataMan.autodetectSchema(md, null);
if (schema == null) {
if (log.isDebugEnabled()) {
log.debug(" - Metadata skipped due to unknown schema. uuid:"+ ri.uuid);
}
result.unknownSchema++;
return;
}
if (log.isDebugEnabled())
log.debug(" - Adding metadata with remote uuid:"+ ri.uuid + " schema:" + schema);
//
// insert metadata
//
int userid = 1;
String group = null, isTemplate = null, docType = null, title = null, category = null;
boolean ufo = false, indexImmediate = false;
String id = dataMan.insertMetadata(context, dbms, schema, md, context.getSerialFactory().getSerial(dbms, "Metadata"), ri.uuid, userid, group, params.uuid, isTemplate, docType, title, category, ri.changeDate, ri.changeDate, ufo, indexImmediate);
int iId = Integer.parseInt(id);
dataMan.setTemplateExt(dbms, iId, "n", null);
dataMan.setHarvestedExt(dbms, iId, params.uuid);
addPrivileges(id, params.getPrivileges(), localGroups, dataMan, context, dbms, log);
addCategories(id, params.getCategories(), localCateg, dataMan, dbms, context, log, null);
dbms.commit();
dataMan.indexMetadata(dbms, id);
result.addedMetadata++;
}
//--------------------------------------------------------------------------
//---
//--- Private methods : updateMetadata
//---
//--------------------------------------------------------------------------
private void updateMetadata(RecordInfo ri, String id) throws Exception
{
String date = localUuids.getChangeDate(ri.uuid);
if (date == null) {
if (log.isDebugEnabled()) {
log.debug(" - Skipped metadata managed by another harvesting node. uuid:"+ ri.uuid +", name:"+ params.name);
}
} else {
if (log.isDebugEnabled()) {
log.debug(" - Comparing date "+date+" with harvested date "+ri.changeDate+" Comparison: "+ri.isMoreRecentThan(date));
}
if (!ri.isMoreRecentThan(date)) {
if (log.isDebugEnabled()) {
log.debug(" - Metadata XML not changed for uuid:"+ ri.uuid);
}
result.unchangedMetadata++;
} else {
if (log.isDebugEnabled()) {
log.debug(" - Updating local metadata for uuid:"+ ri.uuid);
}
Element md = retrieveMetadata(ri.uuid);
if (md == null) return;
//
// update metadata
//
boolean validate = false;
boolean ufo = false;
boolean index = false;
String language = context.getLanguage();
dataMan.updateMetadata(context, dbms, id, md, validate, ufo, index, language, ri.changeDate, false);
dbms.execute("DELETE FROM OperationAllowed WHERE metadataId=?", Integer.parseInt(id));
addPrivileges(id, params.getPrivileges(), localGroups, dataMan, context, dbms, log);
dbms.execute("DELETE FROM MetadataCateg WHERE metadataId=?", Integer.parseInt(id));
addCategories(id, params.getCategories(), localCateg, dataMan, dbms, context, log, null);
dbms.commit();
dataMan.indexMetadata(dbms, id);
result.updatedMetadata++;
}
}
}
//--------------------------------------------------------------------------
//---
//--- Private methods
//---
//--------------------------------------------------------------------------
/**
* Returns true if the uuid is present in the remote node.
*
* @param records
* @param uuid
* @return
*/
private boolean exists(Set<RecordInfo> records, String uuid)
{
for(RecordInfo ri : records) {
if (uuid.equals(ri.uuid)) return true;
}
return false;
}
//--------------------------------------------------------------------------
/**
* Does REST document request. If validation is requested and the metadata
* does not validate, null is returned. If transformation is requested then
* metadata is transformed.
*
* @param uuid uuid of metadata to request
* @return metadata the metadata
*/
private Element retrieveMetadata(String uuid)
{
request.clearParams();
//request.addParam("id","{"+uuid+"}");
request.addParam("id",uuid);
try
{
if (log.isDebugEnabled())
log.debug("Getting record from : "+ request.getHost() +" (uuid:"+ uuid +")");
Element response = null;
try {
response = request.execute();
} catch (Exception e) {
e.printStackTrace();
log.error("Getting record from GeoPortal REST raised exception: "+e.getMessage());
log.error("Sent request "+request.getSentData());
- if (response != null) log.error("Received:\n"+Xml.getString(response));
throw new Exception(e);
}
if(log.isDebugEnabled()) log.debug("Record got:\n"+Xml.getString(response));
// validate it here if requested
if (params.validate) {
if(!dataMan.validate(response)) {
log.info("Ignoring invalid metadata with uuid " + uuid);
result.doesNotValidate++;
return null;
}
}
// transform it here if requested
if (!params.importXslt.equals("none")) {
String thisXslt = context.getAppPath() + Geonet.Path.IMPORT_STYLESHEETS + "/";
thisXslt = thisXslt + params.importXslt;
try {
response = Xml.transform(response, thisXslt);
} catch (Exception e) {
log.info("Cannot transform XML " +Xml.getString(response)+", ignoring. Error was: "+e.getMessage());
result.badFormat++;
return null;
}
}
return response;
}
catch(Exception e)
{
log.warning("Raised exception while getting record : "+ e);
e.printStackTrace();
result.unretrievable++;
//--- we don't raise any exception here. Just try to go on
return null;
}
}
//--------------------------------------------------------------------------
//---
//--- Variables
//---
//--------------------------------------------------------------------------
private Logger log;
private ServiceContext context;
private Dbms dbms;
private XmlRequest request;
private GeoPRESTParams params;
private DataManager dataMan;
private CategoryMapper localCateg;
private GroupMapper localGroups;
private UUIDMapper localUuids;
private HarvestResult result;
}
//=============================================================================
| true
| true
|
private Element retrieveMetadata(String uuid)
{
request.clearParams();
//request.addParam("id","{"+uuid+"}");
request.addParam("id",uuid);
try
{
if (log.isDebugEnabled())
log.debug("Getting record from : "+ request.getHost() +" (uuid:"+ uuid +")");
Element response = null;
try {
response = request.execute();
} catch (Exception e) {
e.printStackTrace();
log.error("Getting record from GeoPortal REST raised exception: "+e.getMessage());
log.error("Sent request "+request.getSentData());
if (response != null) log.error("Received:\n"+Xml.getString(response));
throw new Exception(e);
}
if(log.isDebugEnabled()) log.debug("Record got:\n"+Xml.getString(response));
// validate it here if requested
if (params.validate) {
if(!dataMan.validate(response)) {
log.info("Ignoring invalid metadata with uuid " + uuid);
result.doesNotValidate++;
return null;
}
}
// transform it here if requested
if (!params.importXslt.equals("none")) {
String thisXslt = context.getAppPath() + Geonet.Path.IMPORT_STYLESHEETS + "/";
thisXslt = thisXslt + params.importXslt;
try {
response = Xml.transform(response, thisXslt);
} catch (Exception e) {
log.info("Cannot transform XML " +Xml.getString(response)+", ignoring. Error was: "+e.getMessage());
result.badFormat++;
return null;
}
}
return response;
}
catch(Exception e)
{
log.warning("Raised exception while getting record : "+ e);
e.printStackTrace();
result.unretrievable++;
//--- we don't raise any exception here. Just try to go on
return null;
}
}
|
private Element retrieveMetadata(String uuid)
{
request.clearParams();
//request.addParam("id","{"+uuid+"}");
request.addParam("id",uuid);
try
{
if (log.isDebugEnabled())
log.debug("Getting record from : "+ request.getHost() +" (uuid:"+ uuid +")");
Element response = null;
try {
response = request.execute();
} catch (Exception e) {
e.printStackTrace();
log.error("Getting record from GeoPortal REST raised exception: "+e.getMessage());
log.error("Sent request "+request.getSentData());
throw new Exception(e);
}
if(log.isDebugEnabled()) log.debug("Record got:\n"+Xml.getString(response));
// validate it here if requested
if (params.validate) {
if(!dataMan.validate(response)) {
log.info("Ignoring invalid metadata with uuid " + uuid);
result.doesNotValidate++;
return null;
}
}
// transform it here if requested
if (!params.importXslt.equals("none")) {
String thisXslt = context.getAppPath() + Geonet.Path.IMPORT_STYLESHEETS + "/";
thisXslt = thisXslt + params.importXslt;
try {
response = Xml.transform(response, thisXslt);
} catch (Exception e) {
log.info("Cannot transform XML " +Xml.getString(response)+", ignoring. Error was: "+e.getMessage());
result.badFormat++;
return null;
}
}
return response;
}
catch(Exception e)
{
log.warning("Raised exception while getting record : "+ e);
e.printStackTrace();
result.unretrievable++;
//--- we don't raise any exception here. Just try to go on
return null;
}
}
|
diff --git a/src/main/java/de/minestar/FifthElement/commands/warp/cmdWarpList.java b/src/main/java/de/minestar/FifthElement/commands/warp/cmdWarpList.java
index fc87e17..d51664e 100644
--- a/src/main/java/de/minestar/FifthElement/commands/warp/cmdWarpList.java
+++ b/src/main/java/de/minestar/FifthElement/commands/warp/cmdWarpList.java
@@ -1,211 +1,211 @@
/*
* Copyright (C) 2012 MineStar.de
*
* This file is part of FifthElement.
*
* FifthElement is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, version 3 of the License.
*
* FifthElement is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with FifthElement. If not, see <http://www.gnu.org/licenses/>.
*/
package de.minestar.FifthElement.commands.warp;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import org.bukkit.ChatColor;
import org.bukkit.entity.Player;
import de.minestar.FifthElement.core.Core;
import de.minestar.FifthElement.core.Settings;
import de.minestar.FifthElement.data.Warp;
import de.minestar.FifthElement.data.filter.OwnerFilter;
import de.minestar.FifthElement.data.filter.PrivateFilter;
import de.minestar.FifthElement.data.filter.PublicFilter;
import de.minestar.FifthElement.data.filter.UseFilter;
import de.minestar.FifthElement.data.filter.WarpFilter;
import de.minestar.FifthElement.statistics.warp.WarpListStat;
import de.minestar.minestarlibrary.stats.StatisticHandler;
import de.minestar.minestarlibrary.commands.AbstractExtendedCommand;
import de.minestar.minestarlibrary.utils.PlayerUtils;
public class cmdWarpList extends AbstractExtendedCommand {
public cmdWarpList(String syntax, String arguments, String node) {
super(Core.NAME, syntax, arguments, node);
}
@Override
public void execute(String[] args, Player player) {
List<WarpFilter> filterList = new ArrayList<WarpFilter>();
int pageNumber = 1;
filterList.add(new UseFilter(player));
// APPLY FILTER
if (args.length > 0) {
for (int i = 0; i < args.length; ++i) {
String arg = args[i];
// PAGE NUMBER
if (arg.equalsIgnoreCase("-page")) {
// NEXT PARAMETER MUST EXIST
if (i < args.length - 1) {
try {
pageNumber = Integer.valueOf(args[++i]);
// NEGATIVE PAGE NUMBER
if (pageNumber <= 0) {
PlayerUtils.sendError(player, pluginName, "Die Seitenzahl muss gr��er 0 sein!");
return;
}
} catch (Exception e) {
// NOT A VALID NUMBER
PlayerUtils.sendError(player, pluginName, args[i] + " ist keine g�ltige Seitenzahl!");
return;
}
} else {
PlayerUtils.sendError(player, pluginName, "Es fehlt bei '-page' die Seitenzahl!");
return;
}
}
// DISPLAY OWN CREATED WARPS
else if (arg.equalsIgnoreCase("-created")) {
filterList.add(new OwnerFilter(player.getName()));
}
// DISPLAY USEABLE PRIVATE WARPS
else if (arg.equalsIgnoreCase("-private")) {
filterList.add(PrivateFilter.getInstance());
}
// DISPLAY PUBLIC WARPS
else if (arg.equalsIgnoreCase("-public")) {
filterList.add(PublicFilter.getInstance());
}
// DISPLAY WARPS FROM A SPECIFIC PLAYER WHICH THE COMMAND CALLER
// CAN USE
else if (arg.equalsIgnoreCase("-player")) {
String targetName = null;
// AFTER -player THERE MUST BE A PLAYER NAME
if (i < args.length - 1) {
targetName = PlayerUtils.getCorrectPlayerName(args[++i]);
// PLAYER NOT FOUND
if (targetName == null) {
PlayerUtils.sendError(player, pluginName, "Der Spieler '" + args[i] + "' wurde nicht gefunden!");
return;
}
filterList.add(new OwnerFilter(targetName));
} else {
PlayerUtils.sendError(player, pluginName, "Es fehlt bei '-player' der Name des Spielers!");
return;
}
}
}
}
// GET WARPS
List<Warp> results = Core.warpManager.filterWarps(filterList);
// NO WARPS FOUND
if (results.isEmpty()) {
PlayerUtils.sendError(player, pluginName, "Keine Ergebnisse gefunden mit folgendem Filter:");
PlayerUtils.sendError(player, pluginName, filterList.toString());
return;
}
int resultSize = results.size();
// GET THE SINGLE PAGE
int pageSize = Settings.getPageSize();
int fromIndex = pageSize * (pageNumber - 1);
if (fromIndex >= results.size()) {
PlayerUtils.sendError(player, pluginName, "Zu hohe Seitenzahl!");
return;
}
int toIndex = fromIndex + pageSize;
if (toIndex > results.size())
toIndex = results.size();
- int maxNumber = (results.size() / Settings.getPageSize()) + 1;
+ int maxNumber = (int) Math.ceil((double) results.size() / (double) Settings.getPageSize());
results = results.subList(fromIndex, toIndex);
Collections.sort(results, PUBLIC_PRIVATE_SORT);
displayList(results, player, pageNumber, maxNumber, filterList);
// FIRE STATISTIC
StatisticHandler.handleStatistic(new WarpListStat(player.getName(), resultSize, filterList));
}
// USED FOR SORTING PUBLIC AND PRIVATE WARPS
private final static Comparator<Warp> PUBLIC_PRIVATE_SORT = new Comparator<Warp>() {
@Override
public int compare(Warp o1, Warp o2) {
if (!o1.isPublic() && !o2.isPublic()) {
if (o1.getOwner().equals(o2.getOwner())) {
return o1.getName().compareTo(o2.getName());
} else
return o1.getOwner().compareTo(o2.getOwner());
}
if (!o1.isPublic() && o2.isPublic()) {
return -1;
} else
return 1;
}
};
private final static String SEPERATOR = ChatColor.WHITE + "----------------------------------------";
private final static ChatColor NAME_COLOR = ChatColor.GREEN;
private final static ChatColor VALUE_COLOR = ChatColor.GRAY;
private void displayList(List<Warp> list, Player player, int pageNumber, int maxNumber, List<WarpFilter> filter) {
// HEAD
PlayerUtils.sendInfo(player, SEPERATOR);
PlayerUtils.sendInfo(player, String.format("%s %s", NAME_COLOR + "Seite:", VALUE_COLOR + Integer.toString(pageNumber)) + "/" + Integer.toString(maxNumber));
PlayerUtils.sendInfo(player, String.format("%s %s", NAME_COLOR + "Filter:", VALUE_COLOR + filter.toString()));
PlayerUtils.sendInfo(player, SEPERATOR);
// GET WARP INDEX TO START WITH
int index = ((pageNumber - 1) * Settings.getPageSize()) + 1;
if (index < 0)
index = 1;
// HEAD FOR PUBLIC WARPS
if (!list.get(0).isPublic())
PlayerUtils.sendInfo(player, String.format("%s %s", NAME_COLOR + "Private Warps", ""));
boolean priv = false;
ChatColor color = null;
// DISPLAY WARPS
for (Warp warp : list) {
// SEPERATE PUBLIC AND PRIVATE WARPS
if (!priv && warp.isPublic()) {
priv = true;
PlayerUtils.sendInfo(player, String.format("%s %s", NAME_COLOR + "�ffentliche Warps", ""));
}
// COLORS FOR WARPS
// PUBLIC WARPS
if (warp.isPublic())
color = Settings.getWarpListPublic();
// OWNED WARPS
else if (warp.isOwner(player))
color = Settings.getWarpListOwned();
// INVITED TO PRIVATE WARPS
else
color = Settings.getWarpListPrivate();
PlayerUtils.sendInfo(player, String.format("%s%s %s%s %s(%s%s)", NAME_COLOR + "#", VALUE_COLOR + Integer.toString(index++), color, warp.getName(), NAME_COLOR, VALUE_COLOR + warp.getOwner(), NAME_COLOR));
}
}
}
| true
| true
|
public void execute(String[] args, Player player) {
List<WarpFilter> filterList = new ArrayList<WarpFilter>();
int pageNumber = 1;
filterList.add(new UseFilter(player));
// APPLY FILTER
if (args.length > 0) {
for (int i = 0; i < args.length; ++i) {
String arg = args[i];
// PAGE NUMBER
if (arg.equalsIgnoreCase("-page")) {
// NEXT PARAMETER MUST EXIST
if (i < args.length - 1) {
try {
pageNumber = Integer.valueOf(args[++i]);
// NEGATIVE PAGE NUMBER
if (pageNumber <= 0) {
PlayerUtils.sendError(player, pluginName, "Die Seitenzahl muss gr��er 0 sein!");
return;
}
} catch (Exception e) {
// NOT A VALID NUMBER
PlayerUtils.sendError(player, pluginName, args[i] + " ist keine g�ltige Seitenzahl!");
return;
}
} else {
PlayerUtils.sendError(player, pluginName, "Es fehlt bei '-page' die Seitenzahl!");
return;
}
}
// DISPLAY OWN CREATED WARPS
else if (arg.equalsIgnoreCase("-created")) {
filterList.add(new OwnerFilter(player.getName()));
}
// DISPLAY USEABLE PRIVATE WARPS
else if (arg.equalsIgnoreCase("-private")) {
filterList.add(PrivateFilter.getInstance());
}
// DISPLAY PUBLIC WARPS
else if (arg.equalsIgnoreCase("-public")) {
filterList.add(PublicFilter.getInstance());
}
// DISPLAY WARPS FROM A SPECIFIC PLAYER WHICH THE COMMAND CALLER
// CAN USE
else if (arg.equalsIgnoreCase("-player")) {
String targetName = null;
// AFTER -player THERE MUST BE A PLAYER NAME
if (i < args.length - 1) {
targetName = PlayerUtils.getCorrectPlayerName(args[++i]);
// PLAYER NOT FOUND
if (targetName == null) {
PlayerUtils.sendError(player, pluginName, "Der Spieler '" + args[i] + "' wurde nicht gefunden!");
return;
}
filterList.add(new OwnerFilter(targetName));
} else {
PlayerUtils.sendError(player, pluginName, "Es fehlt bei '-player' der Name des Spielers!");
return;
}
}
}
}
// GET WARPS
List<Warp> results = Core.warpManager.filterWarps(filterList);
// NO WARPS FOUND
if (results.isEmpty()) {
PlayerUtils.sendError(player, pluginName, "Keine Ergebnisse gefunden mit folgendem Filter:");
PlayerUtils.sendError(player, pluginName, filterList.toString());
return;
}
int resultSize = results.size();
// GET THE SINGLE PAGE
int pageSize = Settings.getPageSize();
int fromIndex = pageSize * (pageNumber - 1);
if (fromIndex >= results.size()) {
PlayerUtils.sendError(player, pluginName, "Zu hohe Seitenzahl!");
return;
}
int toIndex = fromIndex + pageSize;
if (toIndex > results.size())
toIndex = results.size();
int maxNumber = (results.size() / Settings.getPageSize()) + 1;
results = results.subList(fromIndex, toIndex);
Collections.sort(results, PUBLIC_PRIVATE_SORT);
displayList(results, player, pageNumber, maxNumber, filterList);
// FIRE STATISTIC
StatisticHandler.handleStatistic(new WarpListStat(player.getName(), resultSize, filterList));
}
|
    /**
     * Handles the warp-list command: parses the filter and paging flags,
     * queries the warp manager, and prints one page of matching warps to the
     * caller.
     *
     * Supported flags: {@code -page <n>}, {@code -created}, {@code -private},
     * {@code -public}, {@code -player <name>}.
     *
     * @param args   raw command arguments (flags, possibly followed by values)
     * @param player the command caller; receives all output
     */
    public void execute(String[] args, Player player) {
        List<WarpFilter> filterList = new ArrayList<WarpFilter>();
        int pageNumber = 1;
        // Base filter: only warps the caller is allowed to use.
        filterList.add(new UseFilter(player));
        // APPLY FILTER
        if (args.length > 0) {
            for (int i = 0; i < args.length; ++i) {
                String arg = args[i];
                // PAGE NUMBER
                if (arg.equalsIgnoreCase("-page")) {
                    // NEXT PARAMETER MUST EXIST
                    // (the value argument is consumed here via ++i)
                    if (i < args.length - 1) {
                        try {
                            pageNumber = Integer.valueOf(args[++i]);
                            // NEGATIVE PAGE NUMBER
                            if (pageNumber <= 0) {
                                PlayerUtils.sendError(player, pluginName, "Die Seitenzahl muss gr��er 0 sein!");
                                return;
                            }
                        } catch (Exception e) {
                            // NOT A VALID NUMBER
                            PlayerUtils.sendError(player, pluginName, args[i] + " ist keine g�ltige Seitenzahl!");
                            return;
                        }
                    } else {
                        PlayerUtils.sendError(player, pluginName, "Es fehlt bei '-page' die Seitenzahl!");
                        return;
                    }
                }
                // DISPLAY OWN CREATED WARPS
                else if (arg.equalsIgnoreCase("-created")) {
                    filterList.add(new OwnerFilter(player.getName()));
                }
                // DISPLAY USEABLE PRIVATE WARPS
                else if (arg.equalsIgnoreCase("-private")) {
                    filterList.add(PrivateFilter.getInstance());
                }
                // DISPLAY PUBLIC WARPS
                else if (arg.equalsIgnoreCase("-public")) {
                    filterList.add(PublicFilter.getInstance());
                }
                // DISPLAY WARPS FROM A SPECIFIC PLAYER WHICH THE COMMAND CALLER
                // CAN USE
                else if (arg.equalsIgnoreCase("-player")) {
                    String targetName = null;
                    // AFTER -player THERE MUST BE A PLAYER NAME
                    // (the name argument is consumed here via ++i)
                    if (i < args.length - 1) {
                        targetName = PlayerUtils.getCorrectPlayerName(args[++i]);
                        // PLAYER NOT FOUND
                        if (targetName == null) {
                            PlayerUtils.sendError(player, pluginName, "Der Spieler '" + args[i] + "' wurde nicht gefunden!");
                            return;
                        }
                        filterList.add(new OwnerFilter(targetName));
                    } else {
                        PlayerUtils.sendError(player, pluginName, "Es fehlt bei '-player' der Name des Spielers!");
                        return;
                    }
                }
            }
        }
        // GET WARPS
        List<Warp> results = Core.warpManager.filterWarps(filterList);
        // NO WARPS FOUND
        if (results.isEmpty()) {
            PlayerUtils.sendError(player, pluginName, "Keine Ergebnisse gefunden mit folgendem Filter:");
            PlayerUtils.sendError(player, pluginName, filterList.toString());
            return;
        }
        int resultSize = results.size();
        // GET THE SINGLE PAGE
        int pageSize = Settings.getPageSize();
        int fromIndex = pageSize * (pageNumber - 1);
        if (fromIndex >= results.size()) {
            PlayerUtils.sendError(player, pluginName, "Zu hohe Seitenzahl!");
            return;
        }
        int toIndex = fromIndex + pageSize;
        if (toIndex > results.size())
            toIndex = results.size();
        // Total page count, rounded up (ceiling division).
        int maxNumber = (int) Math.ceil((double) results.size() / (double) Settings.getPageSize());
        results = results.subList(fromIndex, toIndex);
        // Sort the page so private warps come before public ones.
        Collections.sort(results, PUBLIC_PRIVATE_SORT);
        displayList(results, player, pageNumber, maxNumber, filterList);
        // FIRE STATISTIC
        StatisticHandler.handleStatistic(new WarpListStat(player.getName(), resultSize, filterList));
    }
|
diff --git a/fontes/GACWeb/src/br/com/sw2/gac/bean/ParametrosBean.java b/fontes/GACWeb/src/br/com/sw2/gac/bean/ParametrosBean.java
index 2b9cfdc..4183333 100644
--- a/fontes/GACWeb/src/br/com/sw2/gac/bean/ParametrosBean.java
+++ b/fontes/GACWeb/src/br/com/sw2/gac/bean/ParametrosBean.java
@@ -1,78 +1,80 @@
package br.com.sw2.gac.bean;
import javax.faces.bean.ManagedBean;
import javax.faces.bean.ViewScoped;
import javax.faces.event.ActionEvent;
import br.com.sw2.gac.business.ParametroBusiness;
import br.com.sw2.gac.exception.BusinessException;
import br.com.sw2.gac.vo.ParametroVO;
/**
* <b>Descrição: Controller da tela de configuracao de parametros.</b> <br>
* .
* @author: SW2
* @version 1.0 Copyright 2012 SmartAngel.
*/
@ManagedBean
@ViewScoped
public class ParametrosBean extends BaseBean {
/** Constante serialVersionUID. */
private static final long serialVersionUID = 4107789141198966008L;
private ParametroBusiness parametroBusiness = new ParametroBusiness();
/** Atributo parametro. */
private ParametroVO parametro;
/**
* Construtor Padrao Instancia um novo objeto ParametrosBean.
*/
public ParametrosBean() {
this.parametro = this.parametroBusiness.recuperarParametros();
if (null == parametro) {
parametro = new ParametroVO();
}
}
/**
* Nome: salvar Salvar.
* @param event the event
* @see
*/
public void salvar(ActionEvent event) {
this.getLogger().debug("***** Iniciando método salvar *****");
this.getLogger().debug("Dias bem estar: " + this.parametro.getDiasBemEstar());
this.getLogger().debug("Dias dados: " + this.parametro.getDiasDados());
this.getLogger().debug("Total Rotina Cliente: " + this.parametro.getToleraRotinaCliente());
// Criar o novo parametro com os dados informados pelo usuario
try {
this.parametroBusiness.adicionarNovoParametro(this.parametro);
+ //Atualiza para recuperar o ID;
+ this.parametro = this.parametroBusiness.recuperarParametros();
setFacesMessage("message.parametros.save.sucess");
} catch (BusinessException e) {
setFacesMessage("message.generic.system.unavailable");
this.getLogger().error(e);
}
this.getLogger().debug("***** Finalizando método salvar *****");
}
/**
* Nome: getParametro Recupera o valor do atributo 'parametro'.
* @return valor do atributo 'parametro'
* @see
*/
public ParametroVO getParametro() {
return parametro;
}
/**
* Nome: setParametro Registra o valor do atributo 'parametro'.
* @param parametro valor do atributo parametro
* @see
*/
public void setParametro(ParametroVO parametro) {
this.parametro = parametro;
}
}
| true
| true
|
public void salvar(ActionEvent event) {
this.getLogger().debug("***** Iniciando método salvar *****");
this.getLogger().debug("Dias bem estar: " + this.parametro.getDiasBemEstar());
this.getLogger().debug("Dias dados: " + this.parametro.getDiasDados());
this.getLogger().debug("Total Rotina Cliente: " + this.parametro.getToleraRotinaCliente());
// Criar o novo parametro com os dados informados pelo usuario
try {
this.parametroBusiness.adicionarNovoParametro(this.parametro);
setFacesMessage("message.parametros.save.sucess");
} catch (BusinessException e) {
setFacesMessage("message.generic.system.unavailable");
this.getLogger().error(e);
}
this.getLogger().debug("***** Finalizando método salvar *****");
}
|
public void salvar(ActionEvent event) {
this.getLogger().debug("***** Iniciando método salvar *****");
this.getLogger().debug("Dias bem estar: " + this.parametro.getDiasBemEstar());
this.getLogger().debug("Dias dados: " + this.parametro.getDiasDados());
this.getLogger().debug("Total Rotina Cliente: " + this.parametro.getToleraRotinaCliente());
// Criar o novo parametro com os dados informados pelo usuario
try {
this.parametroBusiness.adicionarNovoParametro(this.parametro);
//Atualiza para recuperar o ID;
this.parametro = this.parametroBusiness.recuperarParametros();
setFacesMessage("message.parametros.save.sucess");
} catch (BusinessException e) {
setFacesMessage("message.generic.system.unavailable");
this.getLogger().error(e);
}
this.getLogger().debug("***** Finalizando método salvar *****");
}
|
diff --git a/core/src/test/java/quickfix/util/ExpectedTestFailure.java b/core/src/test/java/quickfix/util/ExpectedTestFailure.java
index 985d605..f6dbbaf 100644
--- a/core/src/test/java/quickfix/util/ExpectedTestFailure.java
+++ b/core/src/test/java/quickfix/util/ExpectedTestFailure.java
@@ -1,79 +1,79 @@
package quickfix.util;
import junit.framework.Assert;
/**
* A wrapper around a piece of code that we expect to fail and throw an exception.
* We wrap the call, capture the exception and verify the excpetion class is as expected.
* If the "contains" string is provided we verity that it matches the excepion output as well.
*
* <PRE>
* (new ExpectedTestFailure(OrderParsingException.class) {
* protected void execute() throws OrderParsingException {
* <... Code throwing exception goes here ... >
* }}).run();
* </PRE>
*
* @author Toli Kuznets
* @version $Id: ExpectedTestFailure.java 211 2006-07-20 15:08:14Z toli $
*/
public abstract class ExpectedTestFailure
{
private String mContains;
private Class mThrowable;
public ExpectedTestFailure(Class inThrowable) {
this(inThrowable, null);
}
public ExpectedTestFailure(Class inThrowable, String inContains)
{
mThrowable = inThrowable;
mContains = inContains;
}
/** Subclasses must override this method with an implementation that
* throws their expected error
* @throws Throwable
*/
protected abstract void execute() throws Throwable;
/** Executes the code that was implemented in @link {execute()} method */
public Throwable run()
{
try {
execute();
} catch(Throwable error) {
validateError(error);
return error;
}
Assert.fail("Expected an error but no exception was thrown");
return null;
}
/** Validate the passed-in throwable against the class that we expected to find
* The message of the passed in trowable is validated againt the expected message
* if there is one
*
* If we are expecting a message but the incoming exception.getMessage() doesn't contain it,
* also check exception.toString() as well - for the case of exceptions that aren't
* constructed correctly
*
* @param inError throwable to validate
*/
protected void validateError(Throwable inError)
{
if ((mThrowable!=null) &&
(!mThrowable.isAssignableFrom(inError.getClass()))) {
Assert.fail("Thrown throwable was of the wrong class: "+
inError.getClass()+": "+inError);
}
if ((mContains!=null) &&
(((inError.getMessage()==null) ||
(inError.getMessage().indexOf(mContains)==-1))) &&
(inError.toString().indexOf(mContains) == -1)) {
Assert.fail("Thrown throwable contained incorrect message: "+
- inError.getMessage()+": "+inError);
+ "looking for '" + mContains + "' in '" + inError.getMessage() + "'");
}
}
}
| true
| true
|
protected void validateError(Throwable inError)
{
if ((mThrowable!=null) &&
(!mThrowable.isAssignableFrom(inError.getClass()))) {
Assert.fail("Thrown throwable was of the wrong class: "+
inError.getClass()+": "+inError);
}
if ((mContains!=null) &&
(((inError.getMessage()==null) ||
(inError.getMessage().indexOf(mContains)==-1))) &&
(inError.toString().indexOf(mContains) == -1)) {
Assert.fail("Thrown throwable contained incorrect message: "+
inError.getMessage()+": "+inError);
}
}
|
protected void validateError(Throwable inError)
{
if ((mThrowable!=null) &&
(!mThrowable.isAssignableFrom(inError.getClass()))) {
Assert.fail("Thrown throwable was of the wrong class: "+
inError.getClass()+": "+inError);
}
if ((mContains!=null) &&
(((inError.getMessage()==null) ||
(inError.getMessage().indexOf(mContains)==-1))) &&
(inError.toString().indexOf(mContains) == -1)) {
Assert.fail("Thrown throwable contained incorrect message: "+
"looking for '" + mContains + "' in '" + inError.getMessage() + "'");
}
}
|
diff --git a/eclipse/plugins/net.sf.orcc.core/src/net/sf/orcc/ir/util/EcoreHelper.java b/eclipse/plugins/net.sf.orcc.core/src/net/sf/orcc/ir/util/EcoreHelper.java
index 43fb24a36..7b5822b8f 100644
--- a/eclipse/plugins/net.sf.orcc.core/src/net/sf/orcc/ir/util/EcoreHelper.java
+++ b/eclipse/plugins/net.sf.orcc.core/src/net/sf/orcc/ir/util/EcoreHelper.java
@@ -1,404 +1,407 @@
/*
* Copyright (c) 2011, IETR/INSA of Rennes
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of the IETR/INSA of Rennes nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY
* WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
package net.sf.orcc.ir.util;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import net.sf.orcc.ir.Actor;
import net.sf.orcc.ir.Def;
import net.sf.orcc.ir.Expression;
import net.sf.orcc.ir.Instruction;
import net.sf.orcc.ir.IrFactory;
import net.sf.orcc.ir.Node;
import net.sf.orcc.ir.NodeBlock;
import net.sf.orcc.ir.NodeWhile;
import net.sf.orcc.ir.Use;
import net.sf.orcc.ir.impl.IrResourceFactoryImpl;
import net.sf.orcc.util.OrccUtil;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IFolder;
import org.eclipse.emf.common.util.TreeIterator;
import org.eclipse.emf.common.util.URI;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EStructuralFeature;
import org.eclipse.emf.ecore.resource.Resource;
import org.eclipse.emf.ecore.resource.ResourceSet;
import org.eclipse.emf.ecore.resource.impl.ResourceSetImpl;
import org.eclipse.emf.ecore.util.EcoreUtil;
import org.eclipse.emf.ecore.util.EcoreUtil.Copier;
/**
* This class contains several methods to help the manipulation of EMF models.
*
* @author Matthieu Wipliez
* @author Herve Yviquel
*
*/
public class EcoreHelper {
/**
* Add the given instruction before the given expression. If the expression
* is contained by an instruction then the instruction to add is put
* directly before, else the instruction is put to the previous nodeblock
* which is created if needed. Return <code>true</code> if the given
* instruction is added in the current block.
*
* @param expression
* an expression
* @param instruction
* the instruction to add before the given expression
* @param usePreviousJoinNode
* <code>true</code> if the current IR form has join node before
* while node
* @return <code>true</code> if the given instruction is added in the
* current block
*/
public static boolean addInstBeforeExpr(Expression expression,
Instruction instruction, boolean usePreviousJoinNode) {
Instruction instContainer = EcoreHelper.getContainerOfType(expression,
Instruction.class);
Node nodeContainer = EcoreHelper.getContainerOfType(expression,
Node.class);
if (instContainer != null) {
- if (usePreviousJoinNode && isWhileJoinNode(nodeContainer)) {
+ if (usePreviousJoinNode && instContainer.isPhi()
+ && isWhileJoinNode(nodeContainer)) {
NodeWhile nodeWhile = EcoreHelper.getContainerOfType(
nodeContainer, NodeWhile.class);
addToPreviousNodeBlock(nodeWhile, instruction);
return false;
+ } else {
+ List<Instruction> instructions = EcoreHelper
+ .getContainingList(instContainer);
+ instructions.add(instructions.indexOf(instContainer),
+ instruction);
+ return true;
}
- List<Instruction> instructions = EcoreHelper
- .getContainingList(instContainer);
- instructions.add(instructions.indexOf(instContainer), instruction);
- return true;
} else {
if (usePreviousJoinNode && nodeContainer.isWhileNode()) {
NodeBlock joinNode = ((NodeWhile) nodeContainer).getJoinNode();
joinNode.add(instruction);
return false;
} else {
addToPreviousNodeBlock(nodeContainer, instruction);
return false;
}
}
}
private static void addToPreviousNodeBlock(Node node,
Instruction instruction) {
List<Node> nodes = EcoreHelper.getContainingList(node);
NodeBlock nodeBlock = IrFactory.eINSTANCE.createNodeBlock();
nodeBlock.add(instruction);
nodes.add(nodes.indexOf(node), nodeBlock);
}
/**
* Returns a deep copy of the given objects, and updates def/use chains.
*
* @param eObjects
* a list of objects
* @return a deep copy of the given objects with def/use chains correctly
* updated
*/
public static <T extends EObject> Collection<T> copy(Collection<T> eObjects) {
Copier copier = new Copier();
Collection<T> result = copier.copyAll(eObjects);
copier.copyReferences();
TreeIterator<EObject> it = EcoreUtil.getAllContents(eObjects);
while (it.hasNext()) {
EObject object = it.next();
if (object instanceof Def) {
Def def = (Def) object;
Def copyDef = (Def) copier.get(def);
copyDef.setVariable(def.getVariable());
} else if (object instanceof Use) {
Use use = (Use) object;
Use copyUse = (Use) copier.get(use);
copyUse.setVariable(use.getVariable());
}
}
return result;
}
/**
* Returns a deep copy of the given expression, and updates uses.
*
* @param expression
* an expression
* @return a deep copy of the given expression with uses correctly updated
*/
public static <T extends EObject> T copy(T eObject) {
Copier copier = new Copier();
@SuppressWarnings("unchecked")
T result = (T) copier.copy(eObject);
copier.copyReferences();
TreeIterator<EObject> it = EcoreUtil.getAllContents(eObject, true);
while (it.hasNext()) {
EObject object = it.next();
if (object instanceof Def) {
Def def = (Def) object;
Def copyDef = (Def) copier.get(def);
copyDef.setVariable(def.getVariable());
} else if (object instanceof Use) {
Use use = (Use) object;
Use copyUse = (Use) copier.get(use);
copyUse.setVariable(use.getVariable());
}
}
return result;
}
/**
* Removes the def/use chains of the given object, and then removes the
* object itself from its container.
*
* @param eObject
* an EObject
*/
public static void delete(EObject eObject) {
removeUses(eObject);
removeDefs(eObject);
EcoreUtil.remove(eObject);
}
/**
* Deletes the given objects, and updates the def/use chains.
*
* @param objects
* a list of objects
*/
public static void delete(List<? extends EObject> eObjects) {
while (!eObjects.isEmpty()) {
delete(eObjects.get(0));
}
}
/**
* Deserializes the XMI representation of an actor stored in the given file,
* and returns this actor.
*
* @param file
* a .ir file
* @return the actor serialized with XMI in the given file
*/
public static Actor deserializeActor(IFile file) {
ResourceSet set = new ResourceSetImpl();
Resource resource = set.getResource(URI.createPlatformResourceURI(file
.getFullPath().toString(), true), true);
Actor actor = (Actor) resource.getContents().get(0);
return actor;
}
/**
* Returns the container of <code>ele</code> with the given type, or
* <code>null</code> if no such container exists. This method has been
* copied from the EcoreUtil2 class of Xtext.
*
* @param <T>
* type parameter
* @param ele
* an object
* @param type
* the type of the container
* @return the container of <code>ele</code> with the given type
*/
@SuppressWarnings("unchecked")
public static <T extends EObject> T getContainerOfType(EObject ele,
Class<T> type) {
if (type.isAssignableFrom(ele.getClass())) {
return (T) ele;
}
if (ele.eContainer() != null) {
return getContainerOfType(ele.eContainer(), type);
}
return null;
}
/**
* Returns the list that contains this object, or <code>null</code>.
*
* @param <T>
* type of the objects contained in the list
* @param <T1>
* type of the object as a specialization of <code>T</code>
* @param eObject
* the object
* @return the list that contains this object, or <code>null</code>
*/
@SuppressWarnings("unchecked")
public static <T extends EObject, T1 extends T> List<T> getContainingList(
T1 eObject) {
EStructuralFeature feature = eObject.eContainingFeature();
if (feature.getUpperBound() == EStructuralFeature.UNBOUNDED_MULTIPLICITY) {
Object obj = eObject.eContainer().eGet(feature);
if (obj != null && List.class.isAssignableFrom(obj.getClass())) {
return (List<T>) obj;
}
}
return null;
}
public static List<Use> getUses(EObject eObject) {
List<Use> uses = new ArrayList<Use>();
TreeIterator<EObject> it = eObject.eAllContents();
while (it.hasNext()) {
EObject descendant = it.next();
if (descendant instanceof Use) {
uses.add((Use) descendant);
}
}
return uses;
}
private static boolean isWhileJoinNode(Node node) {
if (node.isBlockNode()) {
NodeWhile nodeWhile = EcoreHelper.getContainerOfType(node,
NodeWhile.class);
return (nodeWhile != null && nodeWhile.getJoinNode() == node);
}
return false;
}
/**
* Removes the defs present in the given object.
*
* @param eObject
* an EObject
*/
public static void removeDefs(EObject eObject) {
TreeIterator<EObject> it = eObject.eAllContents();
while (it.hasNext()) {
EObject descendant = it.next();
if (descendant instanceof Def) {
Def def = (Def) descendant;
def.setVariable(null);
}
}
}
/**
* Removes the uses present in the given object.
*
* @param eObject
* an EObject
*/
public static void removeUses(EObject eObject) {
TreeIterator<EObject> it = eObject.eAllContents();
while (it.hasNext()) {
EObject descendant = it.next();
if (descendant instanceof Use) {
Use use = (Use) descendant;
use.setVariable(null);
}
}
}
/**
* Serializes the given actor to the given output folder.
*
* @param outputFolder
* an IFolder of the workspace
* @param actor
* an actor
* @return <code>true</code> if the serialization succeeded
*/
public static boolean serializeActor(IFolder outputFolder, Actor actor) {
URI uri = URI.createPlatformResourceURI(outputFolder.getFullPath()
.append(OrccUtil.getFile(actor)).addFileExtension("ir")
.toString(), true);
return serializeActor(uri, actor);
}
/**
* Serializes the given actor to the given output folder.
*
* @param outputFolder
* output folder
* @param actor
* an actor
* @return <code>true</code> if the serialization succeeded
*/
public static boolean serializeActor(String outputFolder, Actor actor) {
String pathName = outputFolder + File.separator
+ OrccUtil.getFile(actor) + ".ir";
URI uri = URI.createFileURI(pathName);
return serializeActor(uri, actor);
}
/**
* Serializes the given actor to the given URI.
*
* @param uri
* URI
* @param actor
* an actor
* @return <code>true</code> if the serialization succeeded
*/
private static boolean serializeActor(URI uri, Actor actor) {
// check that the factory is registered
// (only happens in command-line mode)
// ...
// duck you command line :)
Map<String, Object> extToFactoryMap = Resource.Factory.Registry.INSTANCE
.getExtensionToFactoryMap();
Object instance = extToFactoryMap.get("ir");
if (instance == null) {
instance = new IrResourceFactoryImpl();
extToFactoryMap.put("ir", instance);
}
// serialization
ResourceSet set = new ResourceSetImpl();
Resource resource = set.createResource(uri);
resource.getContents().add(actor);
try {
resource.save(null);
return true;
} catch (IOException e) {
// uncomment to see details of exception
e.printStackTrace();
return false;
}
}
}
| false
| true
|
public static boolean addInstBeforeExpr(Expression expression,
Instruction instruction, boolean usePreviousJoinNode) {
Instruction instContainer = EcoreHelper.getContainerOfType(expression,
Instruction.class);
Node nodeContainer = EcoreHelper.getContainerOfType(expression,
Node.class);
if (instContainer != null) {
if (usePreviousJoinNode && isWhileJoinNode(nodeContainer)) {
NodeWhile nodeWhile = EcoreHelper.getContainerOfType(
nodeContainer, NodeWhile.class);
addToPreviousNodeBlock(nodeWhile, instruction);
return false;
}
List<Instruction> instructions = EcoreHelper
.getContainingList(instContainer);
instructions.add(instructions.indexOf(instContainer), instruction);
return true;
} else {
if (usePreviousJoinNode && nodeContainer.isWhileNode()) {
NodeBlock joinNode = ((NodeWhile) nodeContainer).getJoinNode();
joinNode.add(instruction);
return false;
} else {
addToPreviousNodeBlock(nodeContainer, instruction);
return false;
}
}
}
|
public static boolean addInstBeforeExpr(Expression expression,
Instruction instruction, boolean usePreviousJoinNode) {
Instruction instContainer = EcoreHelper.getContainerOfType(expression,
Instruction.class);
Node nodeContainer = EcoreHelper.getContainerOfType(expression,
Node.class);
if (instContainer != null) {
if (usePreviousJoinNode && instContainer.isPhi()
&& isWhileJoinNode(nodeContainer)) {
NodeWhile nodeWhile = EcoreHelper.getContainerOfType(
nodeContainer, NodeWhile.class);
addToPreviousNodeBlock(nodeWhile, instruction);
return false;
} else {
List<Instruction> instructions = EcoreHelper
.getContainingList(instContainer);
instructions.add(instructions.indexOf(instContainer),
instruction);
return true;
}
} else {
if (usePreviousJoinNode && nodeContainer.isWhileNode()) {
NodeBlock joinNode = ((NodeWhile) nodeContainer).getJoinNode();
joinNode.add(instruction);
return false;
} else {
addToPreviousNodeBlock(nodeContainer, instruction);
return false;
}
}
}
|
diff --git a/mes-plugins/mes-plugins-material-flow/src/main/java/com/qcadoo/mes/materialFlow/MaterialFlowService.java b/mes-plugins/mes-plugins-material-flow/src/main/java/com/qcadoo/mes/materialFlow/MaterialFlowService.java
index 64fc5f652a..e03dc8ad16 100644
--- a/mes-plugins/mes-plugins-material-flow/src/main/java/com/qcadoo/mes/materialFlow/MaterialFlowService.java
+++ b/mes-plugins/mes-plugins-material-flow/src/main/java/com/qcadoo/mes/materialFlow/MaterialFlowService.java
@@ -1,283 +1,282 @@
/**
* ***************************************************************************
* Copyright (c) 2010 Qcadoo Limited
* Project: Qcadoo MES
* Version: 0.4.6
*
* This file is part of Qcadoo.
*
* Qcadoo is free software; you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation; either version 3 of the License,
* or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty
* of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
* ***************************************************************************
*/
package com.qcadoo.mes.materialFlow;
import static com.qcadoo.mes.basic.constants.BasicConstants.MODEL_PRODUCT;
import static com.qcadoo.mes.materialFlow.constants.MaterialFlowConstants.MODEL_TRANSFER;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.google.common.collect.Sets;
import com.qcadoo.mes.basic.constants.BasicConstants;
import com.qcadoo.mes.materialFlow.constants.MaterialFlowConstants;
import com.qcadoo.model.api.DataDefinition;
import com.qcadoo.model.api.DataDefinitionService;
import com.qcadoo.model.api.Entity;
import com.qcadoo.model.api.search.SearchCriteriaBuilder;
import com.qcadoo.model.api.search.SearchOrders;
import com.qcadoo.model.api.search.SearchProjections;
import com.qcadoo.model.api.search.SearchRestrictions;
import com.qcadoo.model.api.search.SearchResult;
import com.qcadoo.view.api.ComponentState;
import com.qcadoo.view.api.ViewDefinitionState;
import com.qcadoo.view.api.components.FieldComponent;
import com.qcadoo.view.api.utils.NumberGeneratorService;
@Service
public class MaterialFlowService {
@Autowired
private DataDefinitionService dataDefinitionService;
@Autowired
private NumberGeneratorService numberGeneratorService;
public BigDecimal calculateShouldBeInStockArea(final String stockAreas, final String product, final String forDate) {
BigDecimal countProductIn = BigDecimal.ZERO;
BigDecimal countProductOut = BigDecimal.ZERO;
- BigDecimal quantity = BigDecimal.ZERO;
BigDecimal countProduct = BigDecimal.ZERO;
Date lastCorrectionDate = null;
DataDefinition transferDataCorrection = dataDefinitionService.get("materialFlow", "stockCorrection");
DataDefinition transferTo = dataDefinitionService.get("materialFlow", "transfer");
DataDefinition transferFrom = dataDefinitionService.get("materialFlow", "transfer");
Long stockAreasId = Long.valueOf(stockAreas);
Long productId = Long.valueOf(product);
Entity resultDataCorrection = transferDataCorrection.find().add(SearchRestrictions.eq("stockAreas.id", stockAreasId))
.add(SearchRestrictions.eq("product.id", productId)).addOrder(SearchOrders.desc("stockCorrectionDate"))
.setMaxResults(1).uniqueResult();
if (resultDataCorrection != null) {
lastCorrectionDate = (Date) resultDataCorrection.getField("stockCorrectionDate");
countProduct = (BigDecimal) resultDataCorrection.getField("found");
}
SearchResult resultTo = null;
SearchResult resultFrom = null;
if (lastCorrectionDate == null) {
resultTo = transferTo.find(
"where stockAreasTo = '" + stockAreas + "' and product = '" + product + "' and date <= '" + forDate + "'")
.list();
resultFrom = transferFrom.find(
"where stockAreasFrom = '" + stockAreas + "' and product = '" + product + "' and date <= '" + forDate + "'")
.list();
} else {
resultTo = transferTo.find(
"where stockAreasTo = '" + stockAreas + "' and product = '" + product + "' and date <= '" + forDate
+ "' and date > '" + lastCorrectionDate + "'").list();
resultFrom = transferFrom.find(
"where stockAreasFrom = '" + stockAreas + "' and product = '" + product + "' and date <= '" + forDate
+ "' and date > '" + lastCorrectionDate + "'").list();
}
for (Entity e : resultTo.getEntities()) {
- quantity = (BigDecimal) e.getField("quantity");
+ BigDecimal quantity = (BigDecimal) e.getField("quantity");
countProductIn = countProductIn.add(quantity);
}
for (Entity e : resultFrom.getEntities()) {
- quantity = (BigDecimal) e.getField("quantity");
+ BigDecimal quantity = (BigDecimal) e.getField("quantity");
countProductOut = countProductOut.add(quantity);
}
if (lastCorrectionDate == null) {
countProductIn = countProductIn.subtract(countProductOut);
} else {
countProductIn = countProductIn.add(countProduct);
countProductIn = countProductIn.subtract(countProductOut);
}
if (countProductIn.compareTo(BigDecimal.ZERO) == -1)
countProductIn = BigDecimal.ZERO;
return countProductIn;
}
public void refreshShouldBeInStockCorrectionDetail(final ViewDefinitionState state, final ComponentState componentState,
final String[] args) {
refreshShouldBeInStockCorrectionDetail(state);
}
public void refreshShouldBeInStockCorrectionDetail(final ViewDefinitionState state) {
FieldComponent stockAreas = (FieldComponent) state.getComponentByReference("stockAreas");
FieldComponent product = (FieldComponent) state.getComponentByReference("product");
FieldComponent date = (FieldComponent) state.getComponentByReference("stockCorrectionDate");
FieldComponent should = (FieldComponent) state.getComponentByReference("shouldBe");
if (stockAreas != null && product != null && date != null) {
if (stockAreas.getFieldValue() != null && product.getFieldValue() != null
&& !date.getFieldValue().toString().equals("")) {
String stockAreasNumber = stockAreas.getFieldValue().toString();
String productNumber = product.getFieldValue().toString();
String forDate = date.getFieldValue().toString();
BigDecimal shouldBe = calculateShouldBeInStockArea(stockAreasNumber, productNumber, forDate);
if (shouldBe != null && shouldBe != BigDecimal.ZERO) {
should.setFieldValue(shouldBe);
} else {
should.setFieldValue(BigDecimal.ZERO);
}
}
}
}
public boolean validateTransfer(final DataDefinition dataDefinition, final Entity entity) {
Entity stockAreasFrom = (Entity) (entity.getField("stockAreasFrom") != null ? entity.getField("stockAreasFrom") : null);
Entity stockAreasTo = (Entity) (entity.getField("stockAreasTo") != null ? entity.getField("stockAreasTo") : null);
if (stockAreasFrom == null && stockAreasTo == null) {
entity.addError(dataDefinition.getField("stockAreasFrom"),
"materialFlow.validate.global.error.fillAtLeastOneStockAreas");
entity.addError(dataDefinition.getField("stockAreasTo"),
"materialFlow.validate.global.error.fillAtLeastOneStockAreas");
return false;
}
return true;
}
public void generateTransferNumber(final ViewDefinitionState state, final ComponentState componentState, final String[] args) {
if (!(componentState instanceof FieldComponent)) {
throw new IllegalStateException("component is not FieldComponentState");
}
FieldComponent number = (FieldComponent) state.getComponentByReference("number");
FieldComponent productState = (FieldComponent) componentState;
if (!numberGeneratorService.checkIfShouldInsertNumber(state, "form", "number")) {
return;
}
if (productState.getFieldValue() != null) {
Entity product = getAreaById((Long) productState.getFieldValue());
if (product != null) {
String numberValue = product.getField("number") + "-"
+ numberGeneratorService.generateNumber("materialFlow", "transfer", 3);
number.setFieldValue(numberValue);
}
}
}
private Entity getAreaById(final Long productId) {
DataDefinition instructionDD = dataDefinitionService.get(BasicConstants.PLUGIN_IDENTIFIER, BasicConstants.MODEL_PRODUCT);
@SuppressWarnings("deprecation")
SearchCriteriaBuilder searchCriteria = instructionDD.find().setMaxResults(1).isIdEq(productId);
SearchResult searchResult = searchCriteria.list();
if (searchResult.getTotalNumberOfEntities() == 1) {
return searchResult.getEntities().get(0);
}
return null;
}
public void fillNumberFieldValue(final ViewDefinitionState view) {
if (view.getComponentByReference("number").getFieldValue() != null) {
return;
}
numberGeneratorService.generateAndInsertNumber(view, MaterialFlowConstants.PLUGIN_IDENTIFIER, MODEL_TRANSFER, "form",
"number");
}
public void fillUnitFieldValue(final ViewDefinitionState view, final ComponentState componentState, final String[] args) {
Long productId = (Long) view.getComponentByReference("product").getFieldValue();
if (productId == null) {
return;
}
Entity product = dataDefinitionService.get(BasicConstants.PLUGIN_IDENTIFIER, MODEL_PRODUCT).get(productId);
FieldComponent unitField = null;
String unit = product.getField("unit").toString();
for (String referenceName : Sets.newHashSet("quantityUNIT", "shouldBeUNIT", "foundUNIT")) {
unitField = (FieldComponent) view.getComponentByReference(referenceName);
if (unitField == null) {
continue;
}
unitField.setFieldValue(unit);
unitField.requestComponentUpdateState();
}
}
/**
 * Aggregates, per product, the quantity that should be present across all stock
 * areas referenced by the given report entity, evaluated at its "materialFlowForDate".
 *
 * @param materialsInStockAreas report entity with a "stockAreas" has-many field
 *        and a "materialFlowForDate" date field
 * @return map from product entity to its accumulated quantity
 */
public Map<Entity, BigDecimal> calculateMaterialQuantitiesInStockArea(Entity materialsInStockAreas) {
    Map<Entity, BigDecimal> reportData = new HashMap<Entity, BigDecimal>();
    List<Entity> stockAreas = new ArrayList<Entity>(materialsInStockAreas.getHasManyField("stockAreas"));
    for (Entity component : stockAreas) {
        Entity stockArea = (Entity) component.getField("stockAreas");
        String stockAreaNumber = stockArea.getField("number").toString();
        String forDate = ((Date) materialsInStockAreas.getField("materialFlowForDate")).toString();
        for (Entity product : getProductsSeenInStockArea(stockAreaNumber)) {
            BigDecimal quantity = calculateShouldBeInStockArea(stockAreaNumber, product.getStringField("number"), forDate);
            // Accumulate per product across all stock areas.
            BigDecimal accumulated = reportData.get(product);
            reportData.put(product, (accumulated == null) ? quantity : accumulated.add(quantity));
        }
    }
    return reportData;
}
/**
 * Returns every product that has ever been "seen" in the given stock area:
 * products transferred into it plus products appearing in stock corrections
 * for it (duplicates removed).
 *
 * @param stockAreaNumber the stock area's "number" field; it is parsed with
 *        Long.valueOf and compared against entity ids -- NOTE(review): confirm
 *        that "number" really carries the numeric id here
 * @return distinct products, transfer-derived first, then corrections not already listed
 */
public List<Entity> getProductsSeenInStockArea(String stockAreaNumber) {
    Long id = Long.valueOf(stockAreaNumber);
    DataDefinition dataDefProduct = dataDefinitionService.get(MaterialFlowConstants.PLUGIN_IDENTIFIER_BASIC,
            MaterialFlowConstants.MODEL_PRODUCT);
    // Distinct products that were transferred INTO this stock area.
    List<Entity> productsFromTransfers = dataDefProduct.find().createAlias("transfer", "t")
            .addOrder(SearchOrders.asc("t.product.id"))
            .setProjection(SearchProjections.distinct(SearchProjections.field("t.product")))
            .add(SearchRestrictions.eqField("t.product.id", "id")).add(SearchRestrictions.eq("t.stockAreasTo.id", id)).list()
            .getEntities();
    // Distinct products that appear in stock corrections of this stock area.
    List<Entity> productsFromStockCorrections = dataDefProduct.find().createAlias("stockCorrection", "sc")
            .addOrder(SearchOrders.asc("sc.product.id"))
            .setProjection(SearchProjections.distinct(SearchProjections.field("sc.product")))
            .add(SearchRestrictions.eqField("sc.product.id", "id")).add(SearchRestrictions.eq("sc.stockAreas.id", id)).list()
            .getEntities();
    // Merge the two result sets, skipping products already found via transfers.
    for (Entity product : productsFromStockCorrections)
        if (!productsFromTransfers.contains(product))
            productsFromTransfers.add(product);
    return productsFromTransfers;
}
}
| false
| true
|
/**
 * Calculates how much of the given product should be present in the given
 * stock area at {@code forDate}: the quantity found at the most recent stock
 * correction (if any) plus later transfers in, minus later transfers out.
 *
 * @param stockAreas stock area id, passed as a string
 * @param product product id, passed as a string
 * @param forDate upper (inclusive) date bound for the transfers considered
 * @return the computed quantity; negative results are clamped to zero
 */
public BigDecimal calculateShouldBeInStockArea(final String stockAreas, final String product, final String forDate) {
    BigDecimal countProductIn = BigDecimal.ZERO;
    BigDecimal countProductOut = BigDecimal.ZERO;
    BigDecimal countProduct = BigDecimal.ZERO;
    Date lastCorrectionDate = null;
    DataDefinition transferDataCorrection = dataDefinitionService.get("materialFlow", "stockCorrection");
    DataDefinition transferTo = dataDefinitionService.get("materialFlow", "transfer");
    DataDefinition transferFrom = dataDefinitionService.get("materialFlow", "transfer");
    Long stockAreasId = Long.valueOf(stockAreas);
    Long productId = Long.valueOf(product);
    // The most recent stock correction (if any) supplies the baseline quantity
    // and the cut-off date from which transfers are summed.
    Entity resultDataCorrection = transferDataCorrection.find().add(SearchRestrictions.eq("stockAreas.id", stockAreasId))
            .add(SearchRestrictions.eq("product.id", productId)).addOrder(SearchOrders.desc("stockCorrectionDate"))
            .setMaxResults(1).uniqueResult();
    if (resultDataCorrection != null) {
        lastCorrectionDate = (Date) resultDataCorrection.getField("stockCorrectionDate");
        countProduct = (BigDecimal) resultDataCorrection.getField("found");
    }
    SearchResult resultTo = null;
    SearchResult resultFrom = null;
    // NOTE(review): the queries below are built by string concatenation; the values
    // originate from internal entities, but parameterized restrictions would be safer.
    if (lastCorrectionDate == null) {
        // No correction yet: consider all transfers up to forDate.
        resultTo = transferTo.find(
                "where stockAreasTo = '" + stockAreas + "' and product = '" + product + "' and date <= '" + forDate + "'")
                .list();
        resultFrom = transferFrom.find(
                "where stockAreasFrom = '" + stockAreas + "' and product = '" + product + "' and date <= '" + forDate + "'")
                .list();
    } else {
        // Correction exists: only consider transfers after it.
        resultTo = transferTo.find(
                "where stockAreasTo = '" + stockAreas + "' and product = '" + product + "' and date <= '" + forDate
                        + "' and date > '" + lastCorrectionDate + "'").list();
        resultFrom = transferFrom.find(
                "where stockAreasFrom = '" + stockAreas + "' and product = '" + product + "' and date <= '" + forDate
                        + "' and date > '" + lastCorrectionDate + "'").list();
    }
    // FIX: 'quantity' is scoped to each loop instead of being one shared mutable
    // local declared at method level, so a value can never leak between the two
    // summations and each loop is independently readable.
    for (Entity e : resultTo.getEntities()) {
        BigDecimal quantity = (BigDecimal) e.getField("quantity");
        countProductIn = countProductIn.add(quantity);
    }
    for (Entity e : resultFrom.getEntities()) {
        BigDecimal quantity = (BigDecimal) e.getField("quantity");
        countProductOut = countProductOut.add(quantity);
    }
    if (lastCorrectionDate != null) {
        // Start from the quantity found at the last correction.
        countProductIn = countProductIn.add(countProduct);
    }
    countProductIn = countProductIn.subtract(countProductOut);
    // Never report a negative stock level.
    if (countProductIn.compareTo(BigDecimal.ZERO) < 0) {
        countProductIn = BigDecimal.ZERO;
    }
    return countProductIn;
}
|
/**
 * Calculates how much of the given product should be present in the given
 * stock area at {@code forDate}: the quantity found at the most recent stock
 * correction (if any) plus later transfers in, minus later transfers out.
 *
 * @param stockAreas stock area id, passed as a string
 * @param product product id, passed as a string
 * @param forDate upper (inclusive) date bound for the transfers considered
 * @return the computed quantity; negative results are clamped to zero
 */
public BigDecimal calculateShouldBeInStockArea(final String stockAreas, final String product, final String forDate) {
    BigDecimal countProductIn = BigDecimal.ZERO;
    BigDecimal countProductOut = BigDecimal.ZERO;
    BigDecimal countProduct = BigDecimal.ZERO;
    Date lastCorrectionDate = null;
    DataDefinition transferDataCorrection = dataDefinitionService.get("materialFlow", "stockCorrection");
    DataDefinition transferTo = dataDefinitionService.get("materialFlow", "transfer");
    DataDefinition transferFrom = dataDefinitionService.get("materialFlow", "transfer");
    Long stockAreasId = Long.valueOf(stockAreas);
    Long productId = Long.valueOf(product);
    // The most recent stock correction (if any) supplies the baseline quantity
    // and the cut-off date from which transfers are summed.
    Entity resultDataCorrection = transferDataCorrection.find().add(SearchRestrictions.eq("stockAreas.id", stockAreasId))
            .add(SearchRestrictions.eq("product.id", productId)).addOrder(SearchOrders.desc("stockCorrectionDate"))
            .setMaxResults(1).uniqueResult();
    if (resultDataCorrection != null) {
        lastCorrectionDate = (Date) resultDataCorrection.getField("stockCorrectionDate");
        countProduct = (BigDecimal) resultDataCorrection.getField("found");
    }
    SearchResult resultTo = null;
    SearchResult resultFrom = null;
    // NOTE(review): the queries below are built by string concatenation; the values
    // originate from internal entities, but parameterized restrictions would be safer.
    if (lastCorrectionDate == null) {
        // No correction yet: consider all transfers up to forDate.
        resultTo = transferTo.find(
                "where stockAreasTo = '" + stockAreas + "' and product = '" + product + "' and date <= '" + forDate + "'")
                .list();
        resultFrom = transferFrom.find(
                "where stockAreasFrom = '" + stockAreas + "' and product = '" + product + "' and date <= '" + forDate + "'")
                .list();
    } else {
        // Correction exists: only consider transfers after it.
        resultTo = transferTo.find(
                "where stockAreasTo = '" + stockAreas + "' and product = '" + product + "' and date <= '" + forDate
                        + "' and date > '" + lastCorrectionDate + "'").list();
        resultFrom = transferFrom.find(
                "where stockAreasFrom = '" + stockAreas + "' and product = '" + product + "' and date <= '" + forDate
                        + "' and date > '" + lastCorrectionDate + "'").list();
    }
    // Sum of quantities transferred into the stock area.
    for (Entity e : resultTo.getEntities()) {
        BigDecimal quantity = (BigDecimal) e.getField("quantity");
        countProductIn = countProductIn.add(quantity);
    }
    // Sum of quantities transferred out of the stock area.
    for (Entity e : resultFrom.getEntities()) {
        BigDecimal quantity = (BigDecimal) e.getField("quantity");
        countProductOut = countProductOut.add(quantity);
    }
    if (lastCorrectionDate == null) {
        countProductIn = countProductIn.subtract(countProductOut);
    } else {
        // Add the baseline from the last correction before subtracting the outflow.
        countProductIn = countProductIn.add(countProduct);
        countProductIn = countProductIn.subtract(countProductOut);
    }
    // Clamp negative results to zero.
    if (countProductIn.compareTo(BigDecimal.ZERO) == -1)
        countProductIn = BigDecimal.ZERO;
    return countProductIn;
}
|
diff --git a/fog.routing.hrm/src/de/tuilmenau/ics/fog/ui/eclipse/commands/hierarchical/LinkToNode.java b/fog.routing.hrm/src/de/tuilmenau/ics/fog/ui/eclipse/commands/hierarchical/LinkToNode.java
index f618325c..05cc64ce 100644
--- a/fog.routing.hrm/src/de/tuilmenau/ics/fog/ui/eclipse/commands/hierarchical/LinkToNode.java
+++ b/fog.routing.hrm/src/de/tuilmenau/ics/fog/ui/eclipse/commands/hierarchical/LinkToNode.java
@@ -1,145 +1,145 @@
/*******************************************************************************
* Forwarding on Gates Simulator/Emulator - Eclipse
* Copyright (c) 2012, Integrated Communication Systems Group, TU Ilmenau.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html.
******************************************************************************/
package de.tuilmenau.ics.fog.ui.eclipse.commands.hierarchical;
import java.util.LinkedList;
import de.tuilmenau.ics.fog.eclipse.ui.commands.EclipseCommand;
import de.tuilmenau.ics.fog.eclipse.ui.dialogs.SelectFromListDialog;
import de.tuilmenau.ics.fog.facade.Description;
import de.tuilmenau.ics.fog.routing.hierarchical.HRMController;
import de.tuilmenau.ics.fog.topology.AutonomousSystem;
import de.tuilmenau.ics.fog.topology.Node;
import de.tuilmenau.ics.fog.ui.Logging;
public class LinkToNode extends EclipseCommand
{
private Node mSourceNode = null;
private Node mSelectedDestinationNode = null;
private AutonomousSystem mAs = null;
private static String sLastSelectedNodeName = null;
private Description mBusAttributes = null;
public LinkToNode()
{
}
/**
* Executes the command
*
* @param pObject the object parameter
*/
@Override
public void execute(Object pObject)
{
Logging.log(this, "INIT - object parameter is " + pObject);
if(pObject instanceof Node) {
mSourceNode = (Node) pObject;
} else if(pObject instanceof HRMController) {
mSourceNode = ((HRMController)pObject).getNode();
} else {
throw new RuntimeException(this +" requires a Node object instead of " + pObject +" to proceed.");
}
mAs = mSourceNode.getAS();
if(mSourceNode != null) {
showBusDialogs();
showNodeDialog();
createBusAndLinkToNodes();
} else {
Logging.err(this, "Missing reference to a Source Node. Can not run 'create link' command.");
}
}
/**
* Triggers the actual link creation process.
*/
private void createBusAndLinkToNodes()
{
if(mSelectedDestinationNode != null){
// create name for bus
String tBusName = "bus_" + mSourceNode.toString() + "_" + mSelectedDestinationNode.toString();
// create the bus, allow multiple bus between same nodes here
String tBusNameTmp = tBusName;
int i = 0;
while(mSourceNode.getAS().containsBus(tBusNameTmp)){
tBusNameTmp = tBusName + "_" + Integer.toString(i);
i++;
}
tBusName = tBusNameTmp;
mSourceNode.getAS().executeCommand("create bus " + tBusName /* TODO: QoS parameter */);
// connect the nodes at both end points of the link
mSourceNode.getAS().executeCommand("connect " + mSourceNode.toString() + " " + tBusName);
mSourceNode.getAS().executeCommand("connect " + mSelectedDestinationNode.toString() + " " + tBusName);
}
}
/**
* Shows the dialog which allows the user to select the destination node.
*/
private void showNodeDialog()
{
// determine how many nodes exist in the network
int tAsNodeCount = mSourceNode.getAS().getNodelist().keySet().size();
// allocate structure for storing names of possible destination nodes
LinkedList<String> tPossibleNodeNames = new LinkedList<String>();
// determine names of possible destination nodes
int i = 0;
int tPreSelectedNodeNr = 0;
Logging.log(this, "Found " + tAsNodeCount + " destination nodes in the current AS \"" + mAs.toString() + "\"");
for(String tNodeName : mSourceNode.getAS().getNodelist().keySet()) {
// check the string array boundaries
if ((sLastSelectedNodeName != null) && (sLastSelectedNodeName == tNodeName)){
Logging.log(this, " ..possible node " + i + ": \"" + tNodeName + "\" [used last time]");
tPreSelectedNodeNr = i;
}else{
Logging.log(this, " ..possible node " + i + ": \"" + tNodeName + "\"");
}
tPossibleNodeNames.add(tNodeName);
i++;
}
// ask the user to which bus should the node be attached to
int tSelectedNodeNr = SelectFromListDialog.open(getSite().getShell(), "Select destination node", "To which node should node " + mSourceNode.toString() + " have a new link?", tPreSelectedNodeNr, tPossibleNodeNames);
Logging.log(this, "Source node: " + mSourceNode);
- if(tSelectedNodeNr > 0){
+ if(tSelectedNodeNr > -1){
String tNodeName = tPossibleNodeNames.get(tSelectedNodeNr);
mSelectedDestinationNode = mAs.getNodeByName(tNodeName);
Logging.log(this, "Selected destination node: " + mSelectedDestinationNode.toString() + "(" + tNodeName + ")");
if (mSelectedDestinationNode != null){
// store the selected bus name for the next time
sLastSelectedNodeName = tNodeName;
}else{
Logging.warn(this, "Invalid destination node found");
}
}else{
Logging.log(this, "User canceled the dialog");
}
}
/**
* Shows the dialog which allows the user to select the attributes for the bus which has to be created for the desired link.
*/
private void showBusDialogs()
{
mBusAttributes = new Description();
//TODO: show dialog to allow user to select link attributes
}
}
| true
| true
|
/**
 * Shows the dialog which allows the user to select the destination node for the new link.
 * Remembers the chosen node name (sLastSelectedNodeName) so it can be pre-selected next time.
 */
private void showNodeDialog()
{
    // determine how many nodes exist in the network
    int tAsNodeCount = mSourceNode.getAS().getNodelist().keySet().size();

    // allocate structure for storing names of possible destination nodes
    LinkedList<String> tPossibleNodeNames = new LinkedList<String>();

    // determine names of possible destination nodes
    int i = 0;
    int tPreSelectedNodeNr = 0;
    Logging.log(this, "Found " + tAsNodeCount + " destination nodes in the current AS \"" + mAs.toString() + "\"");
    for(String tNodeName : mSourceNode.getAS().getNodelist().keySet()) {
        // FIX: compare string contents with equals() instead of reference identity (==);
        // otherwise the previously selected node is only recognized when interned.
        if ((sLastSelectedNodeName != null) && (sLastSelectedNodeName.equals(tNodeName))){
            Logging.log(this, " ..possible node " + i + ": \"" + tNodeName + "\" [used last time]");
            tPreSelectedNodeNr = i;
        }else{
            Logging.log(this, " ..possible node " + i + ": \"" + tNodeName + "\"");
        }
        tPossibleNodeNames.add(tNodeName);
        i++;
    }

    // ask the user to which bus should the node be attached to
    int tSelectedNodeNr = SelectFromListDialog.open(getSite().getShell(), "Select destination node", "To which node should node " + mSourceNode.toString() + " have a new link?", tPreSelectedNodeNr, tPossibleNodeNames);
    Logging.log(this, "Source node: " + mSourceNode);
    // FIX: the dialog returns a 0-based index (negative on cancel); "> 0" wrongly
    // rejected the first list entry, so accept every non-negative index.
    if(tSelectedNodeNr > -1){
        String tNodeName = tPossibleNodeNames.get(tSelectedNodeNr);
        mSelectedDestinationNode = mAs.getNodeByName(tNodeName);
        Logging.log(this, "Selected destination node: " + mSelectedDestinationNode.toString() + "(" + tNodeName + ")");
        if (mSelectedDestinationNode != null){
            // store the selected bus name for the next time
            sLastSelectedNodeName = tNodeName;
        }else{
            Logging.warn(this, "Invalid destination node found");
        }
    }else{
        Logging.log(this, "User canceled the dialog");
    }
}
|
/**
 * Shows the dialog which allows the user to select the destination node for the new link.
 * Remembers the chosen node name (sLastSelectedNodeName) so it can be pre-selected next time.
 */
private void showNodeDialog()
{
    // determine how many nodes exist in the network
    int tAsNodeCount = mSourceNode.getAS().getNodelist().keySet().size();

    // allocate structure for storing names of possible destination nodes
    LinkedList<String> tPossibleNodeNames = new LinkedList<String>();

    // determine names of possible destination nodes
    int i = 0;
    int tPreSelectedNodeNr = 0;
    Logging.log(this, "Found " + tAsNodeCount + " destination nodes in the current AS \"" + mAs.toString() + "\"");
    for(String tNodeName : mSourceNode.getAS().getNodelist().keySet()) {
        // FIX: compare string contents with equals() instead of reference identity (==);
        // otherwise the previously selected node is only recognized when interned.
        if ((sLastSelectedNodeName != null) && (sLastSelectedNodeName.equals(tNodeName))){
            Logging.log(this, " ..possible node " + i + ": \"" + tNodeName + "\" [used last time]");
            tPreSelectedNodeNr = i;
        }else{
            Logging.log(this, " ..possible node " + i + ": \"" + tNodeName + "\"");
        }
        tPossibleNodeNames.add(tNodeName);
        i++;
    }

    // ask the user to which bus should the node be attached to
    int tSelectedNodeNr = SelectFromListDialog.open(getSite().getShell(), "Select destination node", "To which node should node " + mSourceNode.toString() + " have a new link?", tPreSelectedNodeNr, tPossibleNodeNames);
    Logging.log(this, "Source node: " + mSourceNode);
    // A non-negative result is a valid 0-based index; negative means "canceled".
    if(tSelectedNodeNr > -1){
        String tNodeName = tPossibleNodeNames.get(tSelectedNodeNr);
        mSelectedDestinationNode = mAs.getNodeByName(tNodeName);
        Logging.log(this, "Selected destination node: " + mSelectedDestinationNode.toString() + "(" + tNodeName + ")");
        if (mSelectedDestinationNode != null){
            // store the selected bus name for the next time
            sLastSelectedNodeName = tNodeName;
        }else{
            Logging.warn(this, "Invalid destination node found");
        }
    }else{
        Logging.log(this, "User canceled the dialog");
    }
}
|
diff --git a/src/core/TsdbQuery.java b/src/core/TsdbQuery.java
index d8a79d5..07f4c83 100644
--- a/src/core/TsdbQuery.java
+++ b/src/core/TsdbQuery.java
@@ -1,585 +1,587 @@
// This file is part of OpenTSDB.
// Copyright (C) 2010 StumbleUpon, Inc.
//
// This program is free software: you can redistribute it and/or modify it
// under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or (at your
// option) any later version. This program is distributed in the hope that it
// will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser
// General Public License for more details. You should have received a copy
// of the GNU Lesser General Public License along with this program. If not,
// see <http://www.gnu.org/licenses/>.
package net.opentsdb.core;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.Map;
import java.util.TreeMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hbase.async.Bytes;
import org.hbase.async.HBaseException;
import org.hbase.async.KeyValue;
import org.hbase.async.Scanner;
import static org.hbase.async.Bytes.ByteMap;
import net.opentsdb.stats.Histogram;
import net.opentsdb.uid.NoSuchUniqueId;
import net.opentsdb.uid.NoSuchUniqueName;
/**
* Non-synchronized implementation of {@link Query}.
*/
final class TsdbQuery implements Query {
private static final Logger LOG = LoggerFactory.getLogger(TsdbQuery.class);
/** Used whenever there are no results. */
private static final DataPoints[] NO_RESULT = new DataPoints[0];
/**
* Keep track of the latency we perceive when doing Scans on HBase.
* We want buckets up to 16s, with 2 ms interval between each bucket up to
* 100 ms after we which we switch to exponential buckets.
*/
static final Histogram scanlatency = new Histogram(16000, (short) 2, 100);
/**
* Charset to use with our server-side row-filter.
* We use this one because it preserves every possible byte unchanged.
*/
private static final Charset CHARSET = Charset.forName("ISO-8859-1");
/** The TSDB we belong to. */
private final TSDB tsdb;
/** Start time (UNIX timestamp in seconds) on 32 bits ("unsigned" int). */
private int start_time;
/** End time (UNIX timestamp in seconds) on 32 bits ("unsigned" int). */
private int end_time;
/** ID of the metric being looked up. */
private byte[] metric;
/**
* Tags of the metrics being looked up.
* Each tag is a byte array holding the ID of both the name and value
* of the tag.
* Invariant: an element cannot be both in this array and in group_bys.
*/
private ArrayList<byte[]> tags;
/**
* Tags by which we must group the results.
* Each element is a tag ID.
* Invariant: an element cannot be both in this array and in {@code tags}.
*/
private ArrayList<byte[]> group_bys;
/**
* Values we may be grouping on.
* For certain elements in {@code group_bys}, we may have a specific list of
* values IDs we're looking for. Those IDs are stored in this map. The key
* is an element of {@code group_bys} (so a tag name ID) and the values are
* tag value IDs (at least two).
*/
private ByteMap<byte[][]> group_by_values;
/** If true, use rate of change instead of actual values. */
private boolean rate;
/** Aggregator function to use. */
private Aggregator aggregator;
/**
* Downsampling function to use, if any (can be {@code null}).
* If this is non-null, {@code sample_interval} must be strictly positive.
*/
private Aggregator downsampler;
/** Minimum time interval (in seconds) wanted between each data point. */
private int sample_interval;
/** Constructor. */
public TsdbQuery(final TSDB tsdb) {
    // Keep a reference to the TSDB instance whose tables this query scans.
    this.tsdb = tsdb;
}
public void setStartTime(final long timestamp) {
    // Reject timestamps that do not fit into the 32-bit storage format.
    if ((timestamp & 0xFFFFFFFF00000000L) != 0) {
        throw new IllegalArgumentException("Invalid timestamp: " + timestamp);
    } else if (end_time != 0 && timestamp >= getEndTime()) {
        // The start of the queried interval must lie strictly before its end.
        throw new IllegalArgumentException("new start time (" + timestamp
            + ") is greater than or equal to end time: " + getEndTime());
    }
    // Keep the 32 bits.
    start_time = (int) timestamp;
}
public long getStartTime() {
    // 0 is the sentinel for "never set" (see setStartTime).
    if (start_time == 0) {
        throw new IllegalStateException("setStartTime was never called!");
    }
    // Mask so the stored int is reinterpreted as an unsigned 32-bit value.
    return start_time & 0x00000000FFFFFFFFL;
}
public void setEndTime(final long timestamp) {
    // Reject timestamps that do not fit into the 32-bit storage format.
    if ((timestamp & 0xFFFFFFFF00000000L) != 0) {
        throw new IllegalArgumentException("Invalid timestamp: " + timestamp);
    } else if (start_time != 0 && timestamp <= getStartTime()) {
        // The end of the queried interval must lie strictly after its start.
        throw new IllegalArgumentException("new end time (" + timestamp
            + ") is less than or equal to start time: " + getStartTime());
    }
    // Keep the 32 bits.
    end_time = (int) timestamp;
}
/**
 * Returns the end time of this query, lazily defaulting to "now" when none
 * was set, as an unsigned 32-bit UNIX timestamp in seconds.
 */
public long getEndTime() {
    if (end_time == 0) {
        // Default to the current time in seconds on first access.
        setEndTime(System.currentTimeMillis() / 1000);
    }
    // FIX: mask exactly like getStartTime() does, so the int field is treated
    // as unsigned per the field's contract instead of being sign-extended.
    return end_time & 0x00000000FFFFFFFFL;
}
public void setTimeSeries(final String metric,
                          final Map<String, String> tags,
                          final Aggregator function,
                          final boolean rate) throws NoSuchUniqueName {
    // Must run first: it removes the 'GROUP BY' tags from the map before the
    // remaining (exact-match) tags are resolved to IDs below.
    findGroupBys(tags);
    this.metric = tsdb.metrics.getId(metric);
    this.tags = Tags.resolveAll(tsdb, tags);
    aggregator = function;
    this.rate = rate;
}
/**
 * Enables downsampling of the queried data.
 *
 * @param interval minimum time interval (in seconds) between data points;
 *                 must be strictly positive
 * @param downsampler aggregation function applied within each interval;
 *                    must not be {@code null}
 */
public void downsample(final int interval, final Aggregator downsampler) {
    if (downsampler == null) {
        throw new NullPointerException("downsampler");
    }
    if (interval <= 0) {
        throw new IllegalArgumentException("interval not > 0: " + interval);
    }
    this.downsampler = downsampler;
    this.sample_interval = interval;
}
/**
* Extracts all the tags we must use to group results.
* <ul>
* <li>If a tag has the form {@code name=*} then we'll create one
* group per value we find for that tag.</li>
* <li>If a tag has the form {@code name={v1,v2,..,vN}} then we'll
* create {@code N} groups.</li>
* </ul>
* In the both cases above, {@code name} will be stored in the
* {@code group_bys} attribute. In the second case specifically,
* the {@code N} values would be stored in {@code group_by_values},
* the key in this map being {@code name}.
* @param tags The tags from which to extract the 'GROUP BY's.
* Each tag that represents a 'GROUP BY' will be removed from the map
* passed in argument.
*/
private void findGroupBys(final Map<String, String> tags) {
    final Iterator<Map.Entry<String, String>> i = tags.entrySet().iterator();
    while (i.hasNext()) {
        final Map.Entry<String, String> tag = i.next();
        final String tagvalue = tag.getValue();
        if (tagvalue.equals("*")  // 'GROUP BY' with any value.
            || tagvalue.indexOf('|', 1) >= 0) {  // Multiple possible values.
            if (group_bys == null) {
                group_bys = new ArrayList<byte[]>();
            }
            group_bys.add(tsdb.tag_names.getId(tag.getKey()));
            // Remove the tag from the caller's map -- group-by tags must not be
            // treated as exact-match tags by setTimeSeries.
            i.remove();
            if (tagvalue.charAt(0) == '*') {
                continue;  // For a 'GROUP BY' with any value, we're done.
            }
            // 'GROUP BY' with specific values. Need to split the values
            // to group on and store their IDs in group_by_values.
            final String[] values = Tags.splitString(tagvalue, '|');
            if (group_by_values == null) {
                group_by_values = new ByteMap<byte[][]>();
            }
            final short value_width = tsdb.tag_values.width();
            final byte[][] value_ids = new byte[values.length][value_width];
            group_by_values.put(tsdb.tag_names.getId(tag.getKey()),
                                value_ids);
            // Resolve each listed value name to its ID and copy it into place.
            for (int j = 0; j < values.length; j++) {
                final byte[] value_id = tsdb.tag_values.getId(values[j]);
                System.arraycopy(value_id, 0, value_ids[j], 0, value_width);
            }
        }
    }
}
public DataPoints[] run() throws HBaseException {
    // Scan HBase for all matching rows, then apply any 'GROUP BY' and aggregation.
    return groupByAndAggregate(findSpans());
}
/**
* Finds all the {@link Span}s that match this query.
* This is what actually scans the HBase table and loads the data into
* {@link Span}s.
* @return A map from HBase row key to the {@link Span} for that row key.
* Since a {@link Span} actually contains multiple HBase rows, the row key
* stored in the map has its timestamp zero'ed out.
* @throws HBaseException if there was a problem communicating with HBase to
* perform the search.
* @throws IllegalArgumentException if bad data was retreived from HBase.
*/
private TreeMap<byte[], Span> findSpans() throws HBaseException {
    final short metric_width = tsdb.metrics.width();
    final TreeMap<byte[], Span> spans =  // The key is a row key from HBase.
        new TreeMap<byte[], Span>(new SpanCmp(metric_width));
    int nrows = 0;
    int hbase_time = 0;  // milliseconds.
    long starttime = System.nanoTime();
    final Scanner scanner = getScanner();
    try {
        ArrayList<ArrayList<KeyValue>> rows;
        while ((rows = scanner.nextRows().joinUninterruptibly()) != null) {
            // Account only the time spent waiting on HBase, not the time spent
            // processing rows below (starttime is reset at the end of the loop).
            hbase_time += (System.nanoTime() - starttime) / 1000000;
            for (final ArrayList<KeyValue> row : rows) {
                final byte[] key = row.get(0).key();
                // Sanity check: every returned row must start with our metric ID.
                if (Bytes.memcmp(metric, key, 0, metric_width) != 0) {
                    throw new AssertionError("HBase returned a row that doesn't match"
                        + " our scanner (" + scanner + ")! " + row + " does not start"
                        + " with " + Arrays.toString(metric));
                }
                // Rows with the same key (timestamp zero'ed out by SpanCmp) are
                // accumulated into the same Span.
                Span datapoints = spans.get(key);
                if (datapoints == null) {
                    datapoints = new Span(tsdb);
                    spans.put(key, datapoints);
                }
                datapoints.addRow(row);
                nrows++;
                starttime = System.nanoTime();
            }
        }
    } catch (HBaseException e) {
        throw e;
    } catch (IllegalArgumentException e) {
        throw e;
    } catch (Exception e) {
        // nextRows().joinUninterruptibly() declares Exception; anything else
        // reaching this point indicates a programming error.
        throw new RuntimeException("Should never be here", e);
    } finally {
        // Record the scan latency even on the error paths.
        hbase_time += (System.nanoTime() - starttime) / 1000000;
        scanlatency.add(hbase_time);
    }
    LOG.info(this + " matched " + nrows + " rows in " + spans.size() + " spans");
    if (nrows == 0) {
        return null;
    }
    return spans;
}
/**
* Creates the {@link SpanGroup}s to form the final results of this query.
* @param spans The {@link Span}s found for this query ({@link #findSpans}).
* Can be {@code null}, in which case the array returned will be empty.
* @return A possibly empty array of {@link SpanGroup}s built according to
* any 'GROUP BY' formulated in this query.
*/
private DataPoints[] groupByAndAggregate(final TreeMap<byte[], Span> spans) {
if (spans == null || spans.size() <= 0) {
return NO_RESULT;
}
if (group_bys == null) {
// We haven't been asked to find groups, so let's put all the spans
// together in the same group.
final SpanGroup group = new SpanGroup(tsdb,
getStartTime(), getEndTime(),
spans.values(),
rate,
aggregator,
sample_interval, downsampler);
return new SpanGroup[] { group };
}
// Maps group value IDs to the SpanGroup for those values. Say we've
// been asked to group by two things: foo=* bar=* Then the keys in this
// map will contain all the value IDs combinations we've seen. If the
// name IDs for `foo' and `bar' are respectively [0, 0, 7] and [0, 0, 2]
// then we'll have group_bys=[[0, 0, 2], [0, 0, 7]] (notice it's sorted
// by ID, so bar is first) and say we find foo=LOL bar=OMG as well as
// foo=LOL bar=WTF and that the IDs of the tag values are:
// LOL=[0, 0, 1] OMG=[0, 0, 4] WTF=[0, 0, 3]
// then the map will have two keys:
// - one for the LOL-OMG combination: [0, 0, 1, 0, 0, 4] and,
// - one for the LOL-WTF combination: [0, 0, 1, 0, 0, 3].
final ByteMap<SpanGroup> groups = new ByteMap<SpanGroup>();
final short metric_ts_bytes = (short) (tsdb.metrics.width()
+ Const.TIMESTAMP_BYTES);
final short name_width = tsdb.tag_names.width();
final short value_width = tsdb.tag_values.width();
final short tag_bytes = (short) (name_width + value_width);
final byte[] group = new byte[group_bys.size() * value_width];
for (final Map.Entry<byte[], Span> entry : spans.entrySet()) {
final byte[] row = entry.getKey();
byte[] value_id = null;
int i = 0;
// TODO(tsuna): The following loop has a quadratic behavior. We can
// make it much better since both the row key and group_bys are sorted.
for (final byte[] tag_id : group_bys) {
value_id = Tags.getValueId(tsdb, row, tag_id);
if (value_id == null) {
break;
}
System.arraycopy(value_id, 0, group, i, value_width);
i += value_width;
}
if (value_id == null) {
- LOG.info("Dropping span: " + Arrays.toString(row));
+ LOG.error("WTF? Dropping span for row " + Arrays.toString(row)
+ + " as it had no matching tag from the requested groups,"
+ + " which is unexpected. Query=" + this);
continue;
}
//LOG.info("Span belongs to group " + Arrays.toString(group) + ": " + Arrays.toString(row));
SpanGroup thegroup = groups.get(group);
if (thegroup == null) {
thegroup = new SpanGroup(tsdb, getStartTime(), getEndTime(),
null, rate, aggregator,
sample_interval, downsampler);
// Copy the array because we're going to keep `group' and overwrite
// its contents. So we want the collection to have an immutable copy.
final byte[] group_copy = new byte[group.length];
System.arraycopy(group, 0, group_copy, 0, group.length);
groups.put(group_copy, thegroup);
}
thegroup.add(entry.getValue());
}
//for (final Map.Entry<byte[], SpanGroup> entry : groups) {
// LOG.info("group for " + Arrays.toString(entry.getKey()) + ": " + entry.getValue());
//}
return groups.values().toArray(new SpanGroup[groups.size()]);
}
/**
* Creates the {@link Scanner} to use for this query.
*/
private Scanner getScanner() throws HBaseException {
    final short metric_width = tsdb.metrics.width();
    // Row keys are laid out as: metric ID | base timestamp | tag pairs.
    final byte[] start_row = new byte[metric_width + Const.TIMESTAMP_BYTES];
    final byte[] end_row = new byte[metric_width + Const.TIMESTAMP_BYTES];
    // We search MAX_TIMESPAN seconds before and after the end time as it's
    // quite likely that the exact timestamp we're looking for is in the
    // middle of a row. Additionally, in case our sample_interval is large,
    // we need to look even further before/after, so use that too.
    Bytes.setInt(start_row, start_time - Const.MAX_TIMESPAN - sample_interval,
                 metric_width);
    Bytes.setInt(end_row, (end_time == 0
                           ? -1  // Will scan until the end (0xFFF...).
                           : end_time + Const.MAX_TIMESPAN + sample_interval),
                 metric_width);
    System.arraycopy(metric, 0, start_row, 0, metric_width);
    System.arraycopy(metric, 0, end_row, 0, metric_width);
    final Scanner scanner = tsdb.client.newScanner(tsdb.table);
    scanner.setStartKey(start_row);
    scanner.setStopKey(end_row);
    // Only install the server-side row filter when tags or group-bys constrain
    // which rows of this metric we actually want.
    if (tags.size() > 0 || group_bys != null) {
        createAndSetFilter(scanner);
    }
    scanner.setFamily(TSDB.FAMILY);
    return scanner;
}
/**
* Sets the server-side regexp filter on the scanner.
* In order to find the rows with the relevant tags, we use a
* server-side filter that matches a regular expression on the row key.
* @param scanner The scanner on which to add the filter.
*/
void createAndSetFilter(final Scanner scanner) {
    // The merge loop below relies on group_bys being sorted like `tags' already is.
    if (group_bys != null) {
        Collections.sort(group_bys, Bytes.MEMCMP);
    }
    final short name_width = tsdb.tag_names.width();
    final short value_width = tsdb.tag_values.width();
    final short tagsize = (short) (name_width + value_width);
    // Generate a regexp for our tags. Say we have 2 tags: { 0 0 1 0 0 2 }
    // and { 4 5 6 9 8 7 }, the regexp will be:
    // "^.{7}(?:.{6})*\\Q\000\000\001\000\000\002\\E(?:.{6})*\\Q\004\005\006\011\010\007\\E(?:.{6})*$"
    final StringBuilder buf = new StringBuilder(
        15  // "^.{N}" + "(?:.{M})*" + "$"
        + ((13 + tagsize) // "(?:.{M})*\\Q" + tagsize bytes + "\\E"
           * (tags.size() + (group_bys == null ? 0 : group_bys.size() * 3))));
    // In order to avoid re-allocations, reserve a bit more w/ groups ^^^
    // Alright, let's build this regexp. From the beginning...
    buf.append("(?s)"  // Ensure we use the DOTALL flag.
               + "^.{")
        // ... start by skipping the metric ID and timestamp.
        .append(tsdb.metrics.width() + Const.TIMESTAMP_BYTES)
        .append("}");
    final Iterator<byte[]> tags = this.tags.iterator();
    final Iterator<byte[]> group_bys = (this.group_bys == null
                                        ? new ArrayList<byte[]>(0).iterator()
                                        : this.group_bys.iterator());
    byte[] tag = tags.hasNext() ? tags.next() : null;
    byte[] group_by = group_bys.hasNext() ? group_bys.next() : null;
    // Tags and group_bys are already sorted. We need to put them in the
    // regexp in order by ID, which means we just merge two sorted lists.
    do {
        // Skip any number of tags.
        buf.append("(?:.{").append(tagsize).append("})*\\Q");
        if (isTagNext(name_width, tag, group_by)) {
            addId(buf, tag);
            tag = tags.hasNext() ? tags.next() : null;
        } else {  // Add a group_by.
            addId(buf, group_by);
            final byte[][] value_ids = (group_by_values == null
                                        ? null
                                        : group_by_values.get(group_by));
            if (value_ids == null) {  // We don't want any specific ID...
                buf.append(".{").append(value_width).append('}');  // Any value ID.
            } else {  // We want specific IDs. List them: /(AAA|BBB|CCC|..)/
                buf.append("(?:");
                for (final byte[] value_id : value_ids) {
                    buf.append("\\Q");
                    addId(buf, value_id);
                    buf.append('|');
                }
                // Replace the pipe of the last iteration.
                buf.setCharAt(buf.length() - 1, ')');
            }
            group_by = group_bys.hasNext() ? group_bys.next() : null;
        }
    } while (tag != group_by);  // Stop when they both become null.
    // Skip any number of tags before the end.
    buf.append("(?:.{").append(tagsize).append("})*$");
    scanner.setKeyRegexp(buf.toString(), CHARSET);
}
/**
* Helper comparison function to compare tag name IDs.
* @param name_width Number of bytes used by a tag name ID.
* @param tag A tag (array containing a tag name ID and a tag value ID).
* @param group_by A tag name ID.
* @return {@code true} number if {@code tag} should be used next (because
* it contains a smaller ID), {@code false} otherwise.
*/
private boolean isTagNext(final short name_width,
final byte[] tag,
final byte[] group_by) {
if (tag == null) {
return false;
} else if (group_by == null) {
return true;
}
final int cmp = Bytes.memcmp(tag, group_by, 0, name_width);
if (cmp == 0) {
throw new AssertionError("invariant violation: tag ID "
+ Arrays.toString(group_by) + " is both in 'tags' and"
+ " 'group_bys' in " + this);
}
return cmp < 0;
}
/**
* Appends the given ID to the given buffer, followed by "\\E".
*/
private static void addId(final StringBuilder buf, final byte[] id) {
for (final byte b : id) {
buf.append((char) (b & 0xFF));
if (b == '\\') { // Escape the escape characters that are in the ID.
buf.append('\\');
}
}
buf.append("\\E");
}
public String toString() {
final StringBuilder buf = new StringBuilder();
buf.append("TsdbQuery(start_time=")
.append(getStartTime())
.append(", end_time=")
.append(getEndTime())
.append(", metric=").append(Arrays.toString(metric));
try {
buf.append(" (").append(tsdb.metrics.getName(metric));
} catch (NoSuchUniqueId e) {
buf.append(" (<").append(e.getMessage()).append('>');
}
try {
buf.append("), tags=").append(Tags.resolveIds(tsdb, tags));
} catch (NoSuchUniqueId e) {
buf.append("), tags=<").append(e.getMessage()).append('>');
}
buf.append(", rate=").append(rate)
.append(", aggregator=").append(aggregator)
.append(", group_bys=(");
if (group_bys != null) {
for (final byte[] tag_id : group_bys) {
try {
buf.append(tsdb.tag_names.getName(tag_id));
} catch (NoSuchUniqueId e) {
buf.append('<').append(e.getMessage()).append('>');
}
buf.append(' ')
.append(Arrays.toString(tag_id));
if (group_by_values != null) {
final byte[][] value_ids = group_by_values.get(tag_id);
if (value_ids == null) {
continue;
}
buf.append("={");
for (final byte[] value_id : value_ids) {
try {
buf.append(tsdb.tag_values.getName(value_id));
} catch (NoSuchUniqueId e) {
buf.append('<').append(e.getMessage()).append('>');
}
buf.append(' ')
.append(Arrays.toString(value_id))
.append(", ");
}
buf.append('}');
}
buf.append(", ");
}
}
buf.append("))");
return buf.toString();
}
/**
* Comparator that ignores timestamps in row keys.
*/
private static final class SpanCmp implements Comparator<byte[]> {
private final short metric_width;
public SpanCmp(final short metric_width) {
this.metric_width = metric_width;
}
public int compare(final byte[] a, final byte[] b) {
final int length = Math.min(a.length, b.length);
if (a == b) { // Do this after accessing a.length and b.length
return 0; // in order to NPE if either a or b is null.
}
int i;
// First compare the metric ID.
for (i = 0; i < metric_width; i++) {
if (a[i] != b[i]) {
return (a[i] & 0xFF) - (b[i] & 0xFF); // "promote" to unsigned.
}
}
// Then skip the timestamp and compare the rest.
for (i += Const.TIMESTAMP_BYTES; i < length; i++) {
if (a[i] != b[i]) {
return (a[i] & 0xFF) - (b[i] & 0xFF); // "promote" to unsigned.
}
}
return a.length - b.length;
}
}
}
| true
| true
|
private DataPoints[] groupByAndAggregate(final TreeMap<byte[], Span> spans) {
if (spans == null || spans.size() <= 0) {
return NO_RESULT;
}
if (group_bys == null) {
// We haven't been asked to find groups, so let's put all the spans
// together in the same group.
final SpanGroup group = new SpanGroup(tsdb,
getStartTime(), getEndTime(),
spans.values(),
rate,
aggregator,
sample_interval, downsampler);
return new SpanGroup[] { group };
}
// Maps group value IDs to the SpanGroup for those values. Say we've
// been asked to group by two things: foo=* bar=* Then the keys in this
// map will contain all the value IDs combinations we've seen. If the
// name IDs for `foo' and `bar' are respectively [0, 0, 7] and [0, 0, 2]
// then we'll have group_bys=[[0, 0, 2], [0, 0, 7]] (notice it's sorted
// by ID, so bar is first) and say we find foo=LOL bar=OMG as well as
// foo=LOL bar=WTF and that the IDs of the tag values are:
// LOL=[0, 0, 1] OMG=[0, 0, 4] WTF=[0, 0, 3]
// then the map will have two keys:
// - one for the LOL-OMG combination: [0, 0, 1, 0, 0, 4] and,
// - one for the LOL-WTF combination: [0, 0, 1, 0, 0, 3].
final ByteMap<SpanGroup> groups = new ByteMap<SpanGroup>();
final short metric_ts_bytes = (short) (tsdb.metrics.width()
+ Const.TIMESTAMP_BYTES);
final short name_width = tsdb.tag_names.width();
final short value_width = tsdb.tag_values.width();
final short tag_bytes = (short) (name_width + value_width);
final byte[] group = new byte[group_bys.size() * value_width];
for (final Map.Entry<byte[], Span> entry : spans.entrySet()) {
final byte[] row = entry.getKey();
byte[] value_id = null;
int i = 0;
// TODO(tsuna): The following loop has a quadratic behavior. We can
// make it much better since both the row key and group_bys are sorted.
for (final byte[] tag_id : group_bys) {
value_id = Tags.getValueId(tsdb, row, tag_id);
if (value_id == null) {
break;
}
System.arraycopy(value_id, 0, group, i, value_width);
i += value_width;
}
if (value_id == null) {
LOG.info("Dropping span: " + Arrays.toString(row));
continue;
}
//LOG.info("Span belongs to group " + Arrays.toString(group) + ": " + Arrays.toString(row));
SpanGroup thegroup = groups.get(group);
if (thegroup == null) {
thegroup = new SpanGroup(tsdb, getStartTime(), getEndTime(),
null, rate, aggregator,
sample_interval, downsampler);
// Copy the array because we're going to keep `group' and overwrite
// its contents. So we want the collection to have an immutable copy.
final byte[] group_copy = new byte[group.length];
System.arraycopy(group, 0, group_copy, 0, group.length);
groups.put(group_copy, thegroup);
}
thegroup.add(entry.getValue());
}
//for (final Map.Entry<byte[], SpanGroup> entry : groups) {
// LOG.info("group for " + Arrays.toString(entry.getKey()) + ": " + entry.getValue());
//}
return groups.values().toArray(new SpanGroup[groups.size()]);
}
|
private DataPoints[] groupByAndAggregate(final TreeMap<byte[], Span> spans) {
if (spans == null || spans.size() <= 0) {
return NO_RESULT;
}
if (group_bys == null) {
// We haven't been asked to find groups, so let's put all the spans
// together in the same group.
final SpanGroup group = new SpanGroup(tsdb,
getStartTime(), getEndTime(),
spans.values(),
rate,
aggregator,
sample_interval, downsampler);
return new SpanGroup[] { group };
}
// Maps group value IDs to the SpanGroup for those values. Say we've
// been asked to group by two things: foo=* bar=* Then the keys in this
// map will contain all the value IDs combinations we've seen. If the
// name IDs for `foo' and `bar' are respectively [0, 0, 7] and [0, 0, 2]
// then we'll have group_bys=[[0, 0, 2], [0, 0, 7]] (notice it's sorted
// by ID, so bar is first) and say we find foo=LOL bar=OMG as well as
// foo=LOL bar=WTF and that the IDs of the tag values are:
// LOL=[0, 0, 1] OMG=[0, 0, 4] WTF=[0, 0, 3]
// then the map will have two keys:
// - one for the LOL-OMG combination: [0, 0, 1, 0, 0, 4] and,
// - one for the LOL-WTF combination: [0, 0, 1, 0, 0, 3].
final ByteMap<SpanGroup> groups = new ByteMap<SpanGroup>();
final short metric_ts_bytes = (short) (tsdb.metrics.width()
+ Const.TIMESTAMP_BYTES);
final short name_width = tsdb.tag_names.width();
final short value_width = tsdb.tag_values.width();
final short tag_bytes = (short) (name_width + value_width);
final byte[] group = new byte[group_bys.size() * value_width];
for (final Map.Entry<byte[], Span> entry : spans.entrySet()) {
final byte[] row = entry.getKey();
byte[] value_id = null;
int i = 0;
// TODO(tsuna): The following loop has a quadratic behavior. We can
// make it much better since both the row key and group_bys are sorted.
for (final byte[] tag_id : group_bys) {
value_id = Tags.getValueId(tsdb, row, tag_id);
if (value_id == null) {
break;
}
System.arraycopy(value_id, 0, group, i, value_width);
i += value_width;
}
if (value_id == null) {
LOG.error("WTF? Dropping span for row " + Arrays.toString(row)
+ " as it had no matching tag from the requested groups,"
+ " which is unexpected. Query=" + this);
continue;
}
//LOG.info("Span belongs to group " + Arrays.toString(group) + ": " + Arrays.toString(row));
SpanGroup thegroup = groups.get(group);
if (thegroup == null) {
thegroup = new SpanGroup(tsdb, getStartTime(), getEndTime(),
null, rate, aggregator,
sample_interval, downsampler);
// Copy the array because we're going to keep `group' and overwrite
// its contents. So we want the collection to have an immutable copy.
final byte[] group_copy = new byte[group.length];
System.arraycopy(group, 0, group_copy, 0, group.length);
groups.put(group_copy, thegroup);
}
thegroup.add(entry.getValue());
}
//for (final Map.Entry<byte[], SpanGroup> entry : groups) {
// LOG.info("group for " + Arrays.toString(entry.getKey()) + ": " + entry.getValue());
//}
return groups.values().toArray(new SpanGroup[groups.size()]);
}
|
diff --git a/tests/org.eclipse.m2m.atl.tests/src/org/eclipse/m2m/atl/tests/util/WikiOutputter.java b/tests/org.eclipse.m2m.atl.tests/src/org/eclipse/m2m/atl/tests/util/WikiOutputter.java
index 8f494469..3dd4efcf 100644
--- a/tests/org.eclipse.m2m.atl.tests/src/org/eclipse/m2m/atl/tests/util/WikiOutputter.java
+++ b/tests/org.eclipse.m2m.atl.tests/src/org/eclipse/m2m/atl/tests/util/WikiOutputter.java
@@ -1,128 +1,130 @@
/*******************************************************************************
* Copyright (c) 2008 Obeo.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Obeo - Wiki utilities
*******************************************************************************/
package org.eclipse.m2m.atl.tests.util;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;
import java.util.Map.Entry;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
/**
* Utility class for outputting wiki code.
*
* @author William Piers <a href="mailto:william.piers@obeo.fr">william.piers@obeo.fr</a>
*/
public class WikiOutputter {
private final static String testsPath = "results/tests.properties";
private final static String headerPath = "results/header.txt";
private final static String wikiPath = "results/wiki_table.txt";
private final static String bottomPath = "results/bottom.txt";
private final static String emfvmDataPath = "results/EMFVM_results.xml";
private final static String vmDataPath = "results/VM_results.xml";
private static Map directories = new HashMap();
/**
* This is a simple main which attempts to simplificate the management of
* the results. Launch, then copy an past the "wiki_table.txt" file into
* this : http://wiki.eclipse.org/ATL_VM_Testing#Non-regression_tests
* @param args
*/
public static void main(String[] args) {
try {
Map vmResults = initResults(vmDataPath);
Map emfVMResults = initResults(emfvmDataPath);
FileWriter fw = new FileWriter(wikiPath);
fw.write(getFragment(headerPath));
Properties tests = new Properties();
FileInputStream fis = new FileInputStream(new File(testsPath));
tests.load(fis);
for (Iterator iterator = tests.entrySet().iterator(); iterator.hasNext();) {
Entry test = (Entry) iterator.next();
String testName = (String) test.getKey();
String comment = (String) test.getValue();
String emfVMTime = (String) emfVMResults.get(testName);
String vmTime = (String) vmResults.get(testName);
String directory = (String) directories.get(testName);
- fw.write("|-\n");
- fw.write("! colspan=1 | [http://dev.eclipse.org/viewcvs/index.cgi/org.eclipse.m2m/org.eclipse.m2m.atl/tests"+directory.replaceAll("\\\\","/")+"?root=Modeling_Project "+testName+"]\n");
- fw.write("! colspan=1 | <b style=\"color:green\">PASS</b>\n");
- fw.write("! colspan=1 | "+vmTime+"s.\n");
- fw.write("! colspan=1 | "+emfVMTime+"s.\n");
- fw.write("! colspan=1 | "+comment+"\n");
+ if (directory != null) {
+ fw.write("|-\n");
+ fw.write("! colspan=1 | [http://dev.eclipse.org/viewcvs/index.cgi/org.eclipse.m2m/org.eclipse.m2m.atl/tests"+directory.replaceAll("\\\\","/")+"?root=Modeling_Project "+testName+"]\n");
+ fw.write("! colspan=1 | <b style=\"color:green\">PASS</b>\n");
+ fw.write("! colspan=1 | "+vmTime+"s.\n");
+ fw.write("! colspan=1 | "+emfVMTime+"s.\n");
+ fw.write("! colspan=1 | "+comment+"\n");
+ }
}
fw.write("|-\n");
fw.write("! colspan=2 | Total time : \n");
fw.write("! colspan=1 | "+new Double((String)vmResults.get("TOTAL")).floatValue()+"s.\n");
fw.write("! colspan=1 | "+new Double((String)emfVMResults.get("TOTAL")).floatValue()+"s.\n");
fw.write("|}\n\nTests realized on "+new Date()+" with :\n");
fw.write(getFragment(bottomPath));
fw.close();
} catch (Exception e) {
e.printStackTrace();
}
}
private static Map initResults(String path) throws Exception {
Map res= new HashMap();
DocumentBuilderFactory docBuilderFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder docBuilder = docBuilderFactory.newDocumentBuilder();
Document doc = docBuilder.parse (new File(path));
Element rootElement = doc.getDocumentElement();
NodeList childs = rootElement.getChildNodes();
for (int i = 0; i < childs.getLength(); i++) {
Node node = (Node) childs.item(i);
if (node instanceof Element) {
Element element = (Element) node;
String name = element.getAttribute("name");
res.put(name, element.getAttribute("time"));
if (directories.get(name) == null) {
directories.put(name, element.getAttribute("directory"));
}
}
}
return res;
}
private static String getFragment(String path) throws Exception {
StringBuffer content = new StringBuffer();
BufferedReader reader = new BufferedReader(
new FileReader(path));
char[] buf = new char[1024];
int numRead=0;
while((numRead=reader.read(buf)) != -1){
String readData = String.valueOf(buf, 0, numRead);
content.append(readData);
buf = new char[1024];
}
reader.close();
return content.toString();
}
}
| true
| true
|
public static void main(String[] args) {
try {
Map vmResults = initResults(vmDataPath);
Map emfVMResults = initResults(emfvmDataPath);
FileWriter fw = new FileWriter(wikiPath);
fw.write(getFragment(headerPath));
Properties tests = new Properties();
FileInputStream fis = new FileInputStream(new File(testsPath));
tests.load(fis);
for (Iterator iterator = tests.entrySet().iterator(); iterator.hasNext();) {
Entry test = (Entry) iterator.next();
String testName = (String) test.getKey();
String comment = (String) test.getValue();
String emfVMTime = (String) emfVMResults.get(testName);
String vmTime = (String) vmResults.get(testName);
String directory = (String) directories.get(testName);
fw.write("|-\n");
fw.write("! colspan=1 | [http://dev.eclipse.org/viewcvs/index.cgi/org.eclipse.m2m/org.eclipse.m2m.atl/tests"+directory.replaceAll("\\\\","/")+"?root=Modeling_Project "+testName+"]\n");
fw.write("! colspan=1 | <b style=\"color:green\">PASS</b>\n");
fw.write("! colspan=1 | "+vmTime+"s.\n");
fw.write("! colspan=1 | "+emfVMTime+"s.\n");
fw.write("! colspan=1 | "+comment+"\n");
}
fw.write("|-\n");
fw.write("! colspan=2 | Total time : \n");
fw.write("! colspan=1 | "+new Double((String)vmResults.get("TOTAL")).floatValue()+"s.\n");
fw.write("! colspan=1 | "+new Double((String)emfVMResults.get("TOTAL")).floatValue()+"s.\n");
fw.write("|}\n\nTests realized on "+new Date()+" with :\n");
fw.write(getFragment(bottomPath));
fw.close();
} catch (Exception e) {
e.printStackTrace();
}
}
|
public static void main(String[] args) {
try {
Map vmResults = initResults(vmDataPath);
Map emfVMResults = initResults(emfvmDataPath);
FileWriter fw = new FileWriter(wikiPath);
fw.write(getFragment(headerPath));
Properties tests = new Properties();
FileInputStream fis = new FileInputStream(new File(testsPath));
tests.load(fis);
for (Iterator iterator = tests.entrySet().iterator(); iterator.hasNext();) {
Entry test = (Entry) iterator.next();
String testName = (String) test.getKey();
String comment = (String) test.getValue();
String emfVMTime = (String) emfVMResults.get(testName);
String vmTime = (String) vmResults.get(testName);
String directory = (String) directories.get(testName);
if (directory != null) {
fw.write("|-\n");
fw.write("! colspan=1 | [http://dev.eclipse.org/viewcvs/index.cgi/org.eclipse.m2m/org.eclipse.m2m.atl/tests"+directory.replaceAll("\\\\","/")+"?root=Modeling_Project "+testName+"]\n");
fw.write("! colspan=1 | <b style=\"color:green\">PASS</b>\n");
fw.write("! colspan=1 | "+vmTime+"s.\n");
fw.write("! colspan=1 | "+emfVMTime+"s.\n");
fw.write("! colspan=1 | "+comment+"\n");
}
}
fw.write("|-\n");
fw.write("! colspan=2 | Total time : \n");
fw.write("! colspan=1 | "+new Double((String)vmResults.get("TOTAL")).floatValue()+"s.\n");
fw.write("! colspan=1 | "+new Double((String)emfVMResults.get("TOTAL")).floatValue()+"s.\n");
fw.write("|}\n\nTests realized on "+new Date()+" with :\n");
fw.write(getFragment(bottomPath));
fw.close();
} catch (Exception e) {
e.printStackTrace();
}
}
|
diff --git a/src/game/Outcome.java b/src/game/Outcome.java
index b4ce457..29768b9 100644
--- a/src/game/Outcome.java
+++ b/src/game/Outcome.java
@@ -1,93 +1,92 @@
package game;
/**
*
* @author wdencker
*
*/
// Outcome modifies the person after receiving a choice.
public class Outcome {
private boolean alive;
private int charisma;
private int intelligence;
private int strength;
private int wealth;
private int confidence;
private int age;
/**
* @param alive
* is a boolean, indicating whether the decision results in the
* person being alive or dead
* @param charisma
* is the number (+ or -) which indicates how the charisma of the
* person is changed by the decision.
* @param intelligence
* is the number (+ or -) which indicates how the intelligence of
* the person is changed by the decision.
* @param strength
* is the number (+ or -) which indicates how the strength of the
* person is changed by the decision.
* @param wealth
* is the number (+ or -) which indicates how the wealth of the
* person is changed by the decision.
* @param confidence
* is the number (+ or -) which indicates how the wealth of the
* person is changed by the decision.
* @param age
* is the number representing the age of the person after the
* decision.
*/
public Outcome(boolean alive, int charisma, int intelligence, int strength, int wealth, int confidence, int age) {
this.alive = alive;
this.charisma = charisma;
this.intelligence = intelligence;
this.strength = strength;
this.wealth = wealth;
this.confidence = confidence;
this.age = age;
}
/**
* @param p
* is the Person object playing the game that is to be modified.
*/
public void updateAttributes(Person p) {
p.setAlive(alive);
int newStat = p.getCharisma() + charisma;
if (newStat <= 0) {
p.setCharisma(0);
} else {
p.setCharisma(newStat);
}
newStat = p.getIntelligence() + intelligence;
if (newStat <= 0) {
p.setIntelligence(0);
- System.err.println("here");
} else {
p.setIntelligence(newStat);
}
newStat = p.getStrength() + strength;
if (newStat <= 0) {
p.setStrength(0);
} else {
p.setStrength(newStat);
}
newStat = p.getWealth() + wealth;
if (newStat <= 0) {
p.setWealth(0);
} else {
p.setWealth(newStat);
}
newStat = p.getConfidence() + confidence;
if (newStat <= 0) {
p.setConfidence(0);
} else {
p.setConfidence(newStat);
}
p.setAge(age);
}
}
| true
| true
|
public void updateAttributes(Person p) {
p.setAlive(alive);
int newStat = p.getCharisma() + charisma;
if (newStat <= 0) {
p.setCharisma(0);
} else {
p.setCharisma(newStat);
}
newStat = p.getIntelligence() + intelligence;
if (newStat <= 0) {
p.setIntelligence(0);
System.err.println("here");
} else {
p.setIntelligence(newStat);
}
newStat = p.getStrength() + strength;
if (newStat <= 0) {
p.setStrength(0);
} else {
p.setStrength(newStat);
}
newStat = p.getWealth() + wealth;
if (newStat <= 0) {
p.setWealth(0);
} else {
p.setWealth(newStat);
}
newStat = p.getConfidence() + confidence;
if (newStat <= 0) {
p.setConfidence(0);
} else {
p.setConfidence(newStat);
}
p.setAge(age);
}
|
public void updateAttributes(Person p) {
p.setAlive(alive);
int newStat = p.getCharisma() + charisma;
if (newStat <= 0) {
p.setCharisma(0);
} else {
p.setCharisma(newStat);
}
newStat = p.getIntelligence() + intelligence;
if (newStat <= 0) {
p.setIntelligence(0);
} else {
p.setIntelligence(newStat);
}
newStat = p.getStrength() + strength;
if (newStat <= 0) {
p.setStrength(0);
} else {
p.setStrength(newStat);
}
newStat = p.getWealth() + wealth;
if (newStat <= 0) {
p.setWealth(0);
} else {
p.setWealth(newStat);
}
newStat = p.getConfidence() + confidence;
if (newStat <= 0) {
p.setConfidence(0);
} else {
p.setConfidence(newStat);
}
p.setAge(age);
}
|
diff --git a/src/keepcalm/mods/bukkit/asm/BukkitASMLoader.java b/src/keepcalm/mods/bukkit/asm/BukkitASMLoader.java
index e35d723..79c1408 100644
--- a/src/keepcalm/mods/bukkit/asm/BukkitASMLoader.java
+++ b/src/keepcalm/mods/bukkit/asm/BukkitASMLoader.java
@@ -1,29 +1,29 @@
package keepcalm.mods.bukkit.asm;
import java.util.Map;
import cpw.mods.fml.relauncher.IFMLLoadingPlugin;
public class BukkitASMLoader implements IFMLLoadingPlugin {
//public static final boolean isObfuscated = ClassLoader.getSystemClassLoader().getSystemResourceAsStream("net/minecraft/src") == null;
@Override
public String[] getASMTransformerClass() {
return new String[] {"keepcalm.mods.bukkit.asm.transformers.BukkitAccessTransformer", /*"keepcalm.mods.bukkit.asm.transformers.BukkitVanishTransformer", */"keepcalm.mods.bukkit.asm.transformers.events.BlockEventHelpers", "keepcalm.mods.bukkit.asm.transformers.events.EntityEventHelpers"};
}
@Override
public String[] getLibraryRequestClass() {
return new String[] {"keepcalm.mods.bukkit.asm.libraryHandlers.BukkitCommonsLangDownload", "keepcalm.mods.bukkit.asm.libraryHandlers.BukkitEbeanDownload", "keepcalm.mods.bukkit.asm.libraryHandlers.BukkitGSonDownload", "keepcalm.mods.bukkit.asm.libraryHandlers.BukkitSQLiteDownload",
- "keepcalm.mods.bukkit.asm.libraryHandlers.BukkitYAMLDownload", "keepcalm.mods.bukkit.asm.libraryHandlers.BukkitJANSIDownload", "keepcalm.mods.bukkit.as.libraryHandlers.BukkitGuava10Download"};
+ "keepcalm.mods.bukkit.asm.libraryHandlers.BukkitYAMLDownload", "keepcalm.mods.bukkit.asm.libraryHandlers.BukkitJANSIDownload", "keepcalm.mods.bukkit.asm.libraryHandlers.BukkitGuava10Download"};
}
@Override
public String getModContainerClass() {
return null;
}
@Override
public String getSetupClass() {
return null;
}
@Override
public void injectData(Map<String, Object> data) {}
}
| true
| true
|
public String[] getLibraryRequestClass() {
return new String[] {"keepcalm.mods.bukkit.asm.libraryHandlers.BukkitCommonsLangDownload", "keepcalm.mods.bukkit.asm.libraryHandlers.BukkitEbeanDownload", "keepcalm.mods.bukkit.asm.libraryHandlers.BukkitGSonDownload", "keepcalm.mods.bukkit.asm.libraryHandlers.BukkitSQLiteDownload",
"keepcalm.mods.bukkit.asm.libraryHandlers.BukkitYAMLDownload", "keepcalm.mods.bukkit.asm.libraryHandlers.BukkitJANSIDownload", "keepcalm.mods.bukkit.as.libraryHandlers.BukkitGuava10Download"};
}
|
public String[] getLibraryRequestClass() {
return new String[] {"keepcalm.mods.bukkit.asm.libraryHandlers.BukkitCommonsLangDownload", "keepcalm.mods.bukkit.asm.libraryHandlers.BukkitEbeanDownload", "keepcalm.mods.bukkit.asm.libraryHandlers.BukkitGSonDownload", "keepcalm.mods.bukkit.asm.libraryHandlers.BukkitSQLiteDownload",
"keepcalm.mods.bukkit.asm.libraryHandlers.BukkitYAMLDownload", "keepcalm.mods.bukkit.asm.libraryHandlers.BukkitJANSIDownload", "keepcalm.mods.bukkit.asm.libraryHandlers.BukkitGuava10Download"};
}
|
diff --git a/ldapbrowser-core/src/main/java/org/apache/directory/studio/ldapbrowser/core/jobs/InitializeRootDSERunnable.java b/ldapbrowser-core/src/main/java/org/apache/directory/studio/ldapbrowser/core/jobs/InitializeRootDSERunnable.java
index b4051fc3a..633ac3605 100644
--- a/ldapbrowser-core/src/main/java/org/apache/directory/studio/ldapbrowser/core/jobs/InitializeRootDSERunnable.java
+++ b/ldapbrowser-core/src/main/java/org/apache/directory/studio/ldapbrowser/core/jobs/InitializeRootDSERunnable.java
@@ -1,385 +1,385 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.studio.ldapbrowser.core.jobs;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.naming.InvalidNameException;
import org.apache.directory.shared.ldap.constants.SchemaConstants;
import org.apache.directory.shared.ldap.name.LdapDN;
import org.apache.directory.studio.connection.core.Connection;
import org.apache.directory.studio.connection.core.Connection.AliasDereferencingMethod;
import org.apache.directory.studio.connection.core.Connection.ReferralHandlingMethod;
import org.apache.directory.studio.connection.core.jobs.StudioBulkRunnableWithProgress;
import org.apache.directory.studio.connection.core.jobs.StudioProgressMonitor;
import org.apache.directory.studio.ldapbrowser.core.BrowserCoreMessages;
import org.apache.directory.studio.ldapbrowser.core.events.AttributesInitializedEvent;
import org.apache.directory.studio.ldapbrowser.core.events.EventRegistry;
import org.apache.directory.studio.ldapbrowser.core.model.IAttribute;
import org.apache.directory.studio.ldapbrowser.core.model.IBrowserConnection;
import org.apache.directory.studio.ldapbrowser.core.model.IEntry;
import org.apache.directory.studio.ldapbrowser.core.model.IRootDSE;
import org.apache.directory.studio.ldapbrowser.core.model.ISearch;
import org.apache.directory.studio.ldapbrowser.core.model.ISearchResult;
import org.apache.directory.studio.ldapbrowser.core.model.ISearch.SearchScope;
import org.apache.directory.studio.ldapbrowser.core.model.impl.BaseDNEntry;
import org.apache.directory.studio.ldapbrowser.core.model.impl.DirectoryMetadataEntry;
import org.apache.directory.studio.ldapbrowser.core.model.impl.Search;
/**
* Runnable to initialize the Root DSE.
*
* @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
* @version $Rev$, $Date$
*/
public class InitializeRootDSERunnable implements StudioBulkRunnableWithProgress
{
/** The requested attributes when reading the Root DSE. */
public static final String[] ROOT_DSE_ATTRIBUTES =
{ SchemaConstants.NAMING_CONTEXTS_AT, SchemaConstants.SUBSCHEMA_SUBENTRY_AT,
SchemaConstants.SUPPORTED_LDAP_VERSION_AT, SchemaConstants.SUPPORTED_SASL_MECHANISMS_AT,
SchemaConstants.SUPPORTED_EXTENSION_AT, SchemaConstants.SUPPORTED_CONTROL_AT,
SchemaConstants.SUPPORTED_FEATURES_AT, SchemaConstants.VENDOR_NAME_AT, SchemaConstants.VENDOR_VERSION_AT,
SchemaConstants.ALL_OPERATIONAL_ATTRIBUTES };
private IRootDSE rootDSE;
/**
* Creates a new instance of InitializeRootDSERunnable.
*
* @param rootDSE the root DSE
*/
private InitializeRootDSERunnable( IRootDSE rootDSE )
{
this.rootDSE = rootDSE;
}
/**
* {@inheritDoc}
*/
public Connection[] getConnections()
{
return new Connection[]
{ rootDSE.getBrowserConnection().getConnection() };
}
/**
* {@inheritDoc}
*/
public String getName()
{
return BrowserCoreMessages.jobs__init_entries_title_attonly;
}
/**
* {@inheritDoc}
*/
public Object[] getLockedObjects()
{
return new IEntry[]
{ rootDSE };
}
/**
* {@inheritDoc}
*/
public String getErrorMessage()
{
return BrowserCoreMessages.jobs__init_entries_error_1;
}
/**
* {@inheritDoc}
*/
public void run( StudioProgressMonitor monitor )
{
monitor.beginTask( " ", 3 ); //$NON-NLS-1$
monitor.reportProgress( " " ); //$NON-NLS-1$
monitor.setTaskName( BrowserCoreMessages.bind( BrowserCoreMessages.jobs__init_entries_task, new String[]
{ rootDSE.getDn().getUpName() } ) );
monitor.worked( 1 );
monitor.reportProgress( BrowserCoreMessages.bind( BrowserCoreMessages.jobs__init_entries_progress_att,
new String[]
{ rootDSE.getDn().getUpName() } ) );
loadRootDSE( rootDSE.getBrowserConnection(), monitor );
}
/**
* {@inheritDoc}
*/
public void runNotification()
{
EventRegistry.fireEntryUpdated( new AttributesInitializedEvent( rootDSE ), this );
}
/**
* Loads the Root DSE.
*
* @param browserConnection the browser connection
* @param monitor the progress monitor
*
* @throws Exception the exception
*/
public static synchronized void loadRootDSE( IBrowserConnection browserConnection, StudioProgressMonitor monitor )
{
// clear old children
InitializeChildrenRunnable.clearCaches( browserConnection.getRootDSE(), true );
// delete old attributes
IAttribute[] oldAttributes = browserConnection.getRootDSE().getAttributes();
if ( oldAttributes != null )
{
for ( IAttribute oldAttribute : oldAttributes )
{
browserConnection.getRootDSE().deleteAttribute( oldAttribute );
}
}
// load well-known Root DSE attributes and operational attributes
ISearch search = new Search( null, browserConnection, LdapDN.EMPTY_LDAPDN, ISearch.FILTER_TRUE,
ROOT_DSE_ATTRIBUTES, SearchScope.OBJECT, 0, 0, Connection.AliasDereferencingMethod.NEVER,
Connection.ReferralHandlingMethod.IGNORE, false, null );
SearchRunnable.searchAndUpdateModel( browserConnection, search, monitor );
// load all user attributes
search = new Search( null, browserConnection, LdapDN.EMPTY_LDAPDN, ISearch.FILTER_TRUE, new String[]
{ SchemaConstants.ALL_USER_ATTRIBUTES }, SearchScope.OBJECT, 0, 0, Connection.AliasDereferencingMethod.NEVER,
Connection.ReferralHandlingMethod.IGNORE, false, null );
SearchRunnable.searchAndUpdateModel( browserConnection, search, monitor );
// the list of entries under the Root DSE
Map<LdapDN, IEntry> rootDseEntries = new HashMap<LdapDN, IEntry>();
// 1st: add base DNs, either the specified or from the namingContexts attribute
if ( !browserConnection.isFetchBaseDNs() && browserConnection.getBaseDN() != null
&& !"".equals( browserConnection.getBaseDN().toString() ) )
{
// only add the specified base DN
LdapDN dn = browserConnection.getBaseDN();
IEntry entry = browserConnection.getEntryFromCache( dn );
if ( entry == null )
{
entry = new BaseDNEntry( ( LdapDN ) dn.clone(), browserConnection );
browserConnection.cacheEntry( entry );
}
rootDseEntries.put( dn, entry );
}
else
{
// get base DNs from namingContexts attribute
Set<String> namingContextSet = new HashSet<String>();
IAttribute attribute = browserConnection.getRootDSE().getAttribute( SchemaConstants.NAMING_CONTEXTS_AT );
if ( attribute != null )
{
String[] values = attribute.getStringValues();
for ( int i = 0; i < values.length; i++ )
{
namingContextSet.add( values[i] );
}
}
if ( !namingContextSet.isEmpty() )
{
for ( String namingContext : namingContextSet )
{
- if ( namingContext.charAt( namingContext.length() - 1 ) == '\u0000' )
+ if ( namingContext.length() > 0 && namingContext.charAt( namingContext.length() - 1 ) == '\u0000' )
{
namingContext = namingContext.substring( 0, namingContext.length() - 1 );
}
if ( !"".equals( namingContext ) ) //$NON-NLS-1$
{
try
{
LdapDN dn = new LdapDN( namingContext );
IEntry entry = browserConnection.getEntryFromCache( dn );
if ( entry == null )
{
entry = new BaseDNEntry( dn, browserConnection );
browserConnection.cacheEntry( entry );
}
rootDseEntries.put( dn, entry );
}
catch ( InvalidNameException e )
{
monitor.reportError( BrowserCoreMessages.model__error_setting_base_dn, e );
}
}
else
{
// special handling of empty namingContext (Novell eDirectory):
// perform a one-level search and add all result DNs to the set
searchRootDseEntries( browserConnection, rootDseEntries, monitor );
}
}
}
else
{
// special handling of non-existing namingContexts attribute (Oracle Internet Directory)
// perform a one-level search and add all result DNs to the set
searchRootDseEntries( browserConnection, rootDseEntries, monitor );
}
}
// 2nd: add schema sub-entry
IEntry[] schemaEntries = getDirectoryMetadataEntries( browserConnection, SchemaConstants.SUBSCHEMA_SUBENTRY_AT );
for ( IEntry entry : schemaEntries )
{
if ( entry instanceof DirectoryMetadataEntry )
{
( ( DirectoryMetadataEntry ) entry ).setSchemaEntry( true );
}
rootDseEntries.put( entry.getDn(), entry );
}
// get other meta data entries
IAttribute[] rootDseAttributes = browserConnection.getRootDSE().getAttributes();
if ( rootDseAttributes != null )
{
for ( IAttribute attribute : rootDseAttributes )
{
IEntry[] metadataEntries = getDirectoryMetadataEntries( browserConnection, attribute.getDescription() );
for ( IEntry entry : metadataEntries )
{
rootDseEntries.put( entry.getDn(), entry );
}
}
}
// try to init entries
StudioProgressMonitor dummyMonitor = new StudioProgressMonitor( monitor );
for ( IEntry entry : rootDseEntries.values() )
{
initBaseEntry( entry, dummyMonitor );
}
// set flags
browserConnection.getRootDSE().setHasMoreChildren( false );
browserConnection.getRootDSE().setAttributesInitialized( true );
browserConnection.getRootDSE().setOperationalAttributesInitialized( true );
browserConnection.getRootDSE().setChildrenInitialized( true );
browserConnection.getRootDSE().setHasChildrenHint( true );
browserConnection.getRootDSE().setDirectoryEntry( true );
}
private static void initBaseEntry( IEntry entry, StudioProgressMonitor monitor )
{
IBrowserConnection browserConnection = entry.getBrowserConnection();
LdapDN dn = entry.getDn();
// search the entry
AliasDereferencingMethod derefAliasMethod = browserConnection.getAliasesDereferencingMethod();
ReferralHandlingMethod handleReferralsMethod = browserConnection.getReferralsHandlingMethod();
ISearch search = new Search( null, browserConnection, dn, ISearch.FILTER_TRUE, ISearch.NO_ATTRIBUTES,
SearchScope.OBJECT, 1, 0, derefAliasMethod, handleReferralsMethod, true, null );
SearchRunnable.searchAndUpdateModel( browserConnection, search, monitor );
ISearchResult[] results = search.getSearchResults();
if ( results != null && results.length == 1 )
{
// add entry to Root DSE
ISearchResult result = results[0];
entry = result.getEntry();
browserConnection.getRootDSE().addChild( entry );
}
else
{
// DN exists in the Root DSE, but doesn't exist in directory
browserConnection.uncacheEntryRecursive( entry );
}
}
private static IEntry[] getDirectoryMetadataEntries( IBrowserConnection browserConnection,
String metadataAttributeName )
{
List<LdapDN> metadataEntryDnList = new ArrayList<LdapDN>();
IAttribute attribute = browserConnection.getRootDSE().getAttribute( metadataAttributeName );
if ( attribute != null )
{
String[] values = attribute.getStringValues();
for ( String dn : values )
{
if ( dn != null && !"".equals( dn ) )
{
try
{
metadataEntryDnList.add( new LdapDN( dn ) );
}
catch ( InvalidNameException e )
{
}
}
}
}
IEntry[] metadataEntries = new IEntry[metadataEntryDnList.size()];
for ( int i = 0; i < metadataEntryDnList.size(); i++ )
{
LdapDN dn = metadataEntryDnList.get( i );
metadataEntries[i] = browserConnection.getEntryFromCache( dn );
if ( metadataEntries[i] == null )
{
metadataEntries[i] = new DirectoryMetadataEntry( dn, browserConnection );
metadataEntries[i].setDirectoryEntry( true );
browserConnection.cacheEntry( metadataEntries[i] );
}
}
return metadataEntries;
}
private static void searchRootDseEntries( IBrowserConnection browserConnection, Map<LdapDN, IEntry> rootDseEntries,
StudioProgressMonitor monitor )
{
ISearch search = new Search( null, browserConnection, LdapDN.EMPTY_LDAPDN, ISearch.FILTER_TRUE,
ISearch.NO_ATTRIBUTES, SearchScope.ONELEVEL, 0, 0, Connection.AliasDereferencingMethod.NEVER,
Connection.ReferralHandlingMethod.IGNORE, false, null );
SearchRunnable.searchAndUpdateModel( browserConnection, search, monitor );
ISearchResult[] results = search.getSearchResults();
for ( ISearchResult searchResult : results )
{
IEntry entry = searchResult.getEntry();
rootDseEntries.put( entry.getDn(), entry );
}
}
}
| true
| true
|
public static synchronized void loadRootDSE( IBrowserConnection browserConnection, StudioProgressMonitor monitor )
{
// clear old children
InitializeChildrenRunnable.clearCaches( browserConnection.getRootDSE(), true );
// delete old attributes
IAttribute[] oldAttributes = browserConnection.getRootDSE().getAttributes();
if ( oldAttributes != null )
{
for ( IAttribute oldAttribute : oldAttributes )
{
browserConnection.getRootDSE().deleteAttribute( oldAttribute );
}
}
// load well-known Root DSE attributes and operational attributes
ISearch search = new Search( null, browserConnection, LdapDN.EMPTY_LDAPDN, ISearch.FILTER_TRUE,
ROOT_DSE_ATTRIBUTES, SearchScope.OBJECT, 0, 0, Connection.AliasDereferencingMethod.NEVER,
Connection.ReferralHandlingMethod.IGNORE, false, null );
SearchRunnable.searchAndUpdateModel( browserConnection, search, monitor );
// load all user attributes
search = new Search( null, browserConnection, LdapDN.EMPTY_LDAPDN, ISearch.FILTER_TRUE, new String[]
{ SchemaConstants.ALL_USER_ATTRIBUTES }, SearchScope.OBJECT, 0, 0, Connection.AliasDereferencingMethod.NEVER,
Connection.ReferralHandlingMethod.IGNORE, false, null );
SearchRunnable.searchAndUpdateModel( browserConnection, search, monitor );
// the list of entries under the Root DSE
Map<LdapDN, IEntry> rootDseEntries = new HashMap<LdapDN, IEntry>();
// 1st: add base DNs, either the specified or from the namingContexts attribute
if ( !browserConnection.isFetchBaseDNs() && browserConnection.getBaseDN() != null
&& !"".equals( browserConnection.getBaseDN().toString() ) )
{
// only add the specified base DN
LdapDN dn = browserConnection.getBaseDN();
IEntry entry = browserConnection.getEntryFromCache( dn );
if ( entry == null )
{
entry = new BaseDNEntry( ( LdapDN ) dn.clone(), browserConnection );
browserConnection.cacheEntry( entry );
}
rootDseEntries.put( dn, entry );
}
else
{
// get base DNs from namingContexts attribute
Set<String> namingContextSet = new HashSet<String>();
IAttribute attribute = browserConnection.getRootDSE().getAttribute( SchemaConstants.NAMING_CONTEXTS_AT );
if ( attribute != null )
{
String[] values = attribute.getStringValues();
for ( int i = 0; i < values.length; i++ )
{
namingContextSet.add( values[i] );
}
}
if ( !namingContextSet.isEmpty() )
{
for ( String namingContext : namingContextSet )
{
if ( namingContext.charAt( namingContext.length() - 1 ) == '\u0000' )
{
namingContext = namingContext.substring( 0, namingContext.length() - 1 );
}
if ( !"".equals( namingContext ) ) //$NON-NLS-1$
{
try
{
LdapDN dn = new LdapDN( namingContext );
IEntry entry = browserConnection.getEntryFromCache( dn );
if ( entry == null )
{
entry = new BaseDNEntry( dn, browserConnection );
browserConnection.cacheEntry( entry );
}
rootDseEntries.put( dn, entry );
}
catch ( InvalidNameException e )
{
monitor.reportError( BrowserCoreMessages.model__error_setting_base_dn, e );
}
}
else
{
// special handling of empty namingContext (Novell eDirectory):
// perform a one-level search and add all result DNs to the set
searchRootDseEntries( browserConnection, rootDseEntries, monitor );
}
}
}
else
{
// special handling of non-existing namingContexts attribute (Oracle Internet Directory)
// perform a one-level search and add all result DNs to the set
searchRootDseEntries( browserConnection, rootDseEntries, monitor );
}
}
// 2nd: add schema sub-entry
IEntry[] schemaEntries = getDirectoryMetadataEntries( browserConnection, SchemaConstants.SUBSCHEMA_SUBENTRY_AT );
for ( IEntry entry : schemaEntries )
{
if ( entry instanceof DirectoryMetadataEntry )
{
( ( DirectoryMetadataEntry ) entry ).setSchemaEntry( true );
}
rootDseEntries.put( entry.getDn(), entry );
}
// get other meta data entries
IAttribute[] rootDseAttributes = browserConnection.getRootDSE().getAttributes();
if ( rootDseAttributes != null )
{
for ( IAttribute attribute : rootDseAttributes )
{
IEntry[] metadataEntries = getDirectoryMetadataEntries( browserConnection, attribute.getDescription() );
for ( IEntry entry : metadataEntries )
{
rootDseEntries.put( entry.getDn(), entry );
}
}
}
// try to init entries
StudioProgressMonitor dummyMonitor = new StudioProgressMonitor( monitor );
for ( IEntry entry : rootDseEntries.values() )
{
initBaseEntry( entry, dummyMonitor );
}
// set flags
browserConnection.getRootDSE().setHasMoreChildren( false );
browserConnection.getRootDSE().setAttributesInitialized( true );
browserConnection.getRootDSE().setOperationalAttributesInitialized( true );
browserConnection.getRootDSE().setChildrenInitialized( true );
browserConnection.getRootDSE().setHasChildrenHint( true );
browserConnection.getRootDSE().setDirectoryEntry( true );
}
|
public static synchronized void loadRootDSE( IBrowserConnection browserConnection, StudioProgressMonitor monitor )
{
// clear old children
InitializeChildrenRunnable.clearCaches( browserConnection.getRootDSE(), true );
// delete old attributes
IAttribute[] oldAttributes = browserConnection.getRootDSE().getAttributes();
if ( oldAttributes != null )
{
for ( IAttribute oldAttribute : oldAttributes )
{
browserConnection.getRootDSE().deleteAttribute( oldAttribute );
}
}
// load well-known Root DSE attributes and operational attributes
ISearch search = new Search( null, browserConnection, LdapDN.EMPTY_LDAPDN, ISearch.FILTER_TRUE,
ROOT_DSE_ATTRIBUTES, SearchScope.OBJECT, 0, 0, Connection.AliasDereferencingMethod.NEVER,
Connection.ReferralHandlingMethod.IGNORE, false, null );
SearchRunnable.searchAndUpdateModel( browserConnection, search, monitor );
// load all user attributes
search = new Search( null, browserConnection, LdapDN.EMPTY_LDAPDN, ISearch.FILTER_TRUE, new String[]
{ SchemaConstants.ALL_USER_ATTRIBUTES }, SearchScope.OBJECT, 0, 0, Connection.AliasDereferencingMethod.NEVER,
Connection.ReferralHandlingMethod.IGNORE, false, null );
SearchRunnable.searchAndUpdateModel( browserConnection, search, monitor );
// the list of entries under the Root DSE
Map<LdapDN, IEntry> rootDseEntries = new HashMap<LdapDN, IEntry>();
// 1st: add base DNs, either the specified or from the namingContexts attribute
if ( !browserConnection.isFetchBaseDNs() && browserConnection.getBaseDN() != null
&& !"".equals( browserConnection.getBaseDN().toString() ) )
{
// only add the specified base DN
LdapDN dn = browserConnection.getBaseDN();
IEntry entry = browserConnection.getEntryFromCache( dn );
if ( entry == null )
{
entry = new BaseDNEntry( ( LdapDN ) dn.clone(), browserConnection );
browserConnection.cacheEntry( entry );
}
rootDseEntries.put( dn, entry );
}
else
{
// get base DNs from namingContexts attribute
Set<String> namingContextSet = new HashSet<String>();
IAttribute attribute = browserConnection.getRootDSE().getAttribute( SchemaConstants.NAMING_CONTEXTS_AT );
if ( attribute != null )
{
String[] values = attribute.getStringValues();
for ( int i = 0; i < values.length; i++ )
{
namingContextSet.add( values[i] );
}
}
if ( !namingContextSet.isEmpty() )
{
for ( String namingContext : namingContextSet )
{
if ( namingContext.length() > 0 && namingContext.charAt( namingContext.length() - 1 ) == '\u0000' )
{
namingContext = namingContext.substring( 0, namingContext.length() - 1 );
}
if ( !"".equals( namingContext ) ) //$NON-NLS-1$
{
try
{
LdapDN dn = new LdapDN( namingContext );
IEntry entry = browserConnection.getEntryFromCache( dn );
if ( entry == null )
{
entry = new BaseDNEntry( dn, browserConnection );
browserConnection.cacheEntry( entry );
}
rootDseEntries.put( dn, entry );
}
catch ( InvalidNameException e )
{
monitor.reportError( BrowserCoreMessages.model__error_setting_base_dn, e );
}
}
else
{
// special handling of empty namingContext (Novell eDirectory):
// perform a one-level search and add all result DNs to the set
searchRootDseEntries( browserConnection, rootDseEntries, monitor );
}
}
}
else
{
// special handling of non-existing namingContexts attribute (Oracle Internet Directory)
// perform a one-level search and add all result DNs to the set
searchRootDseEntries( browserConnection, rootDseEntries, monitor );
}
}
// 2nd: add schema sub-entry
IEntry[] schemaEntries = getDirectoryMetadataEntries( browserConnection, SchemaConstants.SUBSCHEMA_SUBENTRY_AT );
for ( IEntry entry : schemaEntries )
{
if ( entry instanceof DirectoryMetadataEntry )
{
( ( DirectoryMetadataEntry ) entry ).setSchemaEntry( true );
}
rootDseEntries.put( entry.getDn(), entry );
}
// get other meta data entries
IAttribute[] rootDseAttributes = browserConnection.getRootDSE().getAttributes();
if ( rootDseAttributes != null )
{
for ( IAttribute attribute : rootDseAttributes )
{
IEntry[] metadataEntries = getDirectoryMetadataEntries( browserConnection, attribute.getDescription() );
for ( IEntry entry : metadataEntries )
{
rootDseEntries.put( entry.getDn(), entry );
}
}
}
// try to init entries
StudioProgressMonitor dummyMonitor = new StudioProgressMonitor( monitor );
for ( IEntry entry : rootDseEntries.values() )
{
initBaseEntry( entry, dummyMonitor );
}
// set flags
browserConnection.getRootDSE().setHasMoreChildren( false );
browserConnection.getRootDSE().setAttributesInitialized( true );
browserConnection.getRootDSE().setOperationalAttributesInitialized( true );
browserConnection.getRootDSE().setChildrenInitialized( true );
browserConnection.getRootDSE().setHasChildrenHint( true );
browserConnection.getRootDSE().setDirectoryEntry( true );
}
|
diff --git a/src/me/ellbristow/ChestBank/ChestBank.java b/src/me/ellbristow/ChestBank/ChestBank.java
index 8b5c142..f04986a 100644
--- a/src/me/ellbristow/ChestBank/ChestBank.java
+++ b/src/me/ellbristow/ChestBank/ChestBank.java
@@ -1,668 +1,675 @@
package me.ellbristow.ChestBank;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import net.minecraft.server.InventoryLargeChest;
import net.minecraft.server.TileEntityChest;
import org.bukkit.ChatColor;
import org.bukkit.OfflinePlayer;
import org.bukkit.block.Block;
import org.bukkit.command.Command;
import org.bukkit.command.CommandSender;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.configuration.file.FileConfiguration;
import org.bukkit.configuration.file.YamlConfiguration;
import org.bukkit.craftbukkit.inventory.CraftInventoryDoubleChest;
import org.bukkit.enchantments.Enchantment;
import org.bukkit.entity.Player;
import org.bukkit.inventory.DoubleChestInventory;
import org.bukkit.inventory.ItemStack;
import org.bukkit.permissions.Permission;
import org.bukkit.plugin.PluginDescriptionFile;
import org.bukkit.plugin.PluginManager;
import org.bukkit.plugin.java.JavaPlugin;
public class ChestBank extends JavaPlugin {
public static ChestBank plugin;
public static Logger logger;
public FileConfiguration banksConfig;
public FileConfiguration config;
private File bankFile = null;
public HashMap<String, DoubleChestInventory> chestAccounts;
public int[] limits = {10,25,35};
public final ChestBankListener playerListener = new ChestBankListener(this);
public HashMap<String, String> openInvs = new HashMap<String, String>();
public boolean useWhitelist = false;
public boolean useBlacklist = false;
public String[] whitelist = new String[]{"41","264","266","371"};
public String[] blacklist = new String[]{"8","9","10","11","51"};
public boolean gotVault = false;
public boolean gotEconomy = false;
public boolean useNetworkPerms = false;
public vaultBridge vault;
public double createFee;
public double useFee;
@Override
public void onDisable () {
}
@Override
public void onEnable () {
PluginManager pm = getServer().getPluginManager();
logger = getLogger();
config = getConfig();
limits[0] = 10;
limits[0] = config.getInt("normal_limit");
config.set("normal_limit", limits[0]);
limits[1] = 25;
limits[1] = config.getInt("elevated_limit");
config.set("elevated_limit", limits[1]);
limits[2] = 25;
limits[2] = config.getInt("vip_limit");
config.set("vip_limit", limits[2]);
useWhitelist = config.getBoolean("use_whitelist", false);
useBlacklist = config.getBoolean("use_blacklist", false);
useNetworkPerms = config.getBoolean("use_network_perms", false);
config.set("use_whitelist", useWhitelist);
config.set("use_blacklist", useBlacklist);
config.set("use_network_perms", useNetworkPerms);
String whitelistString = config.getString("whitelist", "41,264,266,371");
if (useWhitelist) {
whitelist = whitelistString.split(",");
whitelistString = "";
if (whitelist.length != 0) {
for(String item : whitelist) {
if (!"".equals(whitelistString)) {
whitelistString += ",";
}
whitelistString += item;
}
}
}
config.set("whitelist", whitelistString);
String blacklistString = config.getString("blacklist", "8,9,10,11,51");
if (useBlacklist) {
blacklist = blacklistString.split(",");
blacklistString = "";
if (blacklist.length != 0) {
for(String item : blacklist) {
if (!"".equals(blacklistString)) {
blacklistString += ",";
}
blacklistString += item;
}
}
}
config.set("blacklist", blacklistString);
if (getServer().getPluginManager().isPluginEnabled("Vault")) {
gotVault = true;
getLogger().info("[Vault] found and hooked!");
vault = new vaultBridge(this);
gotEconomy = vault.foundEconomy;
createFee = config.getDouble("creation_fee", 0.0);
useFee = config.getDouble("transaction_fee", 0.0);
config.set("creation_fee", createFee);
config.set("transaction_fee", useFee);
}
saveConfig();
pm.registerEvents(playerListener, this);
banksConfig = getChestBanks();
chestAccounts = getAccounts();
if (useNetworkPerms) {
registerNetworkPerms();
}
}
@Override
public boolean onCommand (CommandSender sender, Command cmd, String commandLabel, String[] args) {
if (!(sender instanceof Player)) {
sender.sendMessage("Sorry! The console can't use this command!");
return true;
}
Player player = (Player) sender;
if (args.length == 0) {
// Command list requested
PluginDescriptionFile pdfFile = this.getDescription();
player.sendMessage(ChatColor.GOLD + pdfFile.getName() + " version " + pdfFile.getVersion() + " by " + pdfFile.getAuthors());
boolean found = false;
if (player.hasPermission("chestbank.info")) {
player.sendMessage(ChatColor.GOLD + " /chestbank info " + ChatColor.GRAY + ": Get targetted ChestBank's info.");
+ found = true;
}
if (player.hasPermission("chestbank.list")) {
player.sendMessage(ChatColor.GOLD + " /chestbank list " + ChatColor.GRAY + ": List all existing ChestBank networks.");
+ found = true;
}
if (player.hasPermission("chestbank.create")) {
player.sendMessage(ChatColor.GOLD + " /chestbank create " + ChatColor.GRAY + ": Make targetted chest a ChestBank.");
+ found = true;
}
if ((!useNetworkPerms && player.hasPermission("chestbank.create.networks")) || useNetworkPerms) {
player.sendMessage(ChatColor.GOLD + " /chestbank create {network}" + ChatColor.GRAY + ": Create a Chestbank on the");
- player.sendMessage(ChatColor.GRAY + " named network.");
+ player.sendMessage(ChatColor.GRAY + " named network.");
+ found = true;
}
if (player.hasPermission("chestbank.remove")) {
player.sendMessage(ChatColor.GOLD + " /chestbank remove " + ChatColor.GRAY + ": Make targetted ChestBank a chest.");
+ found = true;
}
if (player.hasPermission("chestbank.see")) {
player.sendMessage(ChatColor.GOLD + " /chestbank see [player] " + ChatColor.GRAY + ": View player's ChestBank account.");
+ found = true;
}
if (!found) {
player.sendMessage(ChatColor.GOLD + "There are no ChestBank commands you can use!");
+ found = true;
}
return true;
}
else if (args.length == 1 || (args.length == 2 && (args[0].equalsIgnoreCase("create") || args[0].equalsIgnoreCase("remove")))) {
if (args[0].equalsIgnoreCase("see")) {
player.sendMessage(ChatColor.RED + "Please specify a player!");
return false;
}
// Create, Remove, List, Info
else if (args[0].equalsIgnoreCase("create")) {
if (!player.hasPermission("chestbank.create")) {
player.sendMessage(ChatColor.RED + "You do not have permission to create a ChestBank!");
return true;
}
if (args.length == 2 && useNetworkPerms && (!player.hasPermission("chestbank.create.networks." + args[1].toLowerCase()) && !player.hasPermission("chestbank.create.networks.*"))) {
player.sendMessage(ChatColor.RED + "You do not have permission to create a ChestBank on the");
player.sendMessage(ChatColor.WHITE + args[1].toLowerCase() + ChatColor.RED + " network!");
return true;
} else if (args.length == 2 && !useNetworkPerms && !player.hasPermission("chestbank.create.networks")) {
player.sendMessage(ChatColor.RED + "You do not have permission to create a ChestBank on named networks!");
return true;
}
Block block = player.getTargetBlock(null, 4);
if (block.getTypeId() != 54) {
player.sendMessage(ChatColor.RED + "You're not looking at a chest!");
return true;
}
if (isBankBlock(block)) {
player.sendMessage(ChatColor.RED + "That is already a ChestBank!");
return true;
}
if (gotVault && gotEconomy && createFee != 0) {
if ((args.length == 2 && !player.hasPermission("chestbank.free.create.networks")) || (args.length == 1 && !player.hasPermission("chestbank.free.create"))) {
if (vault.economy.getBalance(player.getName()) < createFee) {
player.sendMessage(ChatColor.RED + "You cannot afford the ChestBank creation fee of");
player.sendMessage(ChatColor.WHITE + vault.economy.format(createFee) + ChatColor.RED + "!");
return true;
}
}
}
if (args.length == 2) {
// Network specified
String network = args[1];
String bankNames = banksConfig.getString("networks.names", "");
if (bankNames.equals("")) {
bankNames = args[1];
} else {
String[] bankNamesArray = bankNames.split(":");
boolean exists = false;
for (String bankName : bankNamesArray) {
if (bankName.equals(args[1])) {
exists = true;
}
}
if (!exists) {
bankNames += ":" + args[1];
}
}
banksConfig.set("networks.names", bankNames);
ConfigurationSection networkBank = banksConfig.getConfigurationSection("networks." + args[1]);
String locsList = "";
if (networkBank != null) {
locsList = networkBank.getString("locations", "");
if (!locsList.equals("")) {
locsList += ";";
}
}
locsList += block.getWorld().getName() + ":" + block.getX() + ":" + block.getY() + ":" + block.getZ();
Block doubleChest = getDoubleChest(block);
if (doubleChest != null) {
locsList += ":" + doubleChest.getX() + ":" + doubleChest.getY() + ":" + doubleChest.getZ();
}
banksConfig.set("networks." + args[1] + ".locations", locsList);
saveChestBanks();
player.sendMessage(ChatColor.GOLD + "ChestBank created on " + ChatColor.WHITE + network + ChatColor.GOLD + " Network!");
if (gotVault && gotEconomy && createFee != 0 && !player.hasPermission("chestbank.free.create.networks")) {
vault.economy.withdrawPlayer(player.getName(), createFee);
player.sendMessage(ChatColor.GOLD + "You were charged " + ChatColor.WHITE + vault.economy.format(createFee) + ChatColor.GOLD + " for ChestBank creation!");
}
return true;
}
String bankList = banksConfig.getString("banks", "");
if (bankList.equals("")) {
bankList += block.getWorld().getName() + ":" + block.getX() + ":" + block.getY() + ":" + block.getZ();
}
else {
bankList += ";" + block.getWorld().getName() + ":" + block.getX() + ":" + block.getY() + ":" + block.getZ();
}
Block doubleChest = getDoubleChest(block);
if (doubleChest != null) {
bankList += ":" + doubleChest.getX() + ":" + doubleChest.getY() + ":" + doubleChest.getZ();
}
banksConfig.set("banks", bankList);
saveChestBanks();
player.sendMessage(ChatColor.GOLD + "ChestBank created!");
if (gotVault && gotEconomy && createFee != 0 && !player.hasPermission("chestbank.free.create")) {
vault.economy.withdrawPlayer(player.getName(), createFee);
player.sendMessage(ChatColor.GOLD + "You were charged " + ChatColor.WHITE + vault.economy.format(createFee) + ChatColor.GOLD + " for ChestBank creation!");
}
return true;
}
else if (args[0].equalsIgnoreCase("remove")) {
if (!player.hasPermission("chestbank.remove")) {
player.sendMessage(ChatColor.RED + "You do not have permission to remove a ChestBank!");
return true;
}
Block block = player.getTargetBlock(null, 4);
if (isNetworkBank(block) && useNetworkPerms && (!player.hasPermission("chestbank.remove.networks." + getNetwork(block).toLowerCase()) && !player.hasPermission("chestbank.remove.networks.*"))) {
player.sendMessage(ChatColor.RED + "You do not have permission to remove a ChestBank on the");
player.sendMessage(ChatColor.WHITE + getNetwork(block) + ChatColor.RED + " network!");
return true;
} else if (isNetworkBank(block) && !useNetworkPerms && !player.hasPermission("chestbank.remove.networks")) {
player.sendMessage(ChatColor.RED + "You do not have permission to remove a ChestBank on named networks!");
return true;
}
if (!isBankBlock(block)) {
player.sendMessage(ChatColor.RED + "You're not looking at a ChestBank!");
return true;
}
if (isNetworkBank(block)) {
String networkNames = banksConfig.getString("networks.names");
String[] networkNamesArray = networkNames.split(":");
String networkName = getNetwork(block);
String networkLocs = banksConfig.getString("networks." + networkName + ".locations", "");
String newNetworkLocs = "";
for (String location : networkLocs.split(";")) {
String[] loc = location.split(":");
if (loc.length == 4) {
String bankWorld = loc[0];
int bankX = Integer.parseInt(loc[1]);
int bankY = Integer.parseInt(loc[2]);
int bankZ = Integer.parseInt(loc[3]);
if (!bankWorld.equals(block.getWorld().getName()) || bankX != block.getX() || bankY != block.getY() || bankZ != block.getZ()) {
if (!newNetworkLocs.equals("")) {
newNetworkLocs += ";";
}
newNetworkLocs += bankWorld + ":" + bankX + ":" + bankY + ":" + bankZ;
}
} else if (loc.length == 7) {
String bankWorld = loc[0];
int bankX = Integer.parseInt(loc[1]);
int bankY = Integer.parseInt(loc[2]);
int bankZ = Integer.parseInt(loc[3]);
int bankA = Integer.parseInt(loc[4]);
int bankB = Integer.parseInt(loc[5]);
int bankC = Integer.parseInt(loc[6]);
if (!bankWorld.equals(block.getWorld().getName()) || (!(bankX == block.getX() && bankY == block.getY() && bankZ == block.getZ()) && !(bankA == block.getX() && bankB == block.getY() && bankC == block.getZ()) )) {
if (!newNetworkLocs.equals("")) {
newNetworkLocs += ";";
}
newNetworkLocs += bankWorld + ":" + bankX + ":" + bankY + ":" + bankZ + ":" + bankA + ":" + bankB + ":" + bankC;
}
}
}
banksConfig.set("networks." + networkName + ".locations", newNetworkLocs);
saveChestBanks();
player.sendMessage(ChatColor.GOLD + "ChestBank removed from " + ChatColor.WHITE + networkName + ChatColor.GOLD + " network!");
return true;
}
String bankList = banksConfig.getString("banks");
String[] bankSplit = bankList.split(";");
if (bankSplit.length == 0 || bankSplit.length == 1) {
banksConfig.set("banks", "");
}
else {
String newBankList = "";
for (String chestBank : bankSplit) {
String[] bankLoc = chestBank.split(":");
if (bankLoc.length == 4) {
String blockWorld = bankLoc[0];
int blockX = Integer.parseInt(bankLoc[1]);
int blockY = Integer.parseInt(bankLoc[2]);
int blockZ = Integer.parseInt(bankLoc[3]);
if (!blockWorld.equals(block.getWorld().getName()) || blockX != block.getX() || blockY != block.getY() || blockZ != block.getZ()) {
if (!newBankList.equals("")) {
newBankList += ";";
}
newBankList += blockWorld + ":" + blockX + ":" + blockY + ":" + blockZ;
}
}
else {
String blockWorld = bankLoc[0];
int blockX = Integer.parseInt(bankLoc[1]);
int blockY = Integer.parseInt(bankLoc[2]);
int blockZ = Integer.parseInt(bankLoc[3]);
int blockA = Integer.parseInt(bankLoc[4]);
int blockB = Integer.parseInt(bankLoc[5]);
int blockC = Integer.parseInt(bankLoc[6]);
if (!(blockX == block.getX() && blockY == block.getY() && blockZ == block.getZ()) && !(blockA == block.getX() && blockB == block.getY() && blockC == block.getZ())) {
if (!newBankList.equals("")) {
newBankList += ";";
}
newBankList += blockWorld + ":" + blockX + ":" + blockY + ":" + blockZ + ":" + blockA + ":" + blockB + ":" + blockC;
}
}
}
banksConfig.set("banks", newBankList);
}
saveChestBanks();
player.sendMessage(ChatColor.GOLD + "ChestBank removed!");
return true;
}
else if (args[0].equalsIgnoreCase("info")) {
if (!player.hasPermission("chestbank.info")) {
player.sendMessage(ChatColor.RED + "You do not have permission to get ChestBank info!");
return true;
}
Block block = player.getTargetBlock(null, 4);
if (!isBankBlock(block)) {
player.sendMessage(ChatColor.RED + "This block is not a ChestBank!");
return true;
} else {
if (!isNetworkBank(block)) {
player.sendMessage(ChatColor.GOLD + "This ChestBank is on the main network!");
return true;
} else {
String network = getNetwork(block);
player.sendMessage(ChatColor.GOLD + "This ChestBank is on the " + ChatColor.WHITE + network + ChatColor.GOLD + " network!");
return true;
}
}
}
else if (args[0].equalsIgnoreCase("list")) {
String bankLocs = banksConfig.getString("banks", "");
player.sendMessage(ChatColor.GOLD + "ChestBank Networks:");
int banks = 0;
if (!bankLocs.equals("")) {
banks = bankLocs.split(";").length;
}
player.sendMessage(ChatColor.GOLD + " Main Network: " + ChatColor.WHITE + banks + " Location(s)");
String networkNames = banksConfig.getString("networks.names", "");
if (!networkNames.equals("")) {
String[] networks = networkNames.split(":");
for (String network : networks) {
bankLocs = banksConfig.getString("networks." + network + ".locations", "");
banks = 0;
if (!bankLocs.equals("")) {
banks = bankLocs.split(";").length;
}
player.sendMessage(ChatColor.GOLD + " " + network + " Network: " + ChatColor.WHITE + banks + " Location(s)");
}
}
return true;
}
}
else if (args.length == 2) {
if (!args[0].equalsIgnoreCase("see")) {
return false;
}
if (!player.hasPermission("chestbank.see")) {
player.sendMessage(ChatColor.RED + "You do not have permission to access other players' accounts!");
return true;
}
Block block = player.getTargetBlock(null, 4);
if (isNetworkBank(block) && !player.hasPermission("chestbank.see.networks")) {
player.sendMessage(ChatColor.RED + "You do not have permission to access other players' " + ChatColor.WHITE + getNetwork(block) + ChatColor.GOLD + " accounts!");
return true;
}
if (!isBankBlock(block)) {
player.sendMessage(ChatColor.RED + "You're not looking at a ChestBank!");
return true;
}
OfflinePlayer target = getServer().getOfflinePlayer(args[1]);
String account = "";
if (isNetworkBank(block)) {
account = getNetwork(block) + ">>" + target.getName();
} else {
account = target.getName();
}
if (chestAccounts.containsKey(account)) {
DoubleChestInventory lc = chestAccounts.get(account);
player.openInventory(lc);
}
else {
player.sendMessage(ChatColor.RED + target.getName() + " does not have a ChestBank account here!");
}
return true;
}
return false;
}
public boolean isBankBlock (Block block) {
    // True when the given block is a registered ChestBank, either on the
    // main network (stored under "banks") or on any named network.
    // "banks" holds ";"-separated entries "world:x:y:z[:x2:y2:z2]"; the
    // optional second coordinate triple is the other half of a double chest.
    String bankList = banksConfig.getString("banks", "");
    if (!bankList.equals("")) {
        for (String bank : bankList.split(";")) {
            if (bank.isEmpty()) {
                continue;
            }
            String[] bankCoords = bank.split(":");
            // Both halves of a double chest share the one stored world name,
            // so compare it once up front.  (Bug fix: the original only
            // checked the world for the first coordinate triple, so the
            // second half matched same-coordinate chests in ANY world.)
            if (!block.getWorld().getName().equals(bankCoords[0])) {
                continue;
            }
            if (coordsMatch(block, bankCoords, 1)) {
                return true;
            }
            if (bankCoords.length > 4 && coordsMatch(block, bankCoords, 4)) {
                return true;
            }
        }
    }
    // Not on the main network; fall through to the named-network lookup.
    return isNetworkBank(block);
}

// True when the block's coordinates equal the x:y:z triple stored at
// coords[offset], coords[offset+1], coords[offset+2].
private boolean coordsMatch(Block block, String[] coords, int offset) {
    return block.getX() == Integer.parseInt(coords[offset])
            && block.getY() == Integer.parseInt(coords[offset + 1])
            && block.getZ() == Integer.parseInt(coords[offset + 2]);
}
public boolean isNetworkBank(Block block) {
    // A block is a network bank when its coordinates appear in the
    // "locations" list of any named network under the "networks" section.
    // Location entries look like "world:x:y:z" or, for double chests,
    // "world:x:y:z:x2:y2:z2", joined with ";".
    ConfigurationSection networks = banksConfig.getConfigurationSection("networks");
    if (networks == null) {
        return false;
    }
    String names = networks.getString("names", "");
    if (names.equals("")) {
        return false;
    }
    String blockWorld = block.getWorld().getName();
    for (String name : names.split(":")) {
        String locations = networks.getString(name + ".locations", "");
        if (locations.equals("")) {
            continue;
        }
        for (String entry : locations.split(";")) {
            String[] parts = entry.split(":");
            String entryWorld = parts[0];
            int x = Integer.parseInt(parts[1]);
            int y = Integer.parseInt(parts[2]);
            int z = Integer.parseInt(parts[3]);
            if (blockWorld.equals(entryWorld) && x == block.getX() && y == block.getY() && z == block.getZ()) {
                return true;
            }
            // Double chest: the second half is stored as three extra ints.
            if (parts.length == 7) {
                x = Integer.parseInt(parts[4]);
                y = Integer.parseInt(parts[5]);
                z = Integer.parseInt(parts[6]);
                if (blockWorld.equals(entryWorld) && x == block.getX() && y == block.getY() && z == block.getZ()) {
                    return true;
                }
            }
        }
    }
    return false;
}
public String getNetwork(Block block) {
    // Returns the name of the named network whose location list contains
    // this block, or "" when the block belongs to no named network.
    // Bug fix: use the "" default so a missing "networks.names" key no
    // longer throws a NullPointerException on split().
    String networkNames = banksConfig.getString("networks.names", "");
    if (networkNames.equals("")) {
        return "";
    }
    String blockWorld = block.getWorld().getName();
    for (String networkName : networkNames.split(":")) {
        String networkLocs = banksConfig.getString("networks." + networkName + ".locations", "");
        for (String location : networkLocs.split(";")) {
            String[] loc = location.split(":");
            // Entries are world:x:y:z or (double chest) world:x:y:z:x2:y2:z2;
            // anything else is ignored, matching the original behavior.
            if (loc.length != 4 && loc.length != 7) {
                continue;
            }
            if (!loc[0].equals(blockWorld)) {
                continue;
            }
            if (Integer.parseInt(loc[1]) == block.getX()
                    && Integer.parseInt(loc[2]) == block.getY()
                    && Integer.parseInt(loc[3]) == block.getZ()) {
                // Return as soon as a network matches instead of scanning the
                // remaining networks (a block can only be in one network).
                return networkName;
            }
            if (loc.length == 7
                    && Integer.parseInt(loc[4]) == block.getX()
                    && Integer.parseInt(loc[5]) == block.getY()
                    && Integer.parseInt(loc[6]) == block.getZ()) {
                return networkName;
            }
        }
    }
    return "";
}
public Block getDoubleChest(Block block) {
    // Scans the four horizontal neighbours of the given chest block and
    // returns the first one that is also a chest (block id 54), i.e. the
    // other half of a double chest, or null for a single chest.
    // Offsets are checked in the same order as before: +x, -x, +z, -z.
    int[][] offsets = { {1, 0}, {-1, 0}, {0, 1}, {0, -1} };
    for (int[] offset : offsets) {
        Block neighbour = block.getWorld().getBlockAt(
                block.getX() + offset[0], block.getY(), block.getZ() + offset[1]);
        if (neighbour.getTypeId() == 54) {
            return neighbour;
        }
    }
    return null;
}
public HashMap<String, DoubleChestInventory> getAccounts() {
    // Rebuilds every in-memory account inventory from the "accounts" section
    // of the banks config.  Each stored value is a ";"-separated list of
    // slots serialized as "id:amount:damage[:enchId~lvl,enchId~lvl,...]";
    // id 0 marks an empty slot.
    HashMap<String, DoubleChestInventory> chests = new HashMap<String, DoubleChestInventory>();
    ConfigurationSection chestSection = banksConfig.getConfigurationSection("accounts");
    if (chestSection != null) {
        Set<String> fileChests = chestSection.getKeys(false);
        if (fileChests != null) {
            for (String playerName : fileChests) {
                // Network accounts are keyed "network>>player"; the
                // inventory's display name is only the player part.
                String account = "";
                if (playerName.contains(">>")) {
                    account = playerName.split(">>")[1];
                } else {
                    account = playerName;
                }
                // Back the view with two fresh NMS chest tile entities so it
                // behaves like a real double chest when opened.
                DoubleChestInventory returnInv = new CraftInventoryDoubleChest(new InventoryLargeChest(account, new TileEntityChest(), new TileEntityChest()));
                String[] chestInv = banksConfig.getString("accounts." + playerName).split(";");
                int i = 0; // slot index within the double chest
                for (String items : chestInv) {
                    String[] item = items.split(":");
                    int i0 = Integer.parseInt(item[0]);   // item type id
                    int i1 = Integer.parseInt(item[1]);   // stack size
                    short i2 = Short.parseShort(item[2]); // damage / durability
                    if(i0 != 0) {
                        ItemStack stack = new ItemStack(i0, i1, i2);
                        if (item.length == 4) {
                            // Optional 4th field: comma-separated "enchId~level" pairs.
                            String[] enchArray = item[3].split(",");
                            for (String ench : enchArray) {
                                String[] bits = ench.split("~");
                                int enchId = Integer.parseInt(bits[0]);
                                int enchLvl = Integer.parseInt(bits[1]);
                                // Unsafe add keeps levels beyond the vanilla cap.
                                stack.addUnsafeEnchantment(Enchantment.getById(enchId), enchLvl);
                            }
                        }
                        returnInv.setItem(i, stack);
                    }
                    i++;
                }
                chests.put(playerName, returnInv);
            }
        }
    }
    return chests;
}
public void setAccounts(HashMap<String, DoubleChestInventory> chests) {
    // Serializes every account inventory into the "accounts" section and
    // saves the banks config.  Slot format mirrors getAccounts():
    // "id:amount:damage[:enchId~lvl,...]" joined with ";"; empty slots are
    // stored as "0:0:0".
    // Improvements: StringBuilder instead of repeated String concatenation
    // (the old code was O(n^2) and needed replaceFirst() to strip leading
    // separators), and entrySet iteration instead of the fragile parallel
    // keySet()/values() arrays used for enchantments.
    for (Map.Entry<String, DoubleChestInventory> accountEntry : chests.entrySet()) {
        DoubleChestInventory chest = accountEntry.getValue();
        StringBuilder chestInv = new StringBuilder();
        for (ItemStack item : chest.getContents()) {
            if (chestInv.length() > 0) {
                chestInv.append(";");
            }
            if (item != null) {
                chestInv.append(item.getTypeId()).append(":")
                        .append(item.getAmount()).append(":")
                        .append((int) item.getDurability());
                Map<Enchantment, Integer> enchantments = item.getEnchantments();
                if (!enchantments.isEmpty()) {
                    chestInv.append(":");
                    boolean first = true;
                    for (Map.Entry<Enchantment, Integer> ench : enchantments.entrySet()) {
                        if (!first) {
                            chestInv.append(",");
                        }
                        chestInv.append(ench.getKey().getId()).append("~").append(ench.getValue());
                        first = false;
                    }
                }
            }
            else {
                chestInv.append("0:0:0");
            }
        }
        banksConfig.set("accounts." + accountEntry.getKey(), chestInv.toString());
    }
    saveChestBanks();
}
private void registerNetworkPerms() {
    // Registers per-network use/create/remove permission nodes so permission
    // plugins can grant access to individual networks.
    // Bug fixes:
    //  - "networks.names" is stored ":"-separated (see isNetworkBank() and
    //    the create handler), so split on ":" — the old "," split treated
    //    the whole list as one network name.
    //  - The node prefix was misspelled "chectbank", so the registered
    //    permissions never matched the "chestbank.*.networks.*" nodes
    //    actually checked in onCommand().
    String networksString = this.banksConfig.getString("networks.names", "");
    if (!"".equals(networksString)) {
        String[] networks = networksString.split(":");
        for (String network : networks) {
            String suffix = ".networks." + network.toLowerCase();
            getServer().getPluginManager().addPermission(new Permission("chestbank.use" + suffix));
            getServer().getPluginManager().addPermission(new Permission("chestbank.create" + suffix));
            getServer().getPluginManager().addPermission(new Permission("chestbank.remove" + suffix));
        }
    }
}
public void loadChestBanks() {
    // Lazily resolves chests.yml inside the plugin data folder, then
    // (re)loads the YAML configuration holding banks, networks and accounts.
    if (bankFile == null) {
        bankFile = new File(getDataFolder(),"chests.yml");
    }
    banksConfig = YamlConfiguration.loadConfiguration(bankFile);
}
public FileConfiguration getChestBanks() {
    // Lazy accessor: load the banks configuration from disk on first use,
    // then hand back the cached instance.
    if (banksConfig != null) {
        return banksConfig;
    }
    loadChestBanks();
    return banksConfig;
}
public void saveChestBanks() {
    // Persists the in-memory configuration to chests.yml.  A no-op until
    // both the config and its backing file have been initialised; write
    // failures are logged rather than propagated.
    boolean ready = banksConfig != null && bankFile != null;
    if (ready) {
        try {
            banksConfig.save(bankFile);
        } catch (IOException ex) {
            logger.log(Level.SEVERE, "Could not save " + bankFile, ex);
        }
    }
}
}
| false
| true
|
// Handles the /chestbank player command.  With no args it lists the
// subcommands the player may use; with 1-2 args it dispatches to
// create/remove/info/list, and "see <player>" opens another player's
// account.  The console is rejected because every subcommand targets the
// block the player is looking at.
public boolean onCommand (CommandSender sender, Command cmd, String commandLabel, String[] args) {
    if (!(sender instanceof Player)) {
        sender.sendMessage("Sorry! The console can't use this command!");
        return true;
    }
    Player player = (Player) sender;
    if (args.length == 0) {
        // Command list requested: show only entries the player may use.
        PluginDescriptionFile pdfFile = this.getDescription();
        player.sendMessage(ChatColor.GOLD + pdfFile.getName() + " version " + pdfFile.getVersion() + " by " + pdfFile.getAuthors());
        boolean found = false;
        if (player.hasPermission("chestbank.info")) {
            player.sendMessage(ChatColor.GOLD + " /chestbank info " + ChatColor.GRAY + ": Get targetted ChestBank's info.");
            // Bug fix: 'found' was never set to true in any branch, so the
            // "no commands" fallback below printed even after listing
            // available commands.
            found = true;
        }
        if (player.hasPermission("chestbank.list")) {
            player.sendMessage(ChatColor.GOLD + " /chestbank list " + ChatColor.GRAY + ": List all existing ChestBank networks.");
            found = true;
        }
        if (player.hasPermission("chestbank.create")) {
            player.sendMessage(ChatColor.GOLD + " /chestbank create " + ChatColor.GRAY + ": Make targetted chest a ChestBank.");
            found = true;
        }
        if ((!useNetworkPerms && player.hasPermission("chestbank.create.networks")) || useNetworkPerms) {
            player.sendMessage(ChatColor.GOLD + " /chestbank create {network}" + ChatColor.GRAY + ": Create a Chestbank on the");
            player.sendMessage(ChatColor.GRAY + " named network.");
            found = true;
        }
        if (player.hasPermission("chestbank.remove")) {
            player.sendMessage(ChatColor.GOLD + " /chestbank remove " + ChatColor.GRAY + ": Make targetted ChestBank a chest.");
            found = true;
        }
        if (player.hasPermission("chestbank.see")) {
            player.sendMessage(ChatColor.GOLD + " /chestbank see [player] " + ChatColor.GRAY + ": View player's ChestBank account.");
            found = true;
        }
        if (!found) {
            player.sendMessage(ChatColor.GOLD + "There are no ChestBank commands you can use!");
        }
        return true;
    }
    else if (args.length == 1 || (args.length == 2 && (args[0].equalsIgnoreCase("create") || args[0].equalsIgnoreCase("remove")))) {
        if (args[0].equalsIgnoreCase("see")) {
            player.sendMessage(ChatColor.RED + "Please specify a player!");
            return false;
        }
        // Create, Remove, List, Info
        else if (args[0].equalsIgnoreCase("create")) {
            if (!player.hasPermission("chestbank.create")) {
                player.sendMessage(ChatColor.RED + "You do not have permission to create a ChestBank!");
                return true;
            }
            // Per-network create permissions (exact node or wildcard) when
            // useNetworkPerms is on, a single blanket node otherwise.
            if (args.length == 2 && useNetworkPerms && (!player.hasPermission("chestbank.create.networks." + args[1].toLowerCase()) && !player.hasPermission("chestbank.create.networks.*"))) {
                player.sendMessage(ChatColor.RED + "You do not have permission to create a ChestBank on the");
                player.sendMessage(ChatColor.WHITE + args[1].toLowerCase() + ChatColor.RED + " network!");
                return true;
            } else if (args.length == 2 && !useNetworkPerms && !player.hasPermission("chestbank.create.networks")) {
                player.sendMessage(ChatColor.RED + "You do not have permission to create a ChestBank on named networks!");
                return true;
            }
            Block block = player.getTargetBlock(null, 4);
            // 54 is the chest block id.
            if (block.getTypeId() != 54) {
                player.sendMessage(ChatColor.RED + "You're not looking at a chest!");
                return true;
            }
            if (isBankBlock(block)) {
                player.sendMessage(ChatColor.RED + "That is already a ChestBank!");
                return true;
            }
            // Check the creation fee up front so nothing is written when the
            // player cannot pay; the charge itself happens after creation.
            if (gotVault && gotEconomy && createFee != 0) {
                if ((args.length == 2 && !player.hasPermission("chestbank.free.create.networks")) || (args.length == 1 && !player.hasPermission("chestbank.free.create"))) {
                    if (vault.economy.getBalance(player.getName()) < createFee) {
                        player.sendMessage(ChatColor.RED + "You cannot afford the ChestBank creation fee of");
                        player.sendMessage(ChatColor.WHITE + vault.economy.format(createFee) + ChatColor.RED + "!");
                        return true;
                    }
                }
            }
            if (args.length == 2) {
                // Network specified: register the name (":"-separated list)
                // and append this chest's location to the network.
                String network = args[1];
                String bankNames = banksConfig.getString("networks.names", "");
                if (bankNames.equals("")) {
                    bankNames = args[1];
                } else {
                    String[] bankNamesArray = bankNames.split(":");
                    boolean exists = false;
                    for (String bankName : bankNamesArray) {
                        if (bankName.equals(args[1])) {
                            exists = true;
                        }
                    }
                    if (!exists) {
                        bankNames += ":" + args[1];
                    }
                }
                banksConfig.set("networks.names", bankNames);
                ConfigurationSection networkBank = banksConfig.getConfigurationSection("networks." + args[1]);
                String locsList = "";
                if (networkBank != null) {
                    locsList = networkBank.getString("locations", "");
                    if (!locsList.equals("")) {
                        locsList += ";";
                    }
                }
                locsList += block.getWorld().getName() + ":" + block.getX() + ":" + block.getY() + ":" + block.getZ();
                // Double chests store the second half as three extra ints.
                Block doubleChest = getDoubleChest(block);
                if (doubleChest != null) {
                    locsList += ":" + doubleChest.getX() + ":" + doubleChest.getY() + ":" + doubleChest.getZ();
                }
                banksConfig.set("networks." + args[1] + ".locations", locsList);
                saveChestBanks();
                player.sendMessage(ChatColor.GOLD + "ChestBank created on " + ChatColor.WHITE + network + ChatColor.GOLD + " Network!");
                if (gotVault && gotEconomy && createFee != 0 && !player.hasPermission("chestbank.free.create.networks")) {
                    vault.economy.withdrawPlayer(player.getName(), createFee);
                    player.sendMessage(ChatColor.GOLD + "You were charged " + ChatColor.WHITE + vault.economy.format(createFee) + ChatColor.GOLD + " for ChestBank creation!");
                }
                return true;
            }
            // No network specified: append to the main "banks" list.
            String bankList = banksConfig.getString("banks", "");
            if (bankList.equals("")) {
                bankList += block.getWorld().getName() + ":" + block.getX() + ":" + block.getY() + ":" + block.getZ();
            }
            else {
                bankList += ";" + block.getWorld().getName() + ":" + block.getX() + ":" + block.getY() + ":" + block.getZ();
            }
            Block doubleChest = getDoubleChest(block);
            if (doubleChest != null) {
                bankList += ":" + doubleChest.getX() + ":" + doubleChest.getY() + ":" + doubleChest.getZ();
            }
            banksConfig.set("banks", bankList);
            saveChestBanks();
            player.sendMessage(ChatColor.GOLD + "ChestBank created!");
            if (gotVault && gotEconomy && createFee != 0 && !player.hasPermission("chestbank.free.create")) {
                vault.economy.withdrawPlayer(player.getName(), createFee);
                player.sendMessage(ChatColor.GOLD + "You were charged " + ChatColor.WHITE + vault.economy.format(createFee) + ChatColor.GOLD + " for ChestBank creation!");
            }
            return true;
        }
        else if (args[0].equalsIgnoreCase("remove")) {
            if (!player.hasPermission("chestbank.remove")) {
                player.sendMessage(ChatColor.RED + "You do not have permission to remove a ChestBank!");
                return true;
            }
            Block block = player.getTargetBlock(null, 4);
            if (isNetworkBank(block) && useNetworkPerms && (!player.hasPermission("chestbank.remove.networks." + getNetwork(block).toLowerCase()) && !player.hasPermission("chestbank.remove.networks.*"))) {
                player.sendMessage(ChatColor.RED + "You do not have permission to remove a ChestBank on the");
                player.sendMessage(ChatColor.WHITE + getNetwork(block) + ChatColor.RED + " network!");
                return true;
            } else if (isNetworkBank(block) && !useNetworkPerms && !player.hasPermission("chestbank.remove.networks")) {
                player.sendMessage(ChatColor.RED + "You do not have permission to remove a ChestBank on named networks!");
                return true;
            }
            if (!isBankBlock(block)) {
                player.sendMessage(ChatColor.RED + "You're not looking at a ChestBank!");
                return true;
            }
            if (isNetworkBank(block)) {
                // Rewrite the network's location list without the entry
                // matching either half of this (possibly double) chest.
                String networkNames = banksConfig.getString("networks.names");
                String[] networkNamesArray = networkNames.split(":");
                String networkName = getNetwork(block);
                String networkLocs = banksConfig.getString("networks." + networkName + ".locations", "");
                String newNetworkLocs = "";
                for (String location : networkLocs.split(";")) {
                    String[] loc = location.split(":");
                    if (loc.length == 4) {
                        String bankWorld = loc[0];
                        int bankX = Integer.parseInt(loc[1]);
                        int bankY = Integer.parseInt(loc[2]);
                        int bankZ = Integer.parseInt(loc[3]);
                        if (!bankWorld.equals(block.getWorld().getName()) || bankX != block.getX() || bankY != block.getY() || bankZ != block.getZ()) {
                            if (!newNetworkLocs.equals("")) {
                                newNetworkLocs += ";";
                            }
                            newNetworkLocs += bankWorld + ":" + bankX + ":" + bankY + ":" + bankZ;
                        }
                    } else if (loc.length == 7) {
                        String bankWorld = loc[0];
                        int bankX = Integer.parseInt(loc[1]);
                        int bankY = Integer.parseInt(loc[2]);
                        int bankZ = Integer.parseInt(loc[3]);
                        int bankA = Integer.parseInt(loc[4]);
                        int bankB = Integer.parseInt(loc[5]);
                        int bankC = Integer.parseInt(loc[6]);
                        if (!bankWorld.equals(block.getWorld().getName()) || (!(bankX == block.getX() && bankY == block.getY() && bankZ == block.getZ()) && !(bankA == block.getX() && bankB == block.getY() && bankC == block.getZ()) )) {
                            if (!newNetworkLocs.equals("")) {
                                newNetworkLocs += ";";
                            }
                            newNetworkLocs += bankWorld + ":" + bankX + ":" + bankY + ":" + bankZ + ":" + bankA + ":" + bankB + ":" + bankC;
                        }
                    }
                }
                banksConfig.set("networks." + networkName + ".locations", newNetworkLocs);
                saveChestBanks();
                player.sendMessage(ChatColor.GOLD + "ChestBank removed from " + ChatColor.WHITE + networkName + ChatColor.GOLD + " network!");
                return true;
            }
            // Main-network removal: rebuild "banks" without this chest.
            String bankList = banksConfig.getString("banks");
            String[] bankSplit = bankList.split(";");
            if (bankSplit.length == 0 || bankSplit.length == 1) {
                banksConfig.set("banks", "");
            }
            else {
                String newBankList = "";
                for (String chestBank : bankSplit) {
                    String[] bankLoc = chestBank.split(":");
                    if (bankLoc.length == 4) {
                        String blockWorld = bankLoc[0];
                        int blockX = Integer.parseInt(bankLoc[1]);
                        int blockY = Integer.parseInt(bankLoc[2]);
                        int blockZ = Integer.parseInt(bankLoc[3]);
                        if (!blockWorld.equals(block.getWorld().getName()) || blockX != block.getX() || blockY != block.getY() || blockZ != block.getZ()) {
                            if (!newBankList.equals("")) {
                                newBankList += ";";
                            }
                            newBankList += blockWorld + ":" + blockX + ":" + blockY + ":" + blockZ;
                        }
                    }
                    else {
                        String blockWorld = bankLoc[0];
                        int blockX = Integer.parseInt(bankLoc[1]);
                        int blockY = Integer.parseInt(bankLoc[2]);
                        int blockZ = Integer.parseInt(bankLoc[3]);
                        int blockA = Integer.parseInt(bankLoc[4]);
                        int blockB = Integer.parseInt(bankLoc[5]);
                        int blockC = Integer.parseInt(bankLoc[6]);
                        if (!(blockX == block.getX() && blockY == block.getY() && blockZ == block.getZ()) && !(blockA == block.getX() && blockB == block.getY() && blockC == block.getZ())) {
                            if (!newBankList.equals("")) {
                                newBankList += ";";
                            }
                            newBankList += blockWorld + ":" + blockX + ":" + blockY + ":" + blockZ + ":" + blockA + ":" + blockB + ":" + blockC;
                        }
                    }
                }
                banksConfig.set("banks", newBankList);
            }
            saveChestBanks();
            player.sendMessage(ChatColor.GOLD + "ChestBank removed!");
            return true;
        }
        else if (args[0].equalsIgnoreCase("info")) {
            if (!player.hasPermission("chestbank.info")) {
                player.sendMessage(ChatColor.RED + "You do not have permission to get ChestBank info!");
                return true;
            }
            Block block = player.getTargetBlock(null, 4);
            if (!isBankBlock(block)) {
                player.sendMessage(ChatColor.RED + "This block is not a ChestBank!");
                return true;
            } else {
                if (!isNetworkBank(block)) {
                    player.sendMessage(ChatColor.GOLD + "This ChestBank is on the main network!");
                    return true;
                } else {
                    String network = getNetwork(block);
                    player.sendMessage(ChatColor.GOLD + "This ChestBank is on the " + ChatColor.WHITE + network + ChatColor.GOLD + " network!");
                    return true;
                }
            }
        }
        else if (args[0].equalsIgnoreCase("list")) {
            // List the location count of the main network and every named one.
            String bankLocs = banksConfig.getString("banks", "");
            player.sendMessage(ChatColor.GOLD + "ChestBank Networks:");
            int banks = 0;
            if (!bankLocs.equals("")) {
                banks = bankLocs.split(";").length;
            }
            player.sendMessage(ChatColor.GOLD + " Main Network: " + ChatColor.WHITE + banks + " Location(s)");
            String networkNames = banksConfig.getString("networks.names", "");
            if (!networkNames.equals("")) {
                String[] networks = networkNames.split(":");
                for (String network : networks) {
                    bankLocs = banksConfig.getString("networks." + network + ".locations", "");
                    banks = 0;
                    if (!bankLocs.equals("")) {
                        banks = bankLocs.split(";").length;
                    }
                    player.sendMessage(ChatColor.GOLD + " " + network + " Network: " + ChatColor.WHITE + banks + " Location(s)");
                }
            }
            return true;
        }
    }
    else if (args.length == 2) {
        // Only "see <player>" takes exactly two args here.
        if (!args[0].equalsIgnoreCase("see")) {
            return false;
        }
        if (!player.hasPermission("chestbank.see")) {
            player.sendMessage(ChatColor.RED + "You do not have permission to access other players' accounts!");
            return true;
        }
        Block block = player.getTargetBlock(null, 4);
        if (isNetworkBank(block) && !player.hasPermission("chestbank.see.networks")) {
            player.sendMessage(ChatColor.RED + "You do not have permission to access other players' " + ChatColor.WHITE + getNetwork(block) + ChatColor.GOLD + " accounts!");
            return true;
        }
        if (!isBankBlock(block)) {
            player.sendMessage(ChatColor.RED + "You're not looking at a ChestBank!");
            return true;
        }
        OfflinePlayer target = getServer().getOfflinePlayer(args[1]);
        // Network accounts are keyed "network>>player".
        String account = "";
        if (isNetworkBank(block)) {
            account = getNetwork(block) + ">>" + target.getName();
        } else {
            account = target.getName();
        }
        if (chestAccounts.containsKey(account)) {
            DoubleChestInventory lc = chestAccounts.get(account);
            player.openInventory(lc);
        }
        else {
            player.sendMessage(ChatColor.RED + target.getName() + " does not have a ChestBank account here!");
        }
        return true;
    }
    return false;
}
|
// Handles the /chestbank player command.  With no args it lists the
// subcommands the player may use; with 1-2 args it dispatches to
// create/remove/info/list, and "see <player>" opens another player's
// account.  The console is rejected because every subcommand targets the
// block the player is looking at.
public boolean onCommand (CommandSender sender, Command cmd, String commandLabel, String[] args) {
    if (!(sender instanceof Player)) {
        sender.sendMessage("Sorry! The console can't use this command!");
        return true;
    }
    Player player = (Player) sender;
    if (args.length == 0) {
        // Command list requested
        PluginDescriptionFile pdfFile = this.getDescription();
        player.sendMessage(ChatColor.GOLD + pdfFile.getName() + " version " + pdfFile.getVersion() + " by " + pdfFile.getAuthors());
        // 'found' tracks whether at least one help entry was shown.
        boolean found = false;
        if (player.hasPermission("chestbank.info")) {
            player.sendMessage(ChatColor.GOLD + " /chestbank info " + ChatColor.GRAY + ": Get targetted ChestBank's info.");
            found = true;
        }
        if (player.hasPermission("chestbank.list")) {
            player.sendMessage(ChatColor.GOLD + " /chestbank list " + ChatColor.GRAY + ": List all existing ChestBank networks.");
            found = true;
        }
        if (player.hasPermission("chestbank.create")) {
            player.sendMessage(ChatColor.GOLD + " /chestbank create " + ChatColor.GRAY + ": Make targetted chest a ChestBank.");
            found = true;
        }
        if ((!useNetworkPerms && player.hasPermission("chestbank.create.networks")) || useNetworkPerms) {
            player.sendMessage(ChatColor.GOLD + " /chestbank create {network}" + ChatColor.GRAY + ": Create a Chestbank on the");
            player.sendMessage(ChatColor.GRAY + " named network.");
            found = true;
        }
        if (player.hasPermission("chestbank.remove")) {
            player.sendMessage(ChatColor.GOLD + " /chestbank remove " + ChatColor.GRAY + ": Make targetted ChestBank a chest.");
            found = true;
        }
        if (player.hasPermission("chestbank.see")) {
            player.sendMessage(ChatColor.GOLD + " /chestbank see [player] " + ChatColor.GRAY + ": View player's ChestBank account.");
            found = true;
        }
        if (!found) {
            player.sendMessage(ChatColor.GOLD + "There are no ChestBank commands you can use!");
            // NOTE(review): this assignment has no effect (the method
            // returns on the next line); kept as-is.
            found = true;
        }
        return true;
    }
    else if (args.length == 1 || (args.length == 2 && (args[0].equalsIgnoreCase("create") || args[0].equalsIgnoreCase("remove")))) {
        if (args[0].equalsIgnoreCase("see")) {
            player.sendMessage(ChatColor.RED + "Please specify a player!");
            return false;
        }
        // Create, Remove, List, Info
        else if (args[0].equalsIgnoreCase("create")) {
            if (!player.hasPermission("chestbank.create")) {
                player.sendMessage(ChatColor.RED + "You do not have permission to create a ChestBank!");
                return true;
            }
            // Per-network create permission (exact node or wildcard) when
            // useNetworkPerms is on, a single blanket node otherwise.
            if (args.length == 2 && useNetworkPerms && (!player.hasPermission("chestbank.create.networks." + args[1].toLowerCase()) && !player.hasPermission("chestbank.create.networks.*"))) {
                player.sendMessage(ChatColor.RED + "You do not have permission to create a ChestBank on the");
                player.sendMessage(ChatColor.WHITE + args[1].toLowerCase() + ChatColor.RED + " network!");
                return true;
            } else if (args.length == 2 && !useNetworkPerms && !player.hasPermission("chestbank.create.networks")) {
                player.sendMessage(ChatColor.RED + "You do not have permission to create a ChestBank on named networks!");
                return true;
            }
            Block block = player.getTargetBlock(null, 4);
            // 54 is the chest block id.
            if (block.getTypeId() != 54) {
                player.sendMessage(ChatColor.RED + "You're not looking at a chest!");
                return true;
            }
            if (isBankBlock(block)) {
                player.sendMessage(ChatColor.RED + "That is already a ChestBank!");
                return true;
            }
            // Fee is checked before anything is written; the actual charge
            // happens after the bank is created.
            if (gotVault && gotEconomy && createFee != 0) {
                if ((args.length == 2 && !player.hasPermission("chestbank.free.create.networks")) || (args.length == 1 && !player.hasPermission("chestbank.free.create"))) {
                    if (vault.economy.getBalance(player.getName()) < createFee) {
                        player.sendMessage(ChatColor.RED + "You cannot afford the ChestBank creation fee of");
                        player.sendMessage(ChatColor.WHITE + vault.economy.format(createFee) + ChatColor.RED + "!");
                        return true;
                    }
                }
            }
            if (args.length == 2) {
                // Network specified: register the name in the ":"-separated
                // list (if new) and append this chest's location.
                String network = args[1];
                String bankNames = banksConfig.getString("networks.names", "");
                if (bankNames.equals("")) {
                    bankNames = args[1];
                } else {
                    String[] bankNamesArray = bankNames.split(":");
                    boolean exists = false;
                    for (String bankName : bankNamesArray) {
                        if (bankName.equals(args[1])) {
                            exists = true;
                        }
                    }
                    if (!exists) {
                        bankNames += ":" + args[1];
                    }
                }
                banksConfig.set("networks.names", bankNames);
                ConfigurationSection networkBank = banksConfig.getConfigurationSection("networks." + args[1]);
                String locsList = "";
                if (networkBank != null) {
                    locsList = networkBank.getString("locations", "");
                    if (!locsList.equals("")) {
                        locsList += ";";
                    }
                }
                locsList += block.getWorld().getName() + ":" + block.getX() + ":" + block.getY() + ":" + block.getZ();
                // Double chests store the second half as three extra ints.
                Block doubleChest = getDoubleChest(block);
                if (doubleChest != null) {
                    locsList += ":" + doubleChest.getX() + ":" + doubleChest.getY() + ":" + doubleChest.getZ();
                }
                banksConfig.set("networks." + args[1] + ".locations", locsList);
                saveChestBanks();
                player.sendMessage(ChatColor.GOLD + "ChestBank created on " + ChatColor.WHITE + network + ChatColor.GOLD + " Network!");
                if (gotVault && gotEconomy && createFee != 0 && !player.hasPermission("chestbank.free.create.networks")) {
                    vault.economy.withdrawPlayer(player.getName(), createFee);
                    player.sendMessage(ChatColor.GOLD + "You were charged " + ChatColor.WHITE + vault.economy.format(createFee) + ChatColor.GOLD + " for ChestBank creation!");
                }
                return true;
            }
            // No network specified: append to the main "banks" list.
            String bankList = banksConfig.getString("banks", "");
            if (bankList.equals("")) {
                bankList += block.getWorld().getName() + ":" + block.getX() + ":" + block.getY() + ":" + block.getZ();
            }
            else {
                bankList += ";" + block.getWorld().getName() + ":" + block.getX() + ":" + block.getY() + ":" + block.getZ();
            }
            Block doubleChest = getDoubleChest(block);
            if (doubleChest != null) {
                bankList += ":" + doubleChest.getX() + ":" + doubleChest.getY() + ":" + doubleChest.getZ();
            }
            banksConfig.set("banks", bankList);
            saveChestBanks();
            player.sendMessage(ChatColor.GOLD + "ChestBank created!");
            if (gotVault && gotEconomy && createFee != 0 && !player.hasPermission("chestbank.free.create")) {
                vault.economy.withdrawPlayer(player.getName(), createFee);
                player.sendMessage(ChatColor.GOLD + "You were charged " + ChatColor.WHITE + vault.economy.format(createFee) + ChatColor.GOLD + " for ChestBank creation!");
            }
            return true;
        }
        else if (args[0].equalsIgnoreCase("remove")) {
            if (!player.hasPermission("chestbank.remove")) {
                player.sendMessage(ChatColor.RED + "You do not have permission to remove a ChestBank!");
                return true;
            }
            Block block = player.getTargetBlock(null, 4);
            if (isNetworkBank(block) && useNetworkPerms && (!player.hasPermission("chestbank.remove.networks." + getNetwork(block).toLowerCase()) && !player.hasPermission("chestbank.remove.networks.*"))) {
                player.sendMessage(ChatColor.RED + "You do not have permission to remove a ChestBank on the");
                player.sendMessage(ChatColor.WHITE + getNetwork(block) + ChatColor.RED + " network!");
                return true;
            } else if (isNetworkBank(block) && !useNetworkPerms && !player.hasPermission("chestbank.remove.networks")) {
                player.sendMessage(ChatColor.RED + "You do not have permission to remove a ChestBank on named networks!");
                return true;
            }
            if (!isBankBlock(block)) {
                player.sendMessage(ChatColor.RED + "You're not looking at a ChestBank!");
                return true;
            }
            if (isNetworkBank(block)) {
                // Rewrite the network's location list without the entry
                // matching either half of this (possibly double) chest.
                String networkNames = banksConfig.getString("networks.names");
                String[] networkNamesArray = networkNames.split(":");
                String networkName = getNetwork(block);
                String networkLocs = banksConfig.getString("networks." + networkName + ".locations", "");
                String newNetworkLocs = "";
                for (String location : networkLocs.split(";")) {
                    String[] loc = location.split(":");
                    if (loc.length == 4) {
                        String bankWorld = loc[0];
                        int bankX = Integer.parseInt(loc[1]);
                        int bankY = Integer.parseInt(loc[2]);
                        int bankZ = Integer.parseInt(loc[3]);
                        if (!bankWorld.equals(block.getWorld().getName()) || bankX != block.getX() || bankY != block.getY() || bankZ != block.getZ()) {
                            if (!newNetworkLocs.equals("")) {
                                newNetworkLocs += ";";
                            }
                            newNetworkLocs += bankWorld + ":" + bankX + ":" + bankY + ":" + bankZ;
                        }
                    } else if (loc.length == 7) {
                        String bankWorld = loc[0];
                        int bankX = Integer.parseInt(loc[1]);
                        int bankY = Integer.parseInt(loc[2]);
                        int bankZ = Integer.parseInt(loc[3]);
                        int bankA = Integer.parseInt(loc[4]);
                        int bankB = Integer.parseInt(loc[5]);
                        int bankC = Integer.parseInt(loc[6]);
                        if (!bankWorld.equals(block.getWorld().getName()) || (!(bankX == block.getX() && bankY == block.getY() && bankZ == block.getZ()) && !(bankA == block.getX() && bankB == block.getY() && bankC == block.getZ()) )) {
                            if (!newNetworkLocs.equals("")) {
                                newNetworkLocs += ";";
                            }
                            newNetworkLocs += bankWorld + ":" + bankX + ":" + bankY + ":" + bankZ + ":" + bankA + ":" + bankB + ":" + bankC;
                        }
                    }
                }
                banksConfig.set("networks." + networkName + ".locations", newNetworkLocs);
                saveChestBanks();
                player.sendMessage(ChatColor.GOLD + "ChestBank removed from " + ChatColor.WHITE + networkName + ChatColor.GOLD + " network!");
                return true;
            }
            // Main-network removal: rebuild "banks" without this chest.
            String bankList = banksConfig.getString("banks");
            String[] bankSplit = bankList.split(";");
            if (bankSplit.length == 0 || bankSplit.length == 1) {
                banksConfig.set("banks", "");
            }
            else {
                String newBankList = "";
                for (String chestBank : bankSplit) {
                    String[] bankLoc = chestBank.split(":");
                    if (bankLoc.length == 4) {
                        String blockWorld = bankLoc[0];
                        int blockX = Integer.parseInt(bankLoc[1]);
                        int blockY = Integer.parseInt(bankLoc[2]);
                        int blockZ = Integer.parseInt(bankLoc[3]);
                        if (!blockWorld.equals(block.getWorld().getName()) || blockX != block.getX() || blockY != block.getY() || blockZ != block.getZ()) {
                            if (!newBankList.equals("")) {
                                newBankList += ";";
                            }
                            newBankList += blockWorld + ":" + blockX + ":" + blockY + ":" + blockZ;
                        }
                    }
                    else {
                        String blockWorld = bankLoc[0];
                        int blockX = Integer.parseInt(bankLoc[1]);
                        int blockY = Integer.parseInt(bankLoc[2]);
                        int blockZ = Integer.parseInt(bankLoc[3]);
                        int blockA = Integer.parseInt(bankLoc[4]);
                        int blockB = Integer.parseInt(bankLoc[5]);
                        int blockC = Integer.parseInt(bankLoc[6]);
                        if (!(blockX == block.getX() && blockY == block.getY() && blockZ == block.getZ()) && !(blockA == block.getX() && blockB == block.getY() && blockC == block.getZ())) {
                            if (!newBankList.equals("")) {
                                newBankList += ";";
                            }
                            newBankList += blockWorld + ":" + blockX + ":" + blockY + ":" + blockZ + ":" + blockA + ":" + blockB + ":" + blockC;
                        }
                    }
                }
                banksConfig.set("banks", newBankList);
            }
            saveChestBanks();
            player.sendMessage(ChatColor.GOLD + "ChestBank removed!");
            return true;
        }
        else if (args[0].equalsIgnoreCase("info")) {
            if (!player.hasPermission("chestbank.info")) {
                player.sendMessage(ChatColor.RED + "You do not have permission to get ChestBank info!");
                return true;
            }
            Block block = player.getTargetBlock(null, 4);
            if (!isBankBlock(block)) {
                player.sendMessage(ChatColor.RED + "This block is not a ChestBank!");
                return true;
            } else {
                if (!isNetworkBank(block)) {
                    player.sendMessage(ChatColor.GOLD + "This ChestBank is on the main network!");
                    return true;
                } else {
                    String network = getNetwork(block);
                    player.sendMessage(ChatColor.GOLD + "This ChestBank is on the " + ChatColor.WHITE + network + ChatColor.GOLD + " network!");
                    return true;
                }
            }
        }
        else if (args[0].equalsIgnoreCase("list")) {
            // List the location count of the main network and each named one.
            String bankLocs = banksConfig.getString("banks", "");
            player.sendMessage(ChatColor.GOLD + "ChestBank Networks:");
            int banks = 0;
            if (!bankLocs.equals("")) {
                banks = bankLocs.split(";").length;
            }
            player.sendMessage(ChatColor.GOLD + " Main Network: " + ChatColor.WHITE + banks + " Location(s)");
            String networkNames = banksConfig.getString("networks.names", "");
            if (!networkNames.equals("")) {
                String[] networks = networkNames.split(":");
                for (String network : networks) {
                    bankLocs = banksConfig.getString("networks." + network + ".locations", "");
                    banks = 0;
                    if (!bankLocs.equals("")) {
                        banks = bankLocs.split(";").length;
                    }
                    player.sendMessage(ChatColor.GOLD + " " + network + " Network: " + ChatColor.WHITE + banks + " Location(s)");
                }
            }
            return true;
        }
    }
    else if (args.length == 2) {
        // Only "see <player>" takes exactly two args here.
        if (!args[0].equalsIgnoreCase("see")) {
            return false;
        }
        if (!player.hasPermission("chestbank.see")) {
            player.sendMessage(ChatColor.RED + "You do not have permission to access other players' accounts!");
            return true;
        }
        Block block = player.getTargetBlock(null, 4);
        if (isNetworkBank(block) && !player.hasPermission("chestbank.see.networks")) {
            player.sendMessage(ChatColor.RED + "You do not have permission to access other players' " + ChatColor.WHITE + getNetwork(block) + ChatColor.GOLD + " accounts!");
            return true;
        }
        if (!isBankBlock(block)) {
            player.sendMessage(ChatColor.RED + "You're not looking at a ChestBank!");
            return true;
        }
        OfflinePlayer target = getServer().getOfflinePlayer(args[1]);
        // Network accounts are keyed "network>>player".
        String account = "";
        if (isNetworkBank(block)) {
            account = getNetwork(block) + ">>" + target.getName();
        } else {
            account = target.getName();
        }
        if (chestAccounts.containsKey(account)) {
            DoubleChestInventory lc = chestAccounts.get(account);
            player.openInventory(lc);
        }
        else {
            player.sendMessage(ChatColor.RED + target.getName() + " does not have a ChestBank account here!");
        }
        return true;
    }
    return false;
}
|
diff --git a/src/com/kurento/kas/mscontrol/join/VideoJoinableStreamImpl.java b/src/com/kurento/kas/mscontrol/join/VideoJoinableStreamImpl.java
index 4506885..5ba875f 100644
--- a/src/com/kurento/kas/mscontrol/join/VideoJoinableStreamImpl.java
+++ b/src/com/kurento/kas/mscontrol/join/VideoJoinableStreamImpl.java
@@ -1,174 +1,174 @@
package com.kurento.kas.mscontrol.join;
import java.util.Map;
import java.util.concurrent.LinkedBlockingDeque;
import android.util.Log;
import com.kurento.commons.media.format.SessionSpec;
import com.kurento.commons.media.format.SpecTools;
import com.kurento.commons.mscontrol.MsControlException;
import com.kurento.commons.mscontrol.join.Joinable;
import com.kurento.commons.mscontrol.join.JoinableContainer;
import com.kurento.commons.sdp.enums.MediaType;
import com.kurento.commons.sdp.enums.Mode;
import com.kurento.kas.media.VideoCodecType;
import com.kurento.kas.media.profiles.VideoProfile;
import com.kurento.kas.media.rx.MediaRx;
import com.kurento.kas.media.rx.VideoRx;
import com.kurento.kas.media.tx.MediaTx;
import com.kurento.kas.media.tx.VideoInfoTx;
import com.kurento.kas.mscontrol.mediacomponent.VideoSink;
import com.kurento.kas.mscontrol.networkconnection.RTPInfo;
public class VideoJoinableStreamImpl extends JoinableStreamBase implements
VideoSink, VideoRx {
public final static String LOG_TAG = "VideoJoinableStream";
private VideoProfile videoProfile = null;
private SessionSpec localSessionSpec;
private VideoTxThread videoTxThread = null;
private VideoRxThread videoRxThread = null;
private class Frame {
private byte[] data;
private int width;
private int height;
public Frame(byte[] data, int width, int height) {
this.data = data;
this.width = width;
this.height = height;
}
}
private int QUEUE_SIZE = 2;
private LinkedBlockingDeque<Frame> framesQueue = new LinkedBlockingDeque<Frame>(
QUEUE_SIZE);
private LinkedBlockingDeque<Long> txTimes = new LinkedBlockingDeque<Long>(
QUEUE_SIZE);
public VideoProfile getVideoProfile() {
return videoProfile;
}
public VideoJoinableStreamImpl(JoinableContainer container,
StreamType type, SessionSpec remoteSessionSpec,
SessionSpec localSessionSpec, Integer framesQueueSize) {
super(container, type);
this.localSessionSpec = localSessionSpec;
if (framesQueueSize != null && framesQueueSize > QUEUE_SIZE)
QUEUE_SIZE = framesQueueSize;
Log.d(LOG_TAG, "QUEUE_SIZE: " + QUEUE_SIZE);
Map<MediaType, Mode> mediaTypesModes = SpecTools
.getModesOfFirstMediaTypes(localSessionSpec);
Mode videoMode = mediaTypesModes.get(MediaType.VIDEO);
RTPInfo remoteRTPInfo = new RTPInfo(remoteSessionSpec);
if (videoMode != null) {
VideoCodecType videoCodecType = remoteRTPInfo.getVideoCodecType();
VideoProfile videoProfile = VideoProfile
.getVideoProfileFromVideoCodecType(videoCodecType);
- if ((Mode.SENDRECV.equals(videoMode) || Mode.RECVONLY
+ if ((Mode.SENDRECV.equals(videoMode) || Mode.SENDONLY
.equals(videoMode)) && videoProfile != null) {
VideoInfoTx videoInfo = new VideoInfoTx(videoProfile);
videoInfo.setOut(remoteRTPInfo.getVideoRTPDir());
videoInfo.setPayloadType(remoteRTPInfo.getVideoPayloadType());
int ret = MediaTx.initVideo(videoInfo);
if (ret < 0) {
- Log.d(LOG_TAG, "Error in initVideo");
+ Log.e(LOG_TAG, "Error in initVideo");
MediaTx.finishVideo();
}
this.videoProfile = videoProfile;
this.videoTxThread = new VideoTxThread();
this.videoTxThread.start();
}
if ((Mode.SENDRECV.equals(videoMode) || Mode.RECVONLY
.equals(videoMode))) {
this.videoRxThread = new VideoRxThread(this);
this.videoRxThread.start();
}
}
}
@Override
public void putVideoFrame(byte[] data, int width, int height) {
if (framesQueue.size() >= QUEUE_SIZE)
framesQueue.pollLast();
framesQueue.offerFirst(new Frame(data, width, height));
}
@Override
public void putVideoFrameRx(int[] rgb, int width, int height) {
try {
for (Joinable j : getJoinees(Direction.SEND))
if (j instanceof VideoRx)
((VideoRx) j).putVideoFrameRx(rgb, width, height);
} catch (MsControlException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
public void stop() {
if (videoTxThread != null)
videoTxThread.interrupt();
Log.d(LOG_TAG, "finishVideo");
MediaTx.finishVideo();
Log.d(LOG_TAG, "stopVideoRx");
MediaRx.stopVideoRx();
}
private class VideoTxThread extends Thread {
@Override
public void run() {
int tFrame = 1000 / videoProfile.getFrameRate();
Frame frameProcessed;
try {
for (int i = 0; i < QUEUE_SIZE; i++)
txTimes.offerFirst(new Long(0));
for (;;) {
long t = System.currentTimeMillis();
long h = (t - txTimes.takeLast()) / QUEUE_SIZE;
if (h < tFrame) {
long s = tFrame - h;
sleep(s);
}
frameProcessed = framesQueue.takeLast();
txTimes.offerFirst(t);
MediaTx.putVideoFrame(frameProcessed.data,
frameProcessed.width, frameProcessed.height);
}
} catch (InterruptedException e) {
Log.d(LOG_TAG, "VideoTxThread stopped");
}
}
}
private class VideoRxThread extends Thread {
private VideoRx videoRx;
public VideoRxThread(VideoRx videoRx) {
this.videoRx = videoRx;
}
@Override
public void run() {
Log.d(LOG_TAG, "startVideoRx");
if (!SpecTools.filterMediaByType(localSessionSpec, "video")
.getMediaSpec().isEmpty()) {
String sdpVideo = SpecTools.filterMediaByType(localSessionSpec,
"video").toString();
MediaRx.startVideoRx(sdpVideo, this.videoRx);
}
}
}
}
| false
| true
|
public VideoJoinableStreamImpl(JoinableContainer container,
StreamType type, SessionSpec remoteSessionSpec,
SessionSpec localSessionSpec, Integer framesQueueSize) {
super(container, type);
this.localSessionSpec = localSessionSpec;
if (framesQueueSize != null && framesQueueSize > QUEUE_SIZE)
QUEUE_SIZE = framesQueueSize;
Log.d(LOG_TAG, "QUEUE_SIZE: " + QUEUE_SIZE);
Map<MediaType, Mode> mediaTypesModes = SpecTools
.getModesOfFirstMediaTypes(localSessionSpec);
Mode videoMode = mediaTypesModes.get(MediaType.VIDEO);
RTPInfo remoteRTPInfo = new RTPInfo(remoteSessionSpec);
if (videoMode != null) {
VideoCodecType videoCodecType = remoteRTPInfo.getVideoCodecType();
VideoProfile videoProfile = VideoProfile
.getVideoProfileFromVideoCodecType(videoCodecType);
if ((Mode.SENDRECV.equals(videoMode) || Mode.RECVONLY
.equals(videoMode)) && videoProfile != null) {
VideoInfoTx videoInfo = new VideoInfoTx(videoProfile);
videoInfo.setOut(remoteRTPInfo.getVideoRTPDir());
videoInfo.setPayloadType(remoteRTPInfo.getVideoPayloadType());
int ret = MediaTx.initVideo(videoInfo);
if (ret < 0) {
Log.d(LOG_TAG, "Error in initVideo");
MediaTx.finishVideo();
}
this.videoProfile = videoProfile;
this.videoTxThread = new VideoTxThread();
this.videoTxThread.start();
}
if ((Mode.SENDRECV.equals(videoMode) || Mode.RECVONLY
.equals(videoMode))) {
this.videoRxThread = new VideoRxThread(this);
this.videoRxThread.start();
}
}
}
|
public VideoJoinableStreamImpl(JoinableContainer container,
StreamType type, SessionSpec remoteSessionSpec,
SessionSpec localSessionSpec, Integer framesQueueSize) {
super(container, type);
this.localSessionSpec = localSessionSpec;
if (framesQueueSize != null && framesQueueSize > QUEUE_SIZE)
QUEUE_SIZE = framesQueueSize;
Log.d(LOG_TAG, "QUEUE_SIZE: " + QUEUE_SIZE);
Map<MediaType, Mode> mediaTypesModes = SpecTools
.getModesOfFirstMediaTypes(localSessionSpec);
Mode videoMode = mediaTypesModes.get(MediaType.VIDEO);
RTPInfo remoteRTPInfo = new RTPInfo(remoteSessionSpec);
if (videoMode != null) {
VideoCodecType videoCodecType = remoteRTPInfo.getVideoCodecType();
VideoProfile videoProfile = VideoProfile
.getVideoProfileFromVideoCodecType(videoCodecType);
if ((Mode.SENDRECV.equals(videoMode) || Mode.SENDONLY
.equals(videoMode)) && videoProfile != null) {
VideoInfoTx videoInfo = new VideoInfoTx(videoProfile);
videoInfo.setOut(remoteRTPInfo.getVideoRTPDir());
videoInfo.setPayloadType(remoteRTPInfo.getVideoPayloadType());
int ret = MediaTx.initVideo(videoInfo);
if (ret < 0) {
Log.e(LOG_TAG, "Error in initVideo");
MediaTx.finishVideo();
}
this.videoProfile = videoProfile;
this.videoTxThread = new VideoTxThread();
this.videoTxThread.start();
}
if ((Mode.SENDRECV.equals(videoMode) || Mode.RECVONLY
.equals(videoMode))) {
this.videoRxThread = new VideoRxThread(this);
this.videoRxThread.start();
}
}
}
|
diff --git a/proxy/src/main/java/org/fedoraproject/candlepin/policy/js/entitlement/EntitlementRules.java b/proxy/src/main/java/org/fedoraproject/candlepin/policy/js/entitlement/EntitlementRules.java
index 25c77d8f4..3a43ec132 100644
--- a/proxy/src/main/java/org/fedoraproject/candlepin/policy/js/entitlement/EntitlementRules.java
+++ b/proxy/src/main/java/org/fedoraproject/candlepin/policy/js/entitlement/EntitlementRules.java
@@ -1,583 +1,584 @@
/**
* Copyright (c) 2009 Red Hat, Inc.
*
* This software is licensed to you under the GNU General Public License,
* version 2 (GPLv2). There is NO WARRANTY for this software, express or
* implied, including the implied warranties of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
* along with this software; if not, see
* http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
*
* Red Hat trademarks are not licensed under GPLv2. No permission is
* granted to use or replicate Red Hat trademarks that are incorporated
* in this software or its documentation.
*/
package org.fedoraproject.candlepin.policy.js.entitlement;
import java.io.Reader;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.script.Invocable;
import javax.script.ScriptEngine;
import javax.script.ScriptException;
import org.apache.log4j.Logger;
import org.fedoraproject.candlepin.model.Attribute;
import org.fedoraproject.candlepin.model.Consumer;
import org.fedoraproject.candlepin.model.Entitlement;
import org.fedoraproject.candlepin.model.Pool;
import org.fedoraproject.candlepin.model.Product;
import org.fedoraproject.candlepin.policy.Enforcer;
import org.fedoraproject.candlepin.policy.ValidationError;
import org.fedoraproject.candlepin.policy.ValidationWarning;
import org.fedoraproject.candlepin.policy.js.ReadOnlyConsumer;
import org.fedoraproject.candlepin.policy.js.ReadOnlyEntitlement;
import org.fedoraproject.candlepin.policy.js.ReadOnlyPool;
import org.fedoraproject.candlepin.policy.js.ReadOnlyProduct;
import org.fedoraproject.candlepin.policy.js.ReadOnlyProductCache;
import org.fedoraproject.candlepin.policy.js.RuleExecutionException;
import org.fedoraproject.candlepin.policy.js.RuleParseException;
import org.fedoraproject.candlepin.service.ProductServiceAdapter;
import org.fedoraproject.candlepin.util.DateSource;
import org.xnap.commons.i18n.I18n;
import com.google.inject.Inject;
import com.google.inject.name.Named;
import edu.emory.mathcs.backport.java.util.Arrays;
/**
* Enforces the Javascript Rules definition.
*/
public class EntitlementRules implements Enforcer {
private static Logger log = Logger.getLogger(EntitlementRules.class);
private static Logger rulesLogger =
Logger.getLogger(EntitlementRules.class.getCanonicalName() + ".rules");
private DateSource dateSource;
private ProductServiceAdapter prodAdapter;
private ScriptEngine jsEngine;
private I18n i18n;
private final Map<String, Set<Rule>> attributesToRules;
private Object entitlementNameSpace;
private static final String PROD_ARCHITECTURE_SEPARATOR = ",";
private static final String PRE_PREFIX = "pre_";
private static final String POST_PREFIX = "post_";
private static final String SELECT_POOL_PREFIX = "select_pool_";
private static final String GLOBAL_SELECT_POOL_FUNCTION = SELECT_POOL_PREFIX +
"global";
private static final String GLOBAL_PRE_FUNCTION = PRE_PREFIX + "global";
private static final String GLOBAL_POST_FUNCTION = POST_PREFIX + "global";
// Since we can't peek inside the return values from our java code,
// do the conversion from a js array to a java array inside rhino's context.
private static final String CONVERT_ARRAY_FUNCTION =
"function convertArray(type, arr) {\n" +
" if (arr == null) {\n" +
" return null;\n" +
" }\n" +
"\n" +
" var jArr = java.lang.reflect.Array.newInstance(type, arr.length);\n" +
" for (var i = 0; i < arr.length; i++) {\n" +
" jArr[i] = arr[i];\n" +
" }\n" +
" return jArr;\n" +
"};\n";
@Inject
public EntitlementRules(DateSource dateSource,
@Named("RulesReader") Reader rulesReader,
ProductServiceAdapter prodAdapter,
ScriptEngine jsEngine, I18n i18n) {
this.dateSource = dateSource;
this.prodAdapter = prodAdapter;
this.jsEngine = jsEngine;
this.i18n = i18n;
if (jsEngine == null) {
throw new RuntimeException("No Javascript engine");
}
try {
this.jsEngine.eval(rulesReader);
entitlementNameSpace =
((Invocable) this.jsEngine).invokeFunction("entitlement_name_space");
attributesToRules = parseAttributeMappings(
(String) ((Invocable) this.jsEngine).invokeMethod(
entitlementNameSpace, "attribute_mappings"));
this.jsEngine.eval(CONVERT_ARRAY_FUNCTION);
}
catch (ScriptException ex) {
throw new RuleParseException(ex);
}
catch (NoSuchMethodException ex) {
throw new RuleParseException(ex);
}
}
@Override
public PreEntHelper preEntitlement(
Consumer consumer, Pool entitlementPool, Integer quantity) {
PreEntHelper preHelper = runPreEntitlement(consumer, entitlementPool, quantity);
if (entitlementPool.isExpired(dateSource)) {
preHelper.getResult().addError(
new ValidationError(i18n.tr("Entitlements for {0} expired on: {1}",
entitlementPool.getProductId(),
entitlementPool.getEndDate())));
}
return preHelper;
}
/**
* Both products and pools can carry attributes, we need to trigger rules for each.
* In this map, pool attributes will override product attributes, should the same
* key be set for both.
*
* @param product Product
* @param pool Pool can be null.
* @return Map of all attribute names and values. Pool attributes have priority.
*/
private Map<String, String> getFlattenedAttributes(Product product, Pool pool) {
Map<String, String> allAttributes = new HashMap<String, String>();
for (Attribute a : product.getAttributes()) {
allAttributes.put(a.getName(), a.getValue());
}
if (pool != null) {
for (Attribute a : pool.getAttributes()) {
allAttributes.put(a.getName(), a.getValue());
}
}
return allAttributes;
}
private PreEntHelper runPreEntitlement(Consumer consumer, Pool pool, Integer quantity) {
PreEntHelper preHelper = new PreEntHelper(quantity);
// Provide objects for the script:
String topLevelProductId = pool.getProductId();
Product product = prodAdapter.getProductById(topLevelProductId);
Map<String, String> allAttributes = getFlattenedAttributes(product, pool);
jsEngine.put("consumer", new ReadOnlyConsumer(consumer));
jsEngine.put("product", new ReadOnlyProduct(product));
jsEngine.put("pool", new ReadOnlyPool(pool, new ReadOnlyProductCache(prodAdapter)));
jsEngine.put("pre", preHelper);
jsEngine.put("attributes", allAttributes);
jsEngine.put("prodAttrSeparator", PROD_ARCHITECTURE_SEPARATOR);
jsEngine.put("log", rulesLogger);
log.debug("Running pre-entitlement rules for: " + consumer.getUuid() +
" product: " + topLevelProductId);
List<Rule> matchingRules
= rulesForAttributes(allAttributes.keySet(), attributesToRules);
if (matchingRules.isEmpty()) {
invokeGlobalPreEntitlementRule();
}
else {
callPreEntitlementRules(matchingRules);
}
if (log.isDebugEnabled()) {
for (ValidationError error : preHelper.getResult().getErrors()) {
log.debug(" Rule error: " + error.getResourceKey());
}
for (ValidationWarning warning : preHelper.getResult().getWarnings()) {
log.debug(" Rule warning: " + warning.getResourceKey());
}
}
return preHelper;
}
@Override
public PostEntHelper postEntitlement(
Consumer consumer, PostEntHelper postEntHelper, Entitlement ent) {
runPostEntitlement(postEntHelper, ent);
return postEntHelper;
}
private void runPostEntitlement(PostEntHelper postHelper, Entitlement ent) {
Pool pool = ent.getPool();
Consumer c = ent.getConsumer();
// Provide objects for the script:
String topLevelProductId = pool.getProductId();
Product product = prodAdapter.getProductById(topLevelProductId);
Map<String, String> allAttributes = getFlattenedAttributes(product, pool);
jsEngine.put("consumer", new ReadOnlyConsumer(c));
jsEngine.put("product", new ReadOnlyProduct(product));
jsEngine.put("post", postHelper);
jsEngine.put("pool", new ReadOnlyPool(pool, new ReadOnlyProductCache(prodAdapter)));
jsEngine.put("entitlement", new ReadOnlyEntitlement(ent));
jsEngine.put("attributes", allAttributes);
jsEngine.put("log", rulesLogger);
log.debug("Running post-entitlement rules for: " + c.getUuid() +
" product: " + topLevelProductId);
List<Rule> matchingRules
= rulesForAttributes(allAttributes.keySet(), attributesToRules);
if (matchingRules.isEmpty()) {
invokeGlobalPostEntitlementRule();
}
else {
callPostEntitlementRules(matchingRules);
}
}
public List<Pool> selectBestPools(Consumer consumer, String[] productIds,
List<Pool> pools) {
Invocable inv = (Invocable) jsEngine;
ReadOnlyProductCache productCache = new ReadOnlyProductCache(prodAdapter);
- log.info("Selecting best entitlement pool for product: " + Arrays.toString(productIds));
+ log.info("Selecting best entitlement pool for product: " +
+ Arrays.toString(productIds));
List<ReadOnlyPool> readOnlyPools = ReadOnlyPool.fromCollection(pools, productCache);
List<Product> products = new LinkedList<Product>();
Set<Rule> matchingRules = new HashSet<Rule>();
for (String productId : productIds) {
Product product = prodAdapter.getProductById(productId);
products.add(product);
Map<String, String> allAttributes = getFlattenedAttributes(product, null);
matchingRules.addAll(rulesForAttributes(allAttributes.keySet(),
attributesToRules));
}
Set<ReadOnlyProduct> readOnlyProducts = ReadOnlyProduct.fromProducts(products);
productCache.addProducts(readOnlyProducts);
// Provide objects for the script:
jsEngine.put("pools", readOnlyPools.toArray());
jsEngine.put("products", readOnlyProducts.toArray());
jsEngine.put("log", log);
ReadOnlyPool[] result = null;
boolean foundMatchingRule = false;
for (Rule rule : matchingRules) {
try {
Object output = inv.invokeMethod(entitlementNameSpace,
SELECT_POOL_PREFIX + rule.getRuleName());
result = (ReadOnlyPool[]) inv.invokeFunction("convertArray",
org.fedoraproject.candlepin.policy.js.ReadOnlyPool.class,
output);
foundMatchingRule = true;
log.info("Excuted javascript rule: " + SELECT_POOL_PREFIX +
rule.getRuleName());
break;
}
catch (NoSuchMethodException e) {
// continue on to the next rule in the list.
}
catch (ScriptException e) {
throw new RuleExecutionException(e);
}
}
if (!foundMatchingRule) {
try {
Object output = inv.invokeMethod(entitlementNameSpace,
GLOBAL_SELECT_POOL_FUNCTION);
result = (ReadOnlyPool[]) inv.invokeFunction("convertArray",
org.fedoraproject.candlepin.policy.js.ReadOnlyPool.class,
output);
log.info("Excuted javascript rule: " +
GLOBAL_SELECT_POOL_FUNCTION);
}
catch (NoSuchMethodException ex) {
log.warn("No default rule found: " +
GLOBAL_SELECT_POOL_FUNCTION);
log.warn("Resorting to default pool selection behavior.");
return selectBestPoolDefault(pools);
}
catch (ScriptException ex) {
throw new RuleExecutionException(ex);
}
}
if (pools.size() > 0 && result == null) {
throw new RuleExecutionException(
- "Rule did not select a pool for products: " + Arrays.toString(productIds));
+ "Rule did not select a pool for products: " + Arrays.toString(productIds));
}
List<Pool> bestPools = new LinkedList<Pool>();
for (Pool p : pools) {
for (ReadOnlyPool rp : result) {
rp.getId();
p.getId().equals("foo");
if (p.getId().equals(rp.getId())) {
log.debug("Best pool: " + p);
bestPools.add(p);
break;
}
}
}
if (bestPools.size() > 0) {
return bestPools;
}
else {
return null;
}
}
/**
* Default behavior if no product specific and no global pool select rules
* exist.
*
* @param pools
* Pools to choose from.
* @return First pool in the list. (default behavior)
*/
private List<Pool> selectBestPoolDefault(List<Pool> pools) {
if (pools.size() > 0) {
return pools;
}
return null;
}
public List<Rule> rulesForAttributes(Set<String> attributes,
Map<String, Set<Rule>> rules) {
Set<Rule> possibleMatches = new HashSet<Rule>();
for (String attribute : attributes) {
if (rules.containsKey(attribute)) {
possibleMatches.addAll(rules.get(attribute));
}
}
List<Rule> matches = new LinkedList<Rule>();
for (Rule rule : possibleMatches) {
if (attributes.containsAll(rule.getAttributes())) {
matches.add(rule);
}
}
// Always run the global rule, and run it first
matches.add(new Rule("global", 0, new HashSet<String>()));
Collections.sort(matches, new RuleOrderComparator());
return matches;
}
public Map<String, Set<Rule>> parseAttributeMappings(String mappings) {
Map<String, Set<Rule>> toReturn = new HashMap<String, Set<Rule>>();
if (mappings.trim().isEmpty()) {
return toReturn;
}
String[] separatedMappings = mappings.split(",");
for (String mapping : separatedMappings) {
Rule rule = parseRule(mapping);
for (String attribute : rule.getAttributes()) {
if (!toReturn.containsKey(attribute)) {
toReturn.put(attribute,
new HashSet<Rule>(Collections.singletonList(rule)));
}
toReturn.get(attribute).add(rule);
}
}
return toReturn;
}
public Rule parseRule(String toParse) {
String[] tokens = toParse.split(":");
if (tokens.length < 3) {
throw new IllegalArgumentException(
i18n.tr(
"'{0}' Should contain name, priority and at least one attribute",
toParse)
);
}
Set<String> attributes = new HashSet<String>();
for (int i = 2; i < tokens.length; i++) {
attributes.add(tokens[i].trim());
}
try {
return new Rule(tokens[0].trim(), Integer.parseInt(tokens[1]), attributes);
}
catch (NumberFormatException e) {
throw new IllegalArgumentException(
i18n.tr("second parameter should be the priority number.", e));
}
}
private void callPreEntitlementRules(List<Rule> matchingRules) {
Invocable inv = (Invocable) jsEngine;
for (Rule rule : matchingRules) {
try {
inv.invokeMethod(entitlementNameSpace, PRE_PREFIX + rule.getRuleName());
log.debug("Ran rule: " + PRE_PREFIX + rule.getRuleName());
}
catch (NoSuchMethodException e) {
invokeGlobalPreEntitlementRule();
}
catch (ScriptException e) {
throw new RuleExecutionException(e);
}
}
}
private void callPostEntitlementRules(List<Rule> matchingRules) {
Invocable inv = (Invocable) jsEngine;
for (Rule rule : matchingRules) {
try {
inv.invokeMethod(entitlementNameSpace, POST_PREFIX + rule.getRuleName());
log.debug("Ran rule: " + POST_PREFIX + rule.getRuleName());
}
catch (NoSuchMethodException e) {
invokeGlobalPostEntitlementRule();
}
catch (ScriptException e) {
throw new RuleExecutionException(e);
}
}
}
private void invokeGlobalPreEntitlementRule() {
Invocable inv = (Invocable) jsEngine;
// No method for this product, try to find a global function, if
// neither exists this is ok and we'll just carry on.
try {
inv.invokeMethod(entitlementNameSpace, GLOBAL_PRE_FUNCTION);
log.debug("Ran rule: " + GLOBAL_PRE_FUNCTION);
}
catch (NoSuchMethodException ex) {
// This is fine, I hope...
log.warn("No default rule found: " + GLOBAL_PRE_FUNCTION);
}
catch (ScriptException ex) {
throw new RuleExecutionException(ex);
}
}
private void invokeGlobalPostEntitlementRule() {
Invocable inv = (Invocable) jsEngine;
// No method for this product, try to find a global function, if
// neither exists this is ok and we'll just carry on.
try {
inv.invokeFunction(GLOBAL_POST_FUNCTION);
log.debug("Ran rule: " + GLOBAL_POST_FUNCTION);
}
catch (NoSuchMethodException ex) {
// This is fine, I hope...
log.warn("No default rule found: " + GLOBAL_POST_FUNCTION);
}
catch (ScriptException ex) {
throw new RuleExecutionException(ex);
}
}
/**
* RuleOrderComparator
*/
public static class RuleOrderComparator implements Comparator<Rule> {
@Override
public int compare(Rule o1, Rule o2) {
return Integer.valueOf(o2.getOrder()).compareTo(
Integer.valueOf(o1.getOrder()));
}
}
/**
* Rule
*/
public static class Rule {
private final String ruleName;
private final int order;
private final Set<String> attributes;
public Rule(String ruleName, int order, Set<String> attributes) {
this.ruleName = ruleName;
this.order = order;
this.attributes = attributes;
}
public String getRuleName() {
return ruleName;
}
public int getOrder() {
return order;
}
public Set<String> getAttributes() {
return attributes;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result +
((attributes == null) ? 0 : attributes.hashCode());
result = prime * result + order;
result = prime * result +
((ruleName == null) ? 0 : ruleName.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
Rule other = (Rule) obj;
if (attributes == null) {
if (other.attributes != null) {
return false;
}
}
else if (!attributes.equals(other.attributes)) {
return false;
}
if (order != other.order) {
return false;
}
if (ruleName == null) {
if (other.ruleName != null) {
return false;
}
}
else if (!ruleName.equals(other.ruleName)) {
return false;
}
return true;
}
public String toString() {
return "'" + ruleName + "':" + order + ":" + attributes.toString();
}
}
}
| false
| true
|
public List<Pool> selectBestPools(Consumer consumer, String[] productIds,
List<Pool> pools) {
Invocable inv = (Invocable) jsEngine;
ReadOnlyProductCache productCache = new ReadOnlyProductCache(prodAdapter);
log.info("Selecting best entitlement pool for product: " + Arrays.toString(productIds));
List<ReadOnlyPool> readOnlyPools = ReadOnlyPool.fromCollection(pools, productCache);
List<Product> products = new LinkedList<Product>();
Set<Rule> matchingRules = new HashSet<Rule>();
for (String productId : productIds) {
Product product = prodAdapter.getProductById(productId);
products.add(product);
Map<String, String> allAttributes = getFlattenedAttributes(product, null);
matchingRules.addAll(rulesForAttributes(allAttributes.keySet(),
attributesToRules));
}
Set<ReadOnlyProduct> readOnlyProducts = ReadOnlyProduct.fromProducts(products);
productCache.addProducts(readOnlyProducts);
// Provide objects for the script:
jsEngine.put("pools", readOnlyPools.toArray());
jsEngine.put("products", readOnlyProducts.toArray());
jsEngine.put("log", log);
ReadOnlyPool[] result = null;
boolean foundMatchingRule = false;
for (Rule rule : matchingRules) {
try {
Object output = inv.invokeMethod(entitlementNameSpace,
SELECT_POOL_PREFIX + rule.getRuleName());
result = (ReadOnlyPool[]) inv.invokeFunction("convertArray",
org.fedoraproject.candlepin.policy.js.ReadOnlyPool.class,
output);
foundMatchingRule = true;
log.info("Excuted javascript rule: " + SELECT_POOL_PREFIX +
rule.getRuleName());
break;
}
catch (NoSuchMethodException e) {
// continue on to the next rule in the list.
}
catch (ScriptException e) {
throw new RuleExecutionException(e);
}
}
if (!foundMatchingRule) {
try {
Object output = inv.invokeMethod(entitlementNameSpace,
GLOBAL_SELECT_POOL_FUNCTION);
result = (ReadOnlyPool[]) inv.invokeFunction("convertArray",
org.fedoraproject.candlepin.policy.js.ReadOnlyPool.class,
output);
log.info("Excuted javascript rule: " +
GLOBAL_SELECT_POOL_FUNCTION);
}
catch (NoSuchMethodException ex) {
log.warn("No default rule found: " +
GLOBAL_SELECT_POOL_FUNCTION);
log.warn("Resorting to default pool selection behavior.");
return selectBestPoolDefault(pools);
}
catch (ScriptException ex) {
throw new RuleExecutionException(ex);
}
}
if (pools.size() > 0 && result == null) {
throw new RuleExecutionException(
"Rule did not select a pool for products: " + Arrays.toString(productIds));
}
List<Pool> bestPools = new LinkedList<Pool>();
for (Pool p : pools) {
for (ReadOnlyPool rp : result) {
rp.getId();
p.getId().equals("foo");
if (p.getId().equals(rp.getId())) {
log.debug("Best pool: " + p);
bestPools.add(p);
break;
}
}
}
if (bestPools.size() > 0) {
return bestPools;
}
else {
return null;
}
}
|
public List<Pool> selectBestPools(Consumer consumer, String[] productIds,
List<Pool> pools) {
Invocable inv = (Invocable) jsEngine;
ReadOnlyProductCache productCache = new ReadOnlyProductCache(prodAdapter);
log.info("Selecting best entitlement pool for product: " +
Arrays.toString(productIds));
List<ReadOnlyPool> readOnlyPools = ReadOnlyPool.fromCollection(pools, productCache);
List<Product> products = new LinkedList<Product>();
Set<Rule> matchingRules = new HashSet<Rule>();
for (String productId : productIds) {
Product product = prodAdapter.getProductById(productId);
products.add(product);
Map<String, String> allAttributes = getFlattenedAttributes(product, null);
matchingRules.addAll(rulesForAttributes(allAttributes.keySet(),
attributesToRules));
}
Set<ReadOnlyProduct> readOnlyProducts = ReadOnlyProduct.fromProducts(products);
productCache.addProducts(readOnlyProducts);
// Provide objects for the script:
jsEngine.put("pools", readOnlyPools.toArray());
jsEngine.put("products", readOnlyProducts.toArray());
jsEngine.put("log", log);
ReadOnlyPool[] result = null;
boolean foundMatchingRule = false;
for (Rule rule : matchingRules) {
try {
Object output = inv.invokeMethod(entitlementNameSpace,
SELECT_POOL_PREFIX + rule.getRuleName());
result = (ReadOnlyPool[]) inv.invokeFunction("convertArray",
org.fedoraproject.candlepin.policy.js.ReadOnlyPool.class,
output);
foundMatchingRule = true;
log.info("Excuted javascript rule: " + SELECT_POOL_PREFIX +
rule.getRuleName());
break;
}
catch (NoSuchMethodException e) {
// continue on to the next rule in the list.
}
catch (ScriptException e) {
throw new RuleExecutionException(e);
}
}
if (!foundMatchingRule) {
try {
Object output = inv.invokeMethod(entitlementNameSpace,
GLOBAL_SELECT_POOL_FUNCTION);
result = (ReadOnlyPool[]) inv.invokeFunction("convertArray",
org.fedoraproject.candlepin.policy.js.ReadOnlyPool.class,
output);
log.info("Excuted javascript rule: " +
GLOBAL_SELECT_POOL_FUNCTION);
}
catch (NoSuchMethodException ex) {
log.warn("No default rule found: " +
GLOBAL_SELECT_POOL_FUNCTION);
log.warn("Resorting to default pool selection behavior.");
return selectBestPoolDefault(pools);
}
catch (ScriptException ex) {
throw new RuleExecutionException(ex);
}
}
if (pools.size() > 0 && result == null) {
throw new RuleExecutionException(
"Rule did not select a pool for products: " + Arrays.toString(productIds));
}
List<Pool> bestPools = new LinkedList<Pool>();
for (Pool p : pools) {
for (ReadOnlyPool rp : result) {
rp.getId();
p.getId().equals("foo");
if (p.getId().equals(rp.getId())) {
log.debug("Best pool: " + p);
bestPools.add(p);
break;
}
}
}
if (bestPools.size() > 0) {
return bestPools;
}
else {
return null;
}
}
|
diff --git a/src/main/java/de/cosmocode/palava/ipc/session/infinispan/SessionProvider.java b/src/main/java/de/cosmocode/palava/ipc/session/infinispan/SessionProvider.java
index 1fe6d18..bb79263 100644
--- a/src/main/java/de/cosmocode/palava/ipc/session/infinispan/SessionProvider.java
+++ b/src/main/java/de/cosmocode/palava/ipc/session/infinispan/SessionProvider.java
@@ -1,226 +1,227 @@
/**
* Copyright 2010 CosmoCode GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.cosmocode.palava.ipc.session.infinispan;
import java.util.UUID;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import org.infinispan.Cache;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Preconditions;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import com.google.inject.name.Named;
import de.cosmocode.palava.concurrent.BackgroundScheduler;
import de.cosmocode.palava.core.Registry;
import de.cosmocode.palava.core.lifecycle.Disposable;
import de.cosmocode.palava.core.lifecycle.Initializable;
import de.cosmocode.palava.core.lifecycle.LifecycleException;
import de.cosmocode.palava.ipc.IpcConnection;
import de.cosmocode.palava.ipc.IpcConnectionDestroyEvent;
import de.cosmocode.palava.ipc.IpcSession;
import de.cosmocode.palava.ipc.IpcSessionConfig;
import de.cosmocode.palava.ipc.IpcSessionNotAttachedException;
import de.cosmocode.palava.ipc.IpcSessionProvider;
import de.cosmocode.palava.ipc.session.infinispan.Session.Key;
import de.cosmocode.palava.jmx.MBeanService;
/**
* Session provider baced by a {@link Cache}.
*
* @author Tobias Sarnowski
*/
@Singleton
final class SessionProvider implements IpcSessionProvider, Initializable, Runnable,
IpcConnectionDestroyEvent, Disposable, SessionProviderMBean {
private static final Logger LOG = LoggerFactory.getLogger(SessionProvider.class);
private final Cache<Key, IpcSession> cache;
private final Registry registry;
private final MBeanService mBeanService;
private final ScheduledExecutorService scheduler;
private long initialCheckDelay = 1;
private long checkPeriod = 15;
private TimeUnit checkPeriodUnit = TimeUnit.MINUTES;
private final long expirationTime;
private final TimeUnit expirationTimeUnit;
@Inject
@SuppressWarnings("unchecked")
public SessionProvider(
Registry registry,
MBeanService mBeanService,
- @SessionCache Cache<?, ?> cache,
+ // don't use generics here, will break injection
+ @SuppressWarnings("rawtypes") @SessionCache Cache cache,
@BackgroundScheduler ScheduledExecutorService scheduler,
@Named(IpcSessionConfig.EXPIRATION_TIME) long time,
@Named(IpcSessionConfig.EXPIRATION_TIME_UNIT) TimeUnit timeUnit) {
this.registry = Preconditions.checkNotNull(registry, "Registry");
this.mBeanService = Preconditions.checkNotNull(mBeanService, "MBeanService");
this.cache = (Cache<Session.Key, IpcSession>) Preconditions.checkNotNull(cache, "Cache");
this.scheduler = Preconditions.checkNotNull(scheduler, "Scheduler");
this.expirationTime = time;
this.expirationTimeUnit = Preconditions.checkNotNull(timeUnit, "TimeUnit");
}
@Inject(optional = true)
void setInitialCheckDelay(@Named(InfinispanSessionConfig.INITIAL_CHECK_DELAY) long initialCheckDelay) {
this.initialCheckDelay = initialCheckDelay;
}
@Inject(optional = true)
void setCheckPeriod(@Named(InfinispanSessionConfig.CHECK_PERIOD) long checkPeriod) {
this.checkPeriod = checkPeriod;
}
@Inject(optional = true)
void setCheckPeriodUnit(@Named(InfinispanSessionConfig.CHECK_PERIOD_UNIT) TimeUnit checkPeriodUnit) {
this.checkPeriodUnit = Preconditions.checkNotNull(checkPeriodUnit, "CheckPeriodUnit");
}
@Override
public void initialize() throws LifecycleException {
registry.register(IpcConnectionDestroyEvent.class, this);
final String unit = checkPeriodUnit.name().toLowerCase();
LOG.info("Scheduling {} in {} {} and then periodically every {} {}", new Object[] {
this, initialCheckDelay, unit, checkPeriod, unit
});
scheduler.scheduleAtFixedRate(this, initialCheckDelay, checkPeriod, checkPeriodUnit);
mBeanService.register(this);
}
@Override
public IpcSession getSession(String sessionId, String identifier) {
IpcSession session = cache.get(new Key(sessionId, identifier));
if (session != null && session.isExpired()) {
expireSession(session);
session = null;
}
if (session == null) {
session = new Session(UUID.randomUUID().toString(), identifier, expirationTime, expirationTimeUnit);
LOG.info("Created {}", session);
}
return session;
}
@Override
public void run() {
for (IpcSession session : cache.values()) {
if (session.isExpired()) {
expireSession(session);
}
}
}
private void expireSession(IpcSession session) {
LOG.info("Expiring {}", session);
try {
cache.removeAsync(Key.get(session));
} finally {
session.clear();
}
}
@Override
public void eventIpcConnectionDestroy(IpcConnection connection) {
final IpcSession session;
try {
session = connection.getSession();
} catch (IpcSessionNotAttachedException e) {
return;
}
cache.put(Key.get(session), session);
}
@Override
public int getCurrentNumberOfEntries() {
return cache.getAdvancedCache().getStats().getCurrentNumberOfEntries();
}
@Override
public long getEvictions() {
return cache.getAdvancedCache().getStats().getEvictions();
}
@Override
public long getHits() {
return cache.getAdvancedCache().getStats().getHits();
}
@Override
public long getMisses() {
return cache.getAdvancedCache().getStats().getMisses();
}
@Override
public long getRemoveHits() {
return cache.getAdvancedCache().getStats().getRemoveHits();
}
@Override
public long getRemoveMisses() {
return cache.getAdvancedCache().getStats().getRemoveMisses();
}
@Override
public long getRetrievals() {
return cache.getAdvancedCache().getStats().getRetrievals();
}
@Override
public long getStores() {
return cache.getAdvancedCache().getStats().getStores();
}
@Override
public long getTimeSinceStart() {
return cache.getAdvancedCache().getStats().getTimeSinceStart();
}
@Override
public long getTotalNumberOfEntries() {
return cache.getAdvancedCache().getStats().getTotalNumberOfEntries();
}
@Override
public void dispose() throws LifecycleException {
try {
mBeanService.unregister(this);
} finally {
registry.remove(this);
}
}
@Override
public String toString() {
return "SessionProvider {" + "cache=" + cache + '}';
}
}
| true
| true
|
public SessionProvider(
Registry registry,
MBeanService mBeanService,
@SessionCache Cache<?, ?> cache,
@BackgroundScheduler ScheduledExecutorService scheduler,
@Named(IpcSessionConfig.EXPIRATION_TIME) long time,
@Named(IpcSessionConfig.EXPIRATION_TIME_UNIT) TimeUnit timeUnit) {
this.registry = Preconditions.checkNotNull(registry, "Registry");
this.mBeanService = Preconditions.checkNotNull(mBeanService, "MBeanService");
this.cache = (Cache<Session.Key, IpcSession>) Preconditions.checkNotNull(cache, "Cache");
this.scheduler = Preconditions.checkNotNull(scheduler, "Scheduler");
this.expirationTime = time;
this.expirationTimeUnit = Preconditions.checkNotNull(timeUnit, "TimeUnit");
}
|
public SessionProvider(
Registry registry,
MBeanService mBeanService,
// don't use generics here, will break injection
@SuppressWarnings("rawtypes") @SessionCache Cache cache,
@BackgroundScheduler ScheduledExecutorService scheduler,
@Named(IpcSessionConfig.EXPIRATION_TIME) long time,
@Named(IpcSessionConfig.EXPIRATION_TIME_UNIT) TimeUnit timeUnit) {
this.registry = Preconditions.checkNotNull(registry, "Registry");
this.mBeanService = Preconditions.checkNotNull(mBeanService, "MBeanService");
this.cache = (Cache<Session.Key, IpcSession>) Preconditions.checkNotNull(cache, "Cache");
this.scheduler = Preconditions.checkNotNull(scheduler, "Scheduler");
this.expirationTime = time;
this.expirationTimeUnit = Preconditions.checkNotNull(timeUnit, "TimeUnit");
}
|
diff --git a/src/java/org/infoglue/cms/applications/managementtool/actions/DeleteRepositoryAction.java b/src/java/org/infoglue/cms/applications/managementtool/actions/DeleteRepositoryAction.java
index 8fb9deb2c..2d164847d 100755
--- a/src/java/org/infoglue/cms/applications/managementtool/actions/DeleteRepositoryAction.java
+++ b/src/java/org/infoglue/cms/applications/managementtool/actions/DeleteRepositoryAction.java
@@ -1,71 +1,71 @@
/* ===============================================================================
*
* Part of the InfoGlue Content Management Platform (www.infoglue.org)
*
* ===============================================================================
*
* Copyright (C)
*
* This program is free software; you can redistribute it and/or modify it under
* the terms of the GNU General Public License version 2, as published by the
* Free Software Foundation. See the file LICENSE.html for more information.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY, including the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with
* this program; if not, write to the Free Software Foundation, Inc. / 59 Temple
* Place, Suite 330 / Boston, MA 02111-1307 / USA.
*
* ===============================================================================
*/
package org.infoglue.cms.applications.managementtool.actions;
import org.infoglue.cms.applications.common.actions.WebworkAbstractAction;
import org.infoglue.cms.controllers.kernel.impl.simple.*;
import org.infoglue.cms.entities.management.RepositoryVO;
import org.infoglue.cms.exception.*;
/**
* This action removes a repository from the system.
*
* @author Mattias Bogeblad
*/
public class DeleteRepositoryAction extends WebworkAbstractAction
{
private RepositoryVO repositoryVO;
private Integer repositoryId;
public DeleteRepositoryAction()
{
this(new RepositoryVO());
}
public DeleteRepositoryAction(RepositoryVO repositoryVO)
{
this.repositoryVO = repositoryVO;
}
- protected String doExecute() throws Exception
+ protected String doExecute() throws ConstraintException, Exception
{
this.repositoryVO.setRepositoryId(this.getRepositoryId());
RepositoryController.getController().delete(this.repositoryVO, this.getInfoGluePrincipal().getName());
return "success";
}
public void setRepositoryId(Integer repositoryId) throws SystemException
{
this.repositoryVO.setRepositoryId(repositoryId);
}
public java.lang.Integer getRepositoryId()
{
return this.repositoryVO.getRepositoryId();
}
}
| true
| true
|
protected String doExecute() throws Exception
{
this.repositoryVO.setRepositoryId(this.getRepositoryId());
RepositoryController.getController().delete(this.repositoryVO, this.getInfoGluePrincipal().getName());
return "success";
}
|
protected String doExecute() throws ConstraintException, Exception
{
this.repositoryVO.setRepositoryId(this.getRepositoryId());
RepositoryController.getController().delete(this.repositoryVO, this.getInfoGluePrincipal().getName());
return "success";
}
|
diff --git a/WEB-INF/src/edu/wustl/query/action/WorkflowAjaxHandlerAction.java b/WEB-INF/src/edu/wustl/query/action/WorkflowAjaxHandlerAction.java
index 4f6fe8b3..1aff450c 100644
--- a/WEB-INF/src/edu/wustl/query/action/WorkflowAjaxHandlerAction.java
+++ b/WEB-INF/src/edu/wustl/query/action/WorkflowAjaxHandlerAction.java
@@ -1,339 +1,340 @@
package edu.wustl.query.action;
import java.io.IOException;
import java.io.Writer;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.struts.action.Action;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import org.json.JSONException;
import org.json.JSONObject;
import edu.wustl.cider.query.CiderWorkFlowDetails;
import edu.wustl.common.beans.SessionDataBean;
import edu.wustl.common.dao.AbstractDAO;
import edu.wustl.common.dao.DAOFactory;
import edu.wustl.common.query.factory.AbstractQueryManagerFactory;
import edu.wustl.common.query.factory.AbstractQueryUIManagerFactory;
import edu.wustl.common.util.logger.Logger;
import edu.wustl.query.bizlogic.BizLogicFactory;
import edu.wustl.query.bizlogic.WorkflowBizLogic;
import edu.wustl.query.domain.Workflow;
import edu.wustl.query.querymanager.AbstractQueryManager;
import edu.wustl.query.querymanager.Count;
import edu.wustl.query.util.global.Constants;
import edu.wustl.query.util.querysuite.AbstractQueryUIManager;
import edu.wustl.query.util.querysuite.QueryModuleException;
/**
* @author niharika_sharma
*
*/
public class WorkflowAjaxHandlerAction extends Action
{
/* (non-Javadoc)
* @see org.apache.struts.action.Action#execute(org.apache.struts.action.ActionMapping,
* org.apache.struts.action.ActionForm, javax.servlet.http.HttpServletRequest
* , javax.servlet.http. HttpServletResponse)
*/
@Override
public ActionForward execute(ActionMapping mapping, ActionForm form,
HttpServletRequest request, HttpServletResponse response) throws Exception
{
WorkflowBizLogic workflowBizLogic=new WorkflowBizLogic();
//for saving workflow when click execute
// Get the Query information
Long queryId=(long)-1;
String queryIdStr = request.getParameter("queryId");
if (queryIdStr != null && !"".equals(queryIdStr.trim()))
{
queryId=Long.valueOf(queryIdStr);
}
//String queryTitle=request.getParameter("queryTitle");
String operation = request.getParameter(Constants.OPERATION);
String state = request.getParameter("state");
Writer writer = response.getWriter();
if (operation != null && "execute".equals(operation.trim()))
{
// Fetch the current workflow
String workflowId=request.getParameter("workflowId");
Workflow workflow = null;
AbstractDAO dao = DAOFactory.getInstance().getDAO(Constants.HIBERNATE_DAO);
dao.openSession(null);
try
{
// Get the workflow
workflow = (Workflow) dao.retrieve(Workflow.class.getName(),
Long.valueOf(workflowId));
// get the current selected projectId
int project_id = 0;
if (request.getParameter(Constants.SELECTED_PROJECT) != null
&& !(request.getParameter(Constants.SELECTED_PROJECT))
.equals(""))
{
project_id = (Integer.valueOf((request
.getParameter(Constants.SELECTED_PROJECT)
.toString())));
}
// Get the current User Id
SessionDataBean sessionData = (SessionDataBean) request
.getSession().getAttribute(Constants.SESSION_DATA);
Long userId = sessionData.getUserId();
// Create a workflow details object
// FIXME - CiderWorkFlowdetails cannot be accessed from here.
// Need a
// good enough way to create the workflow details object
// specific to
// Cider and AdvancedQuery.
CiderWorkFlowDetails workflowdetails = new CiderWorkFlowDetails(
project_id, userId.intValue(), workflow);
workflowBizLogic = (WorkflowBizLogic) BizLogicFactory
.getInstance().getBizLogic(
Constants.WORKFLOW_BIZLOGIC_ID);
String queryExecIdStr = request.getParameter("executionLogId");
int queryExecId = -1;
if (queryExecIdStr != null && !"".equals(queryExecIdStr.trim())) {
queryExecId = Integer.valueOf(queryExecIdStr);
}
// TO DO
// WorkflowBizLogic--->executeGetCountQuery
// WorkflowBizLogic--->getCount
List<JSONObject> executionQueryResults = new ArrayList<JSONObject>();
if (state != null && state.equals("cancel"))
{
JSONObject resultObject = null;
resultObject = new JSONObject();
resultObject.append("queryId", queryId);
try
{
if (request.getParameter("removeExecutedCount").equals(
"true"))
{
resultObject.put("removeExecutedCount",
"removeExecutedCount");
}
AbstractQueryManager qManager = AbstractQueryManagerFactory
.getDefaultAbstractQueryManager();
qManager.cancel(queryExecId);
// Count
// resultCount=workflowBizLogic.getCount(queryExecId);
response.setContentType(Constants.CONTENT_TYPE_TEXT);
writer.write(resultObject.toString());
} catch (Exception e)
{
resultObject.append("execption", "execption");
// resultObject.append("executionLogId", qu>>>>>> .r5829
response.setContentType(Constants.CONTENT_TYPE_TEXT);
writer.write(resultObject.toString());
}
}
else
{
Count resultCount = null;
JSONObject jsonObject = null;
AbstractQueryUIManager qUIManager = AbstractQueryUIManagerFactory
.getDefaultAbstractUIQueryManager();
// Get the executionType
String execType = request
.getParameter(Constants.REQ_ATTRIB_EXECUTION_TYPE);
if (execType != null
&& Constants.EXECUTION_TYPE_WORKFLOW
.equalsIgnoreCase(execType.trim()))
{
Map<Long, Integer> executionIdMap = workflowBizLogic
.runWorkflow(workflowdetails,
request);
- executionQueryResults
- .addAll(generateExecutionQueryResults(
+ executionQueryResults=
+ generateExecutionQueryResults(
executionIdMap, workflowBizLogic,
- qUIManager, project_id));
+ qUIManager, project_id);
}
else
{
// normal query execution
if (queryExecId == 0)
{
// queryExecId =
// workflowBizLogic.executeGetCountQuery(queryId,
// request);
Map<Long, Integer> executionIdMap = workflowBizLogic
.executeGetCountQuery(workflowdetails, queryId,
request);
- executionQueryResults
- .addAll(generateExecutionQueryResults(
+ executionQueryResults=
+ generateExecutionQueryResults(
executionIdMap, workflowBizLogic,
- qUIManager, project_id));
+ qUIManager, project_id);
} else
{
resultCount = workflowBizLogic.getCount(queryExecId);
// executionQueryResults.add(createResultJSON(queryId,
// resultCount.getCount(),
// resultCount.getStatus(), resultCount
// .getQueryExectionId()));
boolean hasFewRecords = false;
if (project_id > 0)
{
hasFewRecords = qUIManager.checkTooFewRecords(Long
.valueOf(project_id), resultCount);
}
if (hasFewRecords)
{
jsonObject = createResultJSON(queryId, 0,
resultCount.getStatus(), resultCount
.getQueryExectionId());
} else
{
jsonObject = createResultJSON(queryId, resultCount
.getCount(), resultCount.getStatus(),
resultCount.getQueryExectionId());
}
executionQueryResults.add(jsonObject);
}
}
response.setContentType(Constants.CONTENT_TYPE_TEXT);
writer.write(new JSONObject().put("executionQueryResults",
executionQueryResults).toString());
}
}
catch (Exception e) {
Logger.out.debug(e.getMessage(),e);
+ e.printStackTrace();
try {
response.setContentType("text/xml");
writer.write(Constants.QUERY_EXCEPTION);
}
catch (IOException e1) {
Logger.out.debug(e1.getMessage(),e1);
}
}
finally
{
dao.closeSession();
}
}
return null;
}
/**
* Private method used to generate the List of JSON objects.
*
* @param executionIdMap
* Execution Id Map
* @param workflowBizLogic
* Instance of BizLogic to be used.
* @param qUIManager
* Instance of the Query UI Manager.
* @param projectId
* Project Id
* @return The List of JSON Objects
* @throws QueryModuleException
* if error while executing the query.
*/
private List<JSONObject> generateExecutionQueryResults(Map<Long, Integer> executionIdMap,
WorkflowBizLogic workflowBizLogic, AbstractQueryUIManager qUIManager,
int projectId) throws QueryModuleException
{
Count resultCount = null;
JSONObject jsonObject = null;
List<JSONObject> executionQueryResults = new ArrayList<JSONObject>();
Set<Long> titleset = executionIdMap.keySet();
Iterator<Long> iterator = titleset.iterator();
while (iterator.hasNext())
{
Long query = iterator.next();
resultCount = workflowBizLogic
.getCount(executionIdMap.get(query));
// executionQueryResults.add(createResultJSON(query,
// resultCount.getCount(), resultCount
// .getStatus(), resultCount
// .getQueryExectionId()));
boolean hasFewRecords = false;
if (projectId > 0)
{
hasFewRecords = qUIManager.checkTooFewRecords(
Long.valueOf(projectId), resultCount);
}
if (hasFewRecords)
{
jsonObject = createResultJSON(query, 0,
resultCount.getStatus(), resultCount
.getQueryExectionId());
} else
{
jsonObject = createResultJSON(query,
resultCount.getCount(), resultCount
.getStatus(), resultCount
.getQueryExectionId());
}
executionQueryResults.add(jsonObject);
}
return executionQueryResults;
}
/**
* @param queryId =Query identifier for which execute request sent
* @param errormessage
* @param workflowId
* @param queryIndex=row number where results to be displayed
* @param resultCount=value of result count for query
* @returns jsonObject
*
* creates the jsonObject for input parameters
*/
private JSONObject createResultJSON(Long queryId, int resultCount,
String status, int executionLogId)
{
JSONObject resultObject = null;
resultObject = new JSONObject();
try
{
resultObject.append("queryId", queryId);
resultObject.append("queryResult", resultCount);
resultObject.append("status", status);
resultObject.append("executionLogId", executionLogId);
}
catch (JSONException e)
{
Logger.out.info("error in initializing json object " + e);
}
return resultObject;
}
}
| false
| true
|
public ActionForward execute(ActionMapping mapping, ActionForm form,
HttpServletRequest request, HttpServletResponse response) throws Exception
{
WorkflowBizLogic workflowBizLogic=new WorkflowBizLogic();
//for saving workflow when click execute
// Get the Query information
Long queryId=(long)-1;
String queryIdStr = request.getParameter("queryId");
if (queryIdStr != null && !"".equals(queryIdStr.trim()))
{
queryId=Long.valueOf(queryIdStr);
}
//String queryTitle=request.getParameter("queryTitle");
String operation = request.getParameter(Constants.OPERATION);
String state = request.getParameter("state");
Writer writer = response.getWriter();
if (operation != null && "execute".equals(operation.trim()))
{
// Fetch the current workflow
String workflowId=request.getParameter("workflowId");
Workflow workflow = null;
AbstractDAO dao = DAOFactory.getInstance().getDAO(Constants.HIBERNATE_DAO);
dao.openSession(null);
try
{
// Get the workflow
workflow = (Workflow) dao.retrieve(Workflow.class.getName(),
Long.valueOf(workflowId));
// get the current selected projectId
int project_id = 0;
if (request.getParameter(Constants.SELECTED_PROJECT) != null
&& !(request.getParameter(Constants.SELECTED_PROJECT))
.equals(""))
{
project_id = (Integer.valueOf((request
.getParameter(Constants.SELECTED_PROJECT)
.toString())));
}
// Get the current User Id
SessionDataBean sessionData = (SessionDataBean) request
.getSession().getAttribute(Constants.SESSION_DATA);
Long userId = sessionData.getUserId();
// Create a workflow details object
// FIXME - CiderWorkFlowdetails cannot be accessed from here.
// Need a
// good enough way to create the workflow details object
// specific to
// Cider and AdvancedQuery.
CiderWorkFlowDetails workflowdetails = new CiderWorkFlowDetails(
project_id, userId.intValue(), workflow);
workflowBizLogic = (WorkflowBizLogic) BizLogicFactory
.getInstance().getBizLogic(
Constants.WORKFLOW_BIZLOGIC_ID);
String queryExecIdStr = request.getParameter("executionLogId");
int queryExecId = -1;
if (queryExecIdStr != null && !"".equals(queryExecIdStr.trim())) {
queryExecId = Integer.valueOf(queryExecIdStr);
}
// TO DO
// WorkflowBizLogic--->executeGetCountQuery
// WorkflowBizLogic--->getCount
List<JSONObject> executionQueryResults = new ArrayList<JSONObject>();
if (state != null && state.equals("cancel"))
{
JSONObject resultObject = null;
resultObject = new JSONObject();
resultObject.append("queryId", queryId);
try
{
if (request.getParameter("removeExecutedCount").equals(
"true"))
{
resultObject.put("removeExecutedCount",
"removeExecutedCount");
}
AbstractQueryManager qManager = AbstractQueryManagerFactory
.getDefaultAbstractQueryManager();
qManager.cancel(queryExecId);
// Count
// resultCount=workflowBizLogic.getCount(queryExecId);
response.setContentType(Constants.CONTENT_TYPE_TEXT);
writer.write(resultObject.toString());
} catch (Exception e)
{
resultObject.append("execption", "execption");
// resultObject.append("executionLogId", qu>>>>>> .r5829
response.setContentType(Constants.CONTENT_TYPE_TEXT);
writer.write(resultObject.toString());
}
}
else
{
Count resultCount = null;
JSONObject jsonObject = null;
AbstractQueryUIManager qUIManager = AbstractQueryUIManagerFactory
.getDefaultAbstractUIQueryManager();
// Get the executionType
String execType = request
.getParameter(Constants.REQ_ATTRIB_EXECUTION_TYPE);
if (execType != null
&& Constants.EXECUTION_TYPE_WORKFLOW
.equalsIgnoreCase(execType.trim()))
{
Map<Long, Integer> executionIdMap = workflowBizLogic
.runWorkflow(workflowdetails,
request);
executionQueryResults
.addAll(generateExecutionQueryResults(
executionIdMap, workflowBizLogic,
qUIManager, project_id));
}
else
{
// normal query execution
if (queryExecId == 0)
{
// queryExecId =
// workflowBizLogic.executeGetCountQuery(queryId,
// request);
Map<Long, Integer> executionIdMap = workflowBizLogic
.executeGetCountQuery(workflowdetails, queryId,
request);
executionQueryResults
.addAll(generateExecutionQueryResults(
executionIdMap, workflowBizLogic,
qUIManager, project_id));
} else
{
resultCount = workflowBizLogic.getCount(queryExecId);
// executionQueryResults.add(createResultJSON(queryId,
// resultCount.getCount(),
// resultCount.getStatus(), resultCount
// .getQueryExectionId()));
boolean hasFewRecords = false;
if (project_id > 0)
{
hasFewRecords = qUIManager.checkTooFewRecords(Long
.valueOf(project_id), resultCount);
}
if (hasFewRecords)
{
jsonObject = createResultJSON(queryId, 0,
resultCount.getStatus(), resultCount
.getQueryExectionId());
} else
{
jsonObject = createResultJSON(queryId, resultCount
.getCount(), resultCount.getStatus(),
resultCount.getQueryExectionId());
}
executionQueryResults.add(jsonObject);
}
}
response.setContentType(Constants.CONTENT_TYPE_TEXT);
writer.write(new JSONObject().put("executionQueryResults",
executionQueryResults).toString());
}
}
catch (Exception e) {
Logger.out.debug(e.getMessage(),e);
try {
response.setContentType("text/xml");
writer.write(Constants.QUERY_EXCEPTION);
}
catch (IOException e1) {
Logger.out.debug(e1.getMessage(),e1);
}
}
finally
{
dao.closeSession();
}
}
return null;
}
|
public ActionForward execute(ActionMapping mapping, ActionForm form,
HttpServletRequest request, HttpServletResponse response) throws Exception
{
WorkflowBizLogic workflowBizLogic=new WorkflowBizLogic();
//for saving workflow when click execute
// Get the Query information
Long queryId=(long)-1;
String queryIdStr = request.getParameter("queryId");
if (queryIdStr != null && !"".equals(queryIdStr.trim()))
{
queryId=Long.valueOf(queryIdStr);
}
//String queryTitle=request.getParameter("queryTitle");
String operation = request.getParameter(Constants.OPERATION);
String state = request.getParameter("state");
Writer writer = response.getWriter();
if (operation != null && "execute".equals(operation.trim()))
{
// Fetch the current workflow
String workflowId=request.getParameter("workflowId");
Workflow workflow = null;
AbstractDAO dao = DAOFactory.getInstance().getDAO(Constants.HIBERNATE_DAO);
dao.openSession(null);
try
{
// Get the workflow
workflow = (Workflow) dao.retrieve(Workflow.class.getName(),
Long.valueOf(workflowId));
// get the current selected projectId
int project_id = 0;
if (request.getParameter(Constants.SELECTED_PROJECT) != null
&& !(request.getParameter(Constants.SELECTED_PROJECT))
.equals(""))
{
project_id = (Integer.valueOf((request
.getParameter(Constants.SELECTED_PROJECT)
.toString())));
}
// Get the current User Id
SessionDataBean sessionData = (SessionDataBean) request
.getSession().getAttribute(Constants.SESSION_DATA);
Long userId = sessionData.getUserId();
// Create a workflow details object
// FIXME - CiderWorkFlowdetails cannot be accessed from here.
// Need a
// good enough way to create the workflow details object
// specific to
// Cider and AdvancedQuery.
CiderWorkFlowDetails workflowdetails = new CiderWorkFlowDetails(
project_id, userId.intValue(), workflow);
workflowBizLogic = (WorkflowBizLogic) BizLogicFactory
.getInstance().getBizLogic(
Constants.WORKFLOW_BIZLOGIC_ID);
String queryExecIdStr = request.getParameter("executionLogId");
int queryExecId = -1;
if (queryExecIdStr != null && !"".equals(queryExecIdStr.trim())) {
queryExecId = Integer.valueOf(queryExecIdStr);
}
// TO DO
// WorkflowBizLogic--->executeGetCountQuery
// WorkflowBizLogic--->getCount
List<JSONObject> executionQueryResults = new ArrayList<JSONObject>();
if (state != null && state.equals("cancel"))
{
JSONObject resultObject = null;
resultObject = new JSONObject();
resultObject.append("queryId", queryId);
try
{
if (request.getParameter("removeExecutedCount").equals(
"true"))
{
resultObject.put("removeExecutedCount",
"removeExecutedCount");
}
AbstractQueryManager qManager = AbstractQueryManagerFactory
.getDefaultAbstractQueryManager();
qManager.cancel(queryExecId);
// Count
// resultCount=workflowBizLogic.getCount(queryExecId);
response.setContentType(Constants.CONTENT_TYPE_TEXT);
writer.write(resultObject.toString());
} catch (Exception e)
{
resultObject.append("execption", "execption");
// resultObject.append("executionLogId", qu>>>>>> .r5829
response.setContentType(Constants.CONTENT_TYPE_TEXT);
writer.write(resultObject.toString());
}
}
else
{
Count resultCount = null;
JSONObject jsonObject = null;
AbstractQueryUIManager qUIManager = AbstractQueryUIManagerFactory
.getDefaultAbstractUIQueryManager();
// Get the executionType
String execType = request
.getParameter(Constants.REQ_ATTRIB_EXECUTION_TYPE);
if (execType != null
&& Constants.EXECUTION_TYPE_WORKFLOW
.equalsIgnoreCase(execType.trim()))
{
Map<Long, Integer> executionIdMap = workflowBizLogic
.runWorkflow(workflowdetails,
request);
executionQueryResults=
generateExecutionQueryResults(
executionIdMap, workflowBizLogic,
qUIManager, project_id);
}
else
{
// normal query execution
if (queryExecId == 0)
{
// queryExecId =
// workflowBizLogic.executeGetCountQuery(queryId,
// request);
Map<Long, Integer> executionIdMap = workflowBizLogic
.executeGetCountQuery(workflowdetails, queryId,
request);
executionQueryResults=
generateExecutionQueryResults(
executionIdMap, workflowBizLogic,
qUIManager, project_id);
} else
{
resultCount = workflowBizLogic.getCount(queryExecId);
// executionQueryResults.add(createResultJSON(queryId,
// resultCount.getCount(),
// resultCount.getStatus(), resultCount
// .getQueryExectionId()));
boolean hasFewRecords = false;
if (project_id > 0)
{
hasFewRecords = qUIManager.checkTooFewRecords(Long
.valueOf(project_id), resultCount);
}
if (hasFewRecords)
{
jsonObject = createResultJSON(queryId, 0,
resultCount.getStatus(), resultCount
.getQueryExectionId());
} else
{
jsonObject = createResultJSON(queryId, resultCount
.getCount(), resultCount.getStatus(),
resultCount.getQueryExectionId());
}
executionQueryResults.add(jsonObject);
}
}
response.setContentType(Constants.CONTENT_TYPE_TEXT);
writer.write(new JSONObject().put("executionQueryResults",
executionQueryResults).toString());
}
}
catch (Exception e) {
Logger.out.debug(e.getMessage(),e);
e.printStackTrace();
try {
response.setContentType("text/xml");
writer.write(Constants.QUERY_EXCEPTION);
}
catch (IOException e1) {
Logger.out.debug(e1.getMessage(),e1);
}
}
finally
{
dao.closeSession();
}
}
return null;
}
|
diff --git a/xutils/src/main/java/zcu/xutil/misc/ShiroFilterFactory.java b/xutils/src/main/java/zcu/xutil/misc/ShiroFilterFactory.java
index 021495d..eddf3d2 100644
--- a/xutils/src/main/java/zcu/xutil/misc/ShiroFilterFactory.java
+++ b/xutils/src/main/java/zcu/xutil/misc/ShiroFilterFactory.java
@@ -1,140 +1,140 @@
/*
* Copyright 2009 zaichu xiao
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package zcu.xutil.misc;
import java.util.HashMap;
import java.util.Map;
import javax.servlet.Filter;
import org.apache.shiro.config.Ini;
import org.apache.shiro.util.CollectionUtils;
import org.apache.shiro.util.StringUtils;
import org.apache.shiro.web.config.IniFilterChainResolverFactory;
import org.apache.shiro.web.filter.AccessControlFilter;
import org.apache.shiro.web.filter.authc.AuthenticationFilter;
import org.apache.shiro.web.filter.authc.LogoutFilter;
import org.apache.shiro.web.filter.authz.AuthorizationFilter;
import org.apache.shiro.web.filter.mgt.DefaultFilterChainManager;
import org.apache.shiro.web.filter.mgt.FilterChainResolver;
import org.apache.shiro.web.filter.mgt.PathMatchingFilterChainResolver;
import org.apache.shiro.web.mgt.WebSecurityManager;
import org.apache.shiro.mgt.SecurityManager;
import org.apache.shiro.web.servlet.AbstractShiroFilter;
import zcu.xutil.Objutil;
import zcu.xutil.cfg.CFG;
import zcu.xutil.cfg.Provider;
public class ShiroFilterFactory {
private SecurityManager securityManager;
private Map<String, Provider> filters = new HashMap<String, Provider>();
private String definitions, loginUrl, successUrl, unauthorizedUrl, logoutUrl;
public AbstractShiroFilter getShiroFilter() {
DefaultFilterChainManager manager = new DefaultFilterChainManager();
for (Filter filter : manager.getFilters().values())
applyGlobalPropertiesIfNecessary(filter);
for (Map.Entry<String, Provider> entry : filters.entrySet()) {
Filter filter = (Filter) entry.getValue().instance();
applyGlobalPropertiesIfNecessary(filter);
manager.addFilter(entry.getKey(), filter);
}
Ini ini = new Ini();
ini.load(definitions);
Ini.Section section = ini.getSection(IniFilterChainResolverFactory.URLS);
if (CollectionUtils.isEmpty(section))
section = ini.getSection(Ini.DEFAULT_SECTION_NAME);
for (Map.Entry<String, String> entry : section.entrySet())
manager.createChain(entry.getKey(), entry.getValue());
PathMatchingFilterChainResolver chainResolver = new PathMatchingFilterChainResolver();
chainResolver.setFilterChainManager(manager);
return new XSFilter((WebSecurityManager) securityManager, chainResolver);
}
public ShiroPlugin getShiroPlugin() {
ShiroPlugin ret = new ShiroPlugin();
if (StringUtils.hasText(unauthorizedUrl))
ret.setUnauthorizedUrl(unauthorizedUrl);
return ret;
}
public void setSecurityManager(SecurityManager securitymanager) {
this.securityManager = securitymanager;
}
public void setLoginUrl(String loginURL) {
this.loginUrl = loginURL;
}
public void setLogoutUrl(String logoutURL) {
this.logoutUrl = logoutURL;
}
public void setSuccessUrl(String successURL) {
this.successUrl = successURL;
}
public void setUnauthorizedUrl(String unauthorizedURL) {
this.unauthorizedUrl = unauthorizedURL;
}
public void addFilter(String name, String filterClass) {
addFilter(name, CFG.typ(Objutil.loadclass(filterClass)));
}
public void addFilter(String name, Provider filter) {
Class<?> type = filter.getType();
Objutil.validate(Filter.class.isAssignableFrom(type), "{} is not a Filter.", type);
filters.put(name, filter);
}
public void setFilterChainDefinitions(String definition) {
definitions = definition;
}
private void applyGlobalPropertiesIfNecessary(Filter filter) {
if (StringUtils.hasText(logoutUrl) && filter instanceof LogoutFilter) {
LogoutFilter logout = (LogoutFilter) filter;
if (LogoutFilter.DEFAULT_REDIRECT_URL.equals(logout.getRedirectUrl()))
logout.setRedirectUrl(logoutUrl);
}
if (StringUtils.hasText(loginUrl) && (filter instanceof AccessControlFilter)) {
AccessControlFilter access = (AccessControlFilter) filter;
if (AccessControlFilter.DEFAULT_LOGIN_URL.equals(access.getLoginUrl()))
access.setLoginUrl(loginUrl);
}
if (StringUtils.hasText(successUrl) && (filter instanceof AuthenticationFilter)) {
AuthenticationFilter authen = ((AuthenticationFilter) filter);
- if (AuthenticationFilter.DEFAULT_SUCCESS_URL.equals(authen.getLoginUrl()))
+ if (AuthenticationFilter.DEFAULT_SUCCESS_URL.equals(authen.getSuccessUrl()))
authen.setSuccessUrl(successUrl);
}
if (StringUtils.hasText(unauthorizedUrl) && (filter instanceof AuthorizationFilter)) {
AuthorizationFilter author = ((AuthorizationFilter) filter);
if (!StringUtils.hasText(author.getUnauthorizedUrl()))
author.setUnauthorizedUrl(unauthorizedUrl);
}
}
private static final class XSFilter extends AbstractShiroFilter {
XSFilter(WebSecurityManager webSecurityManager, FilterChainResolver resolver) {
setSecurityManager(Objutil.notNull(webSecurityManager, "WebSecurityManager property cannot be null."));
if (resolver != null)
setFilterChainResolver(resolver);
}
}
}
| true
| true
|
private void applyGlobalPropertiesIfNecessary(Filter filter) {
if (StringUtils.hasText(logoutUrl) && filter instanceof LogoutFilter) {
LogoutFilter logout = (LogoutFilter) filter;
if (LogoutFilter.DEFAULT_REDIRECT_URL.equals(logout.getRedirectUrl()))
logout.setRedirectUrl(logoutUrl);
}
if (StringUtils.hasText(loginUrl) && (filter instanceof AccessControlFilter)) {
AccessControlFilter access = (AccessControlFilter) filter;
if (AccessControlFilter.DEFAULT_LOGIN_URL.equals(access.getLoginUrl()))
access.setLoginUrl(loginUrl);
}
if (StringUtils.hasText(successUrl) && (filter instanceof AuthenticationFilter)) {
AuthenticationFilter authen = ((AuthenticationFilter) filter);
if (AuthenticationFilter.DEFAULT_SUCCESS_URL.equals(authen.getLoginUrl()))
authen.setSuccessUrl(successUrl);
}
if (StringUtils.hasText(unauthorizedUrl) && (filter instanceof AuthorizationFilter)) {
AuthorizationFilter author = ((AuthorizationFilter) filter);
if (!StringUtils.hasText(author.getUnauthorizedUrl()))
author.setUnauthorizedUrl(unauthorizedUrl);
}
}
|
private void applyGlobalPropertiesIfNecessary(Filter filter) {
if (StringUtils.hasText(logoutUrl) && filter instanceof LogoutFilter) {
LogoutFilter logout = (LogoutFilter) filter;
if (LogoutFilter.DEFAULT_REDIRECT_URL.equals(logout.getRedirectUrl()))
logout.setRedirectUrl(logoutUrl);
}
if (StringUtils.hasText(loginUrl) && (filter instanceof AccessControlFilter)) {
AccessControlFilter access = (AccessControlFilter) filter;
if (AccessControlFilter.DEFAULT_LOGIN_URL.equals(access.getLoginUrl()))
access.setLoginUrl(loginUrl);
}
if (StringUtils.hasText(successUrl) && (filter instanceof AuthenticationFilter)) {
AuthenticationFilter authen = ((AuthenticationFilter) filter);
if (AuthenticationFilter.DEFAULT_SUCCESS_URL.equals(authen.getSuccessUrl()))
authen.setSuccessUrl(successUrl);
}
if (StringUtils.hasText(unauthorizedUrl) && (filter instanceof AuthorizationFilter)) {
AuthorizationFilter author = ((AuthorizationFilter) filter);
if (!StringUtils.hasText(author.getUnauthorizedUrl()))
author.setUnauthorizedUrl(unauthorizedUrl);
}
}
|
diff --git a/tests/src/java/org/apache/log4j/util/EnhancedJunitTestRunnerFilter.java b/tests/src/java/org/apache/log4j/util/EnhancedJunitTestRunnerFilter.java
index d3c9cb0f..af85c3f5 100644
--- a/tests/src/java/org/apache/log4j/util/EnhancedJunitTestRunnerFilter.java
+++ b/tests/src/java/org/apache/log4j/util/EnhancedJunitTestRunnerFilter.java
@@ -1,66 +1,66 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.log4j.util;
import org.apache.oro.text.perl.Perl5Util;
public class EnhancedJunitTestRunnerFilter implements Filter {
private Perl5Util util = new Perl5Util();
private static final String[] PATTERNS = {
"at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner",
"at org.apache.tools.ant",
"at junit.textui.TestRunner",
"at com.intellij.rt.execution.junit",
"at java.lang.reflect.Method.invoke",
"at org.apache.maven.",
"at org.codehaus.",
"at org.junit.internal.runners.",
"at junit.framework.JUnit4TestAdapter"
};
public EnhancedJunitTestRunnerFilter() {
}
/**
* Filter out stack trace lines coming from the various JUnit TestRunners.
*/
public String filter(String in) {
if (in == null) {
return null;
}
//
// restore the one instance of Method.invoke that we actually want
//
if (in.indexOf("at junit.framework.TestCase.runTest") != -1) {
- return "\tat java.lang.reflect.Method.invoke(X)\n" + in;
+ return "\tat java.lang.reflect.Method.invoke(X)\n\t" + in.trim();
}
for (int i = 0; i < PATTERNS.length; i++) {
if(in.indexOf(PATTERNS[i]) != -1) {
return null;
}
}
if (util.match("/\\sat /", in)) {
return "\t" + in.trim();
}
return in;
}
}
| true
| true
|
public String filter(String in) {
if (in == null) {
return null;
}
//
// restore the one instance of Method.invoke that we actually want
//
if (in.indexOf("at junit.framework.TestCase.runTest") != -1) {
return "\tat java.lang.reflect.Method.invoke(X)\n" + in;
}
for (int i = 0; i < PATTERNS.length; i++) {
if(in.indexOf(PATTERNS[i]) != -1) {
return null;
}
}
if (util.match("/\\sat /", in)) {
return "\t" + in.trim();
}
return in;
}
|
public String filter(String in) {
if (in == null) {
return null;
}
//
// restore the one instance of Method.invoke that we actually want
//
if (in.indexOf("at junit.framework.TestCase.runTest") != -1) {
return "\tat java.lang.reflect.Method.invoke(X)\n\t" + in.trim();
}
for (int i = 0; i < PATTERNS.length; i++) {
if(in.indexOf(PATTERNS[i]) != -1) {
return null;
}
}
if (util.match("/\\sat /", in)) {
return "\t" + in.trim();
}
return in;
}
|
diff --git a/src/FE_SRC_COMMON/com/ForgeEssentials/core/misc/UnfriendlyItemList.java b/src/FE_SRC_COMMON/com/ForgeEssentials/core/misc/UnfriendlyItemList.java
index fd2467b12..9c30182e4 100644
--- a/src/FE_SRC_COMMON/com/ForgeEssentials/core/misc/UnfriendlyItemList.java
+++ b/src/FE_SRC_COMMON/com/ForgeEssentials/core/misc/UnfriendlyItemList.java
@@ -1,195 +1,195 @@
package com.ForgeEssentials.core.misc;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.util.HashMap;
import java.util.Set;
import java.util.TreeSet;
import net.minecraft.item.Item;
import net.minecraft.item.ItemBlock;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.nbt.NBTTagList;
import com.ForgeEssentials.util.FunctionHelper;
import com.ForgeEssentials.util.OutputHandler;
import com.google.common.base.Strings;
import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import cpw.mods.fml.common.registry.GameData;
import cpw.mods.fml.common.registry.ItemData;
public abstract class UnfriendlyItemList
{
private static final HashBiMap<String, Integer> map = HashBiMap.create();
private static final String VANILLA = "vanilla";
private static final String UNKNOWN_SOURCE = "unknownSource";
private static final String UNKNOWN_TYPE = "unknownType";
private UnfriendlyItemList()
{
}
/**
* should be called at PostLoad.
*/
public static void modStep()
{
HashMap<Integer, String> gameMap = new HashMap<Integer, String>();
map.clear();
// populate from GameData
{
NBTTagList list = new NBTTagList();
GameData.writeItemData(list);
ItemData data;
String modid;
for (int i = 0; i < list.tagCount(); i++)
{
data = new ItemData((NBTTagCompound) list.tagAt(i));
modid = VANILLA;
if (!data.getModId().equalsIgnoreCase("Minecraft"))
{
modid = data.getModId();
}
gameMap.put(data.getItemId(), modid);
}
}
// now iterrate through ItemList.
HashMap<String, Integer> duplicates = new HashMap<String, Integer>();
String name;
Integer num;
String tempName;
for (int i = 0; i < Item.itemsList.length; i++)
{
Item item = Item.itemsList[i];
if (item == null)
{
continue;
}
// get the name..
name = item.getUnlocalizedName();
if (name == null)
{
if (item instanceof ItemBlock)
name = "block.";
else
name = "item.";
name = name + item.getClass().getSimpleName();
}
// split items and blocks
name = name.replace("tile.", "block.");
// get source.
tempName = gameMap.get(item.itemID);
if (Strings.isNullOrEmpty(tempName))
{
name = UNKNOWN_SOURCE+"." + name;
}
else
{
name = tempName + "." + name;
}
// add numbers to the end of duplicates
num = duplicates.get(name);
if (num == null)
{
duplicates.put(name, 0);
}
else
{
num++;
duplicates.put(name, num);
name += num;
}
name = name.replace(' ', '_');
// save
- if (!map.containsKey(name)) map.put(name, item.itemID);
+ if (!map.containsKey(name) && map.containsValue(item.itemID)) map.put(name, item.itemID);
else OutputHandler.severe("Double item! Name:" + name + " ID:" + item.itemID);
}
}
/**
* @param name name of the block.
* @return -1 if the name does not exist.
*/
public static int getId(String name)
{
Integer id = map.get(name);
return id == null ? -1 : id;
}
/**
* @Param ID
* @return null if the ID does not exist
*/
public static String getName(int id)
{
String name = map.inverse().get(id);
if (Strings.isNullOrEmpty(name))
return UNKNOWN_SOURCE+"."+UNKNOWN_TYPE+"."+id;
else
return name;
}
public static Set<String> getNameSet()
{
return map.keySet();
}
public static void output(File output)
{
try
{
output.createNewFile();
BufferedWriter writer = new BufferedWriter(new FileWriter(output));
writer.write("#// ------------ Item-ID-List ------------ \\\\#");
writer.newLine();
writer.write("#// --------------- " + FunctionHelper.getCurrentDateString() + " --------------- \\\\#");
writer.newLine();
writer.write("#// ------------------------------------------ \\\\#");
writer.newLine();
writer.newLine();
TreeSet<Integer> ids = new TreeSet<Integer>();
BiMap<Integer, String> inverse = map.inverse();
// order ids.
for (Integer id : map.inverse().keySet())
{
ids.add(id);
}
String str;
for (Integer id : ids)
{
str = String.format("%-7s", id);
str = str + " == " + inverse.get(id);
writer.write(str);
writer.newLine();
}
writer.close();
}
catch (Exception e)
{
}
}
}
| true
| true
|
public static void modStep()
{
HashMap<Integer, String> gameMap = new HashMap<Integer, String>();
map.clear();
// populate from GameData
{
NBTTagList list = new NBTTagList();
GameData.writeItemData(list);
ItemData data;
String modid;
for (int i = 0; i < list.tagCount(); i++)
{
data = new ItemData((NBTTagCompound) list.tagAt(i));
modid = VANILLA;
if (!data.getModId().equalsIgnoreCase("Minecraft"))
{
modid = data.getModId();
}
gameMap.put(data.getItemId(), modid);
}
}
// now iterrate through ItemList.
HashMap<String, Integer> duplicates = new HashMap<String, Integer>();
String name;
Integer num;
String tempName;
for (int i = 0; i < Item.itemsList.length; i++)
{
Item item = Item.itemsList[i];
if (item == null)
{
continue;
}
// get the name..
name = item.getUnlocalizedName();
if (name == null)
{
if (item instanceof ItemBlock)
name = "block.";
else
name = "item.";
name = name + item.getClass().getSimpleName();
}
// split items and blocks
name = name.replace("tile.", "block.");
// get source.
tempName = gameMap.get(item.itemID);
if (Strings.isNullOrEmpty(tempName))
{
name = UNKNOWN_SOURCE+"." + name;
}
else
{
name = tempName + "." + name;
}
// add numbers to the end of duplicates
num = duplicates.get(name);
if (num == null)
{
duplicates.put(name, 0);
}
else
{
num++;
duplicates.put(name, num);
name += num;
}
name = name.replace(' ', '_');
// save
if (!map.containsKey(name)) map.put(name, item.itemID);
else OutputHandler.severe("Double item! Name:" + name + " ID:" + item.itemID);
}
}
|
public static void modStep()
{
HashMap<Integer, String> gameMap = new HashMap<Integer, String>();
map.clear();
// populate from GameData
{
NBTTagList list = new NBTTagList();
GameData.writeItemData(list);
ItemData data;
String modid;
for (int i = 0; i < list.tagCount(); i++)
{
data = new ItemData((NBTTagCompound) list.tagAt(i));
modid = VANILLA;
if (!data.getModId().equalsIgnoreCase("Minecraft"))
{
modid = data.getModId();
}
gameMap.put(data.getItemId(), modid);
}
}
// now iterrate through ItemList.
HashMap<String, Integer> duplicates = new HashMap<String, Integer>();
String name;
Integer num;
String tempName;
for (int i = 0; i < Item.itemsList.length; i++)
{
Item item = Item.itemsList[i];
if (item == null)
{
continue;
}
// get the name..
name = item.getUnlocalizedName();
if (name == null)
{
if (item instanceof ItemBlock)
name = "block.";
else
name = "item.";
name = name + item.getClass().getSimpleName();
}
// split items and blocks
name = name.replace("tile.", "block.");
// get source.
tempName = gameMap.get(item.itemID);
if (Strings.isNullOrEmpty(tempName))
{
name = UNKNOWN_SOURCE+"." + name;
}
else
{
name = tempName + "." + name;
}
// add numbers to the end of duplicates
num = duplicates.get(name);
if (num == null)
{
duplicates.put(name, 0);
}
else
{
num++;
duplicates.put(name, num);
name += num;
}
name = name.replace(' ', '_');
// save
if (!map.containsKey(name) && map.containsValue(item.itemID)) map.put(name, item.itemID);
else OutputHandler.severe("Double item! Name:" + name + " ID:" + item.itemID);
}
}
|
diff --git a/bundles/DataLayer/src/test/java/org/paxle/data/db/impl/CommandDBTest.java b/bundles/DataLayer/src/test/java/org/paxle/data/db/impl/CommandDBTest.java
index 19173089..cb9df59a 100644
--- a/bundles/DataLayer/src/test/java/org/paxle/data/db/impl/CommandDBTest.java
+++ b/bundles/DataLayer/src/test/java/org/paxle/data/db/impl/CommandDBTest.java
@@ -1,313 +1,314 @@
/**
* This file is part of the Paxle project.
* Visit http://www.paxle.net for more information.
* Copyright 2007-2008 the original author or authors.
*
* Licensed under the terms of the Common Public License 1.0 ("CPL 1.0").
* Any use, reproduction or distribution of this program constitutes the recipient's acceptance of this agreement.
* The full license text is available under http://www.opensource.org/licenses/cpl1.0.txt
* or in the file LICENSE.txt in the root directory of the Paxle distribution.
*
* Unless required by applicable law or agreed to in writing, this software is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
package org.paxle.data.db.impl;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.filefilter.WildcardFileFilter;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jmock.Expectations;
import org.jmock.integration.junit3.MockObjectTestCase;
import org.paxle.core.data.IDataSink;
import org.paxle.core.queue.ICommand;
import org.paxle.core.queue.ICommandTracker;
import org.paxle.core.threading.PPM;
public class CommandDBTest extends MockObjectTestCase {
private static final String DERBY_CONFIG_FILE = "../DataLayerDerby/src/main/resources/resources/hibernate/derby.cfg.xml";
private static final String H2_CONFIG_FILE = "../DataLayerH2/src/main/resources/resources/hibernate/H2.cfg.xml";
@SuppressWarnings("unused")
private static final String POSTGRESQL_CONFIG_FILE = "../DataLayerPostgreSQL/src/main/resources/resources/hibernate/postgresql.cfg.xml";
private static final String DERBY_CONNECTION_URL = "jdbc:derby:target/command-db;create=true";
private static final String H2_CONNECTION_URL = "jdbc:h2:target/command-db/cdb;MVCC=TRUE";
@SuppressWarnings("unused")
private static final String POSTGRESQL_CONNECTION_URL = "jdbc:postgresql://%s/command-db";
private ICommandTracker cmdTracker;
private CommandDB cmdDB;
/**
* @return the hibernate mapping files to use
* @throws MalformedURLException
*/
private List<URL> getMappingFiles() throws MalformedURLException {
final File mappingFilesDir = new File("src/main/resources/resources/hibernate/mapping/command/");
assertTrue(mappingFilesDir.exists());
final FileFilter mappingFileFilter = new WildcardFileFilter("*.hbm.xml");
File[] mappingFiles = mappingFilesDir.listFiles(mappingFileFilter);
assertNotNull(mappingFiles);
assertEquals(1, mappingFiles.length);
List<URL> mappingFileURLs = new ArrayList<URL>();
for (File mappingFile : mappingFiles) {
mappingFileURLs.add(mappingFile.toURL());
}
return mappingFileURLs;
}
/**
* @return the hibernate config file to use
* @throws MalformedURLException
*/
private URL getConfigFile(String configFile) throws MalformedURLException {
final File derbyConfigFile = new File(configFile);
assertTrue(derbyConfigFile.exists());
return derbyConfigFile.toURL();
}
/**
* @return additional properties that should be passed to hibernate
*/
private Properties getExtraProperties(String connectionString) {
Properties props = new Properties();
props.put("connection.url", connectionString);
props.put("hibernate.connection.url", connectionString);
return props;
}
@Override
protected void setUp() throws Exception {
super.setUp();
System.setProperty("paxle.data", "target");
// create a dummy command tracker
this.cmdTracker = mock(ICommandTracker.class);
// delete dirs
this.deleteTestDataDirs();
}
private void setupDB(String hibernateConfigFile, String connectionURL) throws MalformedURLException {
// create and init the command-db
this.cmdDB = new CommandDB(
this.getConfigFile(hibernateConfigFile),
this.getMappingFiles(),
this.getExtraProperties(connectionURL),
this.cmdTracker
);
// startup DB
this.cmdDB.start();
}
@Override
protected void tearDown() throws Exception {
super.tearDown();
// close DB
if (cmdDB != null) this.cmdDB.close();
// delete dirs
this.deleteTestDataDirs();
}
private void deleteTestDataDirs() throws IOException {
// delete data directory
File dbDir = new File("target/command-db");
if(dbDir.exists()) FileUtils.deleteDirectory(dbDir);
File bloomDir = new File("target/double-urls-caches");
if (bloomDir.exists()) FileUtils.deleteDirectory(bloomDir);
}
/**
* A dummy data-sink which just prints out the data
*/
private class DummyDataSink implements IDataSink<ICommand> {
private final Semaphore semaphore;
private final PPM ppm = new PPM();
private final Log logger = LogFactory.getLog(this.getClass());
private long counter = 0;
private long lastCounter = 0;
private long timestamp = 0;
public DummyDataSink(Semaphore semaphore) {
this.semaphore = semaphore;
}
public void putData(ICommand cmd) throws Exception {
this.ppm.trick();
this.semaphore.release();
this.counter++;
if (this.counter % 1000 == 0) {
System.err.println(this.counter + " commands dequeued so far.");
}
if (System.currentTimeMillis() - timestamp > 60000) {
this.logger.error(String.format(
"%d commands dequeued in %d ms with '%d' cpm.",
Long.valueOf(counter-lastCounter),
Long.valueOf(System.currentTimeMillis()-this.timestamp),
Integer.valueOf(this.ppm.getPPM())
));
this.timestamp = System.currentTimeMillis();
this.lastCounter = counter;
}
}
public int freeCapacity() throws Exception {
return -1;
}
public boolean freeCapacitySupported() {
return false;
}
public boolean offerData(ICommand cmd) throws Exception {
this.putData(cmd);
return true;
}
}
private void storeUnknownLocation() throws InterruptedException {
final int MAX = 10;
// command-tracker must be called MAX times
checking(new Expectations() {{
exactly(MAX).of(cmdTracker).commandCreated(with(equal("org.paxle.data.db.ICommandDB")), with(any(ICommand.class)));
}});
// generated test URI
LinkedList<URI> knownURIs;
LinkedList<URI> testURI = new LinkedList<URI>();
for (int i=0; i < MAX; i++) {
testURI.add(URI.create("http://test.paxle.net/" + i));
}
knownURIs = (LinkedList<URI>) testURI.clone();
// store them to DB
int knownCount = this.cmdDB.storeUnknownLocations(0, 1, testURI);
assertEquals(0, knownCount);
// create a dummy data-sink
Semaphore s = null;
this.cmdDB.setDataSink(new DummyDataSink(s = new Semaphore(-MAX + 1)));
// wait for all commands to be enqueued
boolean acquired = s.tryAcquire(3, TimeUnit.SECONDS);
assertTrue(acquired);
// testing if all URI are known to the DB
for (URI knownURI : knownURIs) {
// command must be marked as crawled
boolean known = this.cmdDB.isKnownInDB(knownURI,"CrawledCommand");
assertTrue("Unkown URI: " + knownURI, known);
// command must not be enqueued
known = this.cmdDB.isKnownInDB(knownURI,"EnqueuedCommand");
assertFalse("Unkown URI: " + knownURI, known);
// command must be known to the cache
known = this.cmdDB.isKnownInCache(knownURI);
assertTrue(known);
// command must be known to the bloom filter
known = this.cmdDB.isKnownInDoubleURLs(knownURI);
assertTrue(known);
}
}
public void testStoreUnknownLocationDerby() throws MalformedURLException, InterruptedException {
// setup DB
this.setupDB(DERBY_CONFIG_FILE, DERBY_CONNECTION_URL);
// start test
this.storeUnknownLocation();
}
public void testStoreUnknownLocationH2() throws MalformedURLException, InterruptedException {
// setup DB
this.setupDB(H2_CONFIG_FILE, H2_CONNECTION_URL);
// start test
this.storeUnknownLocation();
}
public void _testVeryLargeURLSet() throws MalformedURLException, InterruptedException {
final int MAX = 1000000;
final int chunkSize = 1000;
System.setProperty("derby.storage.pageCacheSize", "2000"); // default 1000
//System.setProperty("derby.storage.pageSize", "32768"); // default 4096 bytes
// setup DB
// this.setupDB(POSTGRESQL_CONFIG_FILE, String.format(POSTGRESQL_CONNECTION_URL,"192.168.10.201"));
+ //this.setupDB(H2_CONFIG_FILE, H2_CONNECTION_URL);
this.setupDB(DERBY_CONFIG_FILE, DERBY_CONNECTION_URL);
// command-tracker must be called MAX times
checking(new Expectations() {{
exactly(MAX).of(cmdTracker).commandCreated(with(equal("org.paxle.data.db.ICommandDB")), with(any(ICommand.class)));
}});
final Semaphore s = new Semaphore(-MAX + 1);
new Thread() {
public void run() {
try {
Thread.sleep(10000);
} catch (InterruptedException e) {}
// create a dummy data-sink
cmdDB.setDataSink(new DummyDataSink(s));
};
}.start();
// store new commands
long start = System.currentTimeMillis();
LinkedList<URI> testURI = new LinkedList<URI>();
for (int i=1; i <= MAX; i++) {
URI nextCommand = URI.create("http://test.paxle.net/" + i);
testURI.add(nextCommand);
if (i % chunkSize == 0 || i == MAX) {
int known = this.cmdDB.storeUnknownLocations(0, 1, testURI);
assertEquals(0, known);
testURI.clear();
}
}
// wait for all commands to be enqueued
s.acquire();
System.out.println(String.format(
"Storing and loading %d URL took %d ms",
Integer.valueOf(MAX),
Long.valueOf(System.currentTimeMillis()-start)
));
}
}
| true
| true
|
public void _testVeryLargeURLSet() throws MalformedURLException, InterruptedException {
final int MAX = 1000000;
final int chunkSize = 1000;
System.setProperty("derby.storage.pageCacheSize", "2000"); // default 1000
//System.setProperty("derby.storage.pageSize", "32768"); // default 4096 bytes
// setup DB
// this.setupDB(POSTGRESQL_CONFIG_FILE, String.format(POSTGRESQL_CONNECTION_URL,"192.168.10.201"));
this.setupDB(DERBY_CONFIG_FILE, DERBY_CONNECTION_URL);
// command-tracker must be called MAX times
checking(new Expectations() {{
exactly(MAX).of(cmdTracker).commandCreated(with(equal("org.paxle.data.db.ICommandDB")), with(any(ICommand.class)));
}});
final Semaphore s = new Semaphore(-MAX + 1);
new Thread() {
public void run() {
try {
Thread.sleep(10000);
} catch (InterruptedException e) {}
// create a dummy data-sink
cmdDB.setDataSink(new DummyDataSink(s));
};
}.start();
// store new commands
long start = System.currentTimeMillis();
LinkedList<URI> testURI = new LinkedList<URI>();
for (int i=1; i <= MAX; i++) {
URI nextCommand = URI.create("http://test.paxle.net/" + i);
testURI.add(nextCommand);
if (i % chunkSize == 0 || i == MAX) {
int known = this.cmdDB.storeUnknownLocations(0, 1, testURI);
assertEquals(0, known);
testURI.clear();
}
}
// wait for all commands to be enqueued
s.acquire();
System.out.println(String.format(
"Storing and loading %d URL took %d ms",
Integer.valueOf(MAX),
Long.valueOf(System.currentTimeMillis()-start)
));
}
|
public void _testVeryLargeURLSet() throws MalformedURLException, InterruptedException {
final int MAX = 1000000;
final int chunkSize = 1000;
System.setProperty("derby.storage.pageCacheSize", "2000"); // default 1000
//System.setProperty("derby.storage.pageSize", "32768"); // default 4096 bytes
// setup DB
// this.setupDB(POSTGRESQL_CONFIG_FILE, String.format(POSTGRESQL_CONNECTION_URL,"192.168.10.201"));
//this.setupDB(H2_CONFIG_FILE, H2_CONNECTION_URL);
this.setupDB(DERBY_CONFIG_FILE, DERBY_CONNECTION_URL);
// command-tracker must be called MAX times
checking(new Expectations() {{
exactly(MAX).of(cmdTracker).commandCreated(with(equal("org.paxle.data.db.ICommandDB")), with(any(ICommand.class)));
}});
final Semaphore s = new Semaphore(-MAX + 1);
new Thread() {
public void run() {
try {
Thread.sleep(10000);
} catch (InterruptedException e) {}
// create a dummy data-sink
cmdDB.setDataSink(new DummyDataSink(s));
};
}.start();
// store new commands
long start = System.currentTimeMillis();
LinkedList<URI> testURI = new LinkedList<URI>();
for (int i=1; i <= MAX; i++) {
URI nextCommand = URI.create("http://test.paxle.net/" + i);
testURI.add(nextCommand);
if (i % chunkSize == 0 || i == MAX) {
int known = this.cmdDB.storeUnknownLocations(0, 1, testURI);
assertEquals(0, known);
testURI.clear();
}
}
// wait for all commands to be enqueued
s.acquire();
System.out.println(String.format(
"Storing and loading %d URL took %d ms",
Integer.valueOf(MAX),
Long.valueOf(System.currentTimeMillis()-start)
));
}
|
diff --git a/src/main/java/org/atlasapi/query/v2/PeopleController.java b/src/main/java/org/atlasapi/query/v2/PeopleController.java
index e592566bc..c1323c733 100644
--- a/src/main/java/org/atlasapi/query/v2/PeopleController.java
+++ b/src/main/java/org/atlasapi/query/v2/PeopleController.java
@@ -1,108 +1,108 @@
package org.atlasapi.query.v2;
import java.io.IOException;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.atlasapi.application.ApplicationConfiguration;
import org.atlasapi.application.query.ApplicationConfigurationFetcher;
import org.atlasapi.media.entity.Person;
import org.atlasapi.media.entity.Publisher;
import org.atlasapi.output.AtlasErrorSummary;
import org.atlasapi.output.AtlasModelWriter;
import org.atlasapi.persistence.content.PeopleQueryResolver;
import org.atlasapi.persistence.logging.AdapterLog;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import com.google.common.base.Optional;
import com.google.common.base.Strings;
import com.google.common.collect.Iterables;
import com.metabroadcast.common.http.HttpStatusCode;
import com.metabroadcast.common.ids.SubstitutionTableNumberCodec;
import com.metabroadcast.common.query.Selection;
import com.metabroadcast.common.query.Selection.SelectionBuilder;
@Controller
public class PeopleController extends BaseController<Iterable<Person>> {
private static final SelectionBuilder selectionBuilder = Selection.builder().withDefaultLimit(25).withMaxLimit(50);
private static final AtlasErrorSummary NOT_FOUND = new AtlasErrorSummary(new NullPointerException())
.withErrorCode("Person not found")
.withStatusCode(HttpStatusCode.NOT_FOUND);
private static final AtlasErrorSummary FORBIDDEN = new AtlasErrorSummary(new NullPointerException())
.withStatusCode(HttpStatusCode.FORBIDDEN);
private final PeopleQueryResolver resolver;
private final PeopleWriteController personWriteController;
public PeopleController(PeopleQueryResolver resolver, ApplicationConfigurationFetcher configFetcher,
AdapterLog log, AtlasModelWriter<Iterable<Person>> outputter, PeopleWriteController personWriteController) {
super(configFetcher, log, outputter, SubstitutionTableNumberCodec.lowerCaseOnly());
this.resolver = resolver;
this.personWriteController = personWriteController;
}
@RequestMapping("/3.0/people.*")
public void content(HttpServletRequest request, HttpServletResponse response) throws IOException {
try {
ApplicationConfiguration config = possibleAppConfig(request)
.valueOrDefault(ApplicationConfiguration.defaultConfiguration());
String uri = request.getParameter("uri");
String id = request.getParameter("id");
String publisher = request.getParameter("publisher");
if (Strings.isNullOrEmpty(uri) ^ Strings.isNullOrEmpty(id) ^ Strings.isNullOrEmpty(publisher) ) {
throw new IllegalArgumentException("specify exactly one of 'uri', 'id' or 'publisher'");
}
Iterable<Person> people;
if (uri != null || id != null) {
Optional<Person> person;
if (uri != null) {
person = resolver.person(uri, config);
} else {
person = resolver.person(idCodec.decode(id).longValue(), config);
}
- if(!config.isEnabled(person.get().getPublisher())) {
- errorViewFor(request, response, FORBIDDEN);
+ if(!person.isPresent()) {
+ errorViewFor(request, response, NOT_FOUND);
return;
}
people = person.asSet();
} else {
List<Publisher> publishers = Publisher.fromCsv(publisher);
for (Publisher pub : publishers) {
if (!config.isEnabled(pub)) {
errorViewFor(request, response, FORBIDDEN);
return;
}
}
people = resolver.people(publishers, config, selectionBuilder.build(request));
}
if(Iterables.size(people) == 0) {
errorViewFor(request, response, NOT_FOUND);
return;
}
modelAndViewFor(request, response, people, config);
} catch (Exception e) {
errorViewFor(request, response, AtlasErrorSummary.forException(e));
}
}
@RequestMapping(value="/3.0/people.json", method = RequestMethod.POST)
public Void postContent(HttpServletRequest req, HttpServletResponse resp) {
return personWriteController.postPerson(req, resp);
}
@RequestMapping(value="/3.0/people.json", method = RequestMethod.PUT)
public Void putContent(HttpServletRequest req, HttpServletResponse resp) {
return personWriteController.putPerson(req, resp);
}
}
| true
| true
|
public void content(HttpServletRequest request, HttpServletResponse response) throws IOException {
try {
ApplicationConfiguration config = possibleAppConfig(request)
.valueOrDefault(ApplicationConfiguration.defaultConfiguration());
String uri = request.getParameter("uri");
String id = request.getParameter("id");
String publisher = request.getParameter("publisher");
if (Strings.isNullOrEmpty(uri) ^ Strings.isNullOrEmpty(id) ^ Strings.isNullOrEmpty(publisher) ) {
throw new IllegalArgumentException("specify exactly one of 'uri', 'id' or 'publisher'");
}
Iterable<Person> people;
if (uri != null || id != null) {
Optional<Person> person;
if (uri != null) {
person = resolver.person(uri, config);
} else {
person = resolver.person(idCodec.decode(id).longValue(), config);
}
if(!config.isEnabled(person.get().getPublisher())) {
errorViewFor(request, response, FORBIDDEN);
return;
}
people = person.asSet();
} else {
List<Publisher> publishers = Publisher.fromCsv(publisher);
for (Publisher pub : publishers) {
if (!config.isEnabled(pub)) {
errorViewFor(request, response, FORBIDDEN);
return;
}
}
people = resolver.people(publishers, config, selectionBuilder.build(request));
}
if(Iterables.size(people) == 0) {
errorViewFor(request, response, NOT_FOUND);
return;
}
modelAndViewFor(request, response, people, config);
} catch (Exception e) {
errorViewFor(request, response, AtlasErrorSummary.forException(e));
}
}
|
public void content(HttpServletRequest request, HttpServletResponse response) throws IOException {
try {
ApplicationConfiguration config = possibleAppConfig(request)
.valueOrDefault(ApplicationConfiguration.defaultConfiguration());
String uri = request.getParameter("uri");
String id = request.getParameter("id");
String publisher = request.getParameter("publisher");
if (Strings.isNullOrEmpty(uri) ^ Strings.isNullOrEmpty(id) ^ Strings.isNullOrEmpty(publisher) ) {
throw new IllegalArgumentException("specify exactly one of 'uri', 'id' or 'publisher'");
}
Iterable<Person> people;
if (uri != null || id != null) {
Optional<Person> person;
if (uri != null) {
person = resolver.person(uri, config);
} else {
person = resolver.person(idCodec.decode(id).longValue(), config);
}
if(!person.isPresent()) {
errorViewFor(request, response, NOT_FOUND);
return;
}
people = person.asSet();
} else {
List<Publisher> publishers = Publisher.fromCsv(publisher);
for (Publisher pub : publishers) {
if (!config.isEnabled(pub)) {
errorViewFor(request, response, FORBIDDEN);
return;
}
}
people = resolver.people(publishers, config, selectionBuilder.build(request));
}
if(Iterables.size(people) == 0) {
errorViewFor(request, response, NOT_FOUND);
return;
}
modelAndViewFor(request, response, people, config);
} catch (Exception e) {
errorViewFor(request, response, AtlasErrorSummary.forException(e));
}
}
|
diff --git a/src/de/bsd/zwitscher/TweetDB.java b/src/de/bsd/zwitscher/TweetDB.java
index ab156ff..a1b67d4 100644
--- a/src/de/bsd/zwitscher/TweetDB.java
+++ b/src/de/bsd/zwitscher/TweetDB.java
@@ -1,450 +1,450 @@
package de.bsd.zwitscher;
import java.lang.String;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import android.database.sqlite.SQLiteDatabase.CursorFactory;
/**
* This class is interfacing with the SQLite3 database on the
* handset to store statuses, lists, users and so on.
*
* @author Heiko W. Rupp
*/
public class TweetDB {
private static final String TABLE_STATUSES = "statuses";
private static final String TABLE_LAST_READ = "lastRead";
private static final String TABLE_LISTS = "lists";
private static final String TABLE_USERS = "users";
static final String STATUS = "STATUS";
static final String ACCOUNT_ID = "ACCOUNT_ID";
static final String ACCOUNT_ID_IS = ACCOUNT_ID + "=?";
private TweetDBOpenHelper tdHelper;
private static final String TABLE_DIRECTS = "directs";
private final String account;
public TweetDB(Context context, int accountId) {
tdHelper = new TweetDBOpenHelper(context, "TWEET_DB", null, 1);
account = String.valueOf(accountId);
}
private class TweetDBOpenHelper extends SQLiteOpenHelper {
static final String CREATE_TABLE = "CREATE TABLE ";
public TweetDBOpenHelper(Context context, String name,
CursorFactory factory, int version) {
super(context, name, factory, version);
}
@Override
public void onCreate(SQLiteDatabase db) {
db.execSQL(CREATE_TABLE + TABLE_STATUSES + " (" +
"ID LONG, " +
ACCOUNT_ID + " LONG, " +
"LIST_ID LONG, " +
"I_REP_TO LONG, " +
"STATUS STRING " +
")"
);
db.execSQL(CREATE_TABLE + TABLE_DIRECTS + " (" +
"ID LONG, " +
ACCOUNT_ID + " LONG, " +
"MESSAGE_JSON STRING " +
")"
);
db.execSQL(CREATE_TABLE + TABLE_LAST_READ + " (" + //
"list_id LONG, " + //
"last_read_id LONG, " + // Last Id read by the user
"last_fetched_id LONG, " + // last Id fetched from the server
ACCOUNT_ID + " LONG " +
")"
);
db.execSQL(CREATE_TABLE + TABLE_LISTS + " (" + //
"name TEXT, " + //
"id LONG, " +
ACCOUNT_ID + " LONG, " +
"list_json TEXT" +
" )"
);
db.execSQL(CREATE_TABLE + TABLE_USERS + " (" +
"userId LONG, " + //
ACCOUNT_ID + " LONG, " +
"user_json STRING )"
);
}
@Override
public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
}
}
/**
* Return the id of the status that was last read
* @param list_id id of the list
* @return
*/
long getLastRead(int list_id) {
SQLiteDatabase db = tdHelper.getReadableDatabase();
Cursor c = db.query(TABLE_LAST_READ, new String[] {"last_read_id"}, "list_id = ? AND " + ACCOUNT_ID_IS, new String[] {String.valueOf(list_id),account}, null, null, null);
Long ret;
if (c.getCount()==0)
ret = -1L;
else {
c.moveToFirst();
ret = c.getLong(0);
}
c.close();
db.close();
return ret;
}
/**
* Update (or initially store) the last read information of the passed list
* @param list_id List to mark as read
* @param last_read_id Id of the last read status
*/
void updateOrInsertLastRead(int list_id, long last_read_id) {
ContentValues cv = new ContentValues();
cv.put("list_id", list_id);
cv.put("last_read_id", last_read_id);
cv.put(ACCOUNT_ID,account);
SQLiteDatabase db = tdHelper.getWritableDatabase();
int updated = db.update(TABLE_LAST_READ, cv, "list_id = ? AND " + ACCOUNT_ID_IS, new String[] {String.valueOf(list_id),account});
if (updated==0) {
// row not yet present
db.insert(TABLE_LAST_READ, null, cv);
}
db.close();
}
/**
* Return Infos about all lists in the DB
* @return
* @todo return the json object
*/
Map<String, Integer> getLists() {
SQLiteDatabase db = tdHelper.getReadableDatabase();
Map<String,Integer> ret = new HashMap<String,Integer>();
Cursor c = db.query(TABLE_LISTS, new String[] {"name","id"}, ACCOUNT_ID_IS, new String[]{account}, null, null, "name");
if (c.getCount()>0){
c.moveToFirst();
do {
String name = c.getString(0);
Integer id = c.getInt(1);
ret.put(name, id);
} while (c.moveToNext());
}
c.close();
db.close();
return ret;
}
/**
* Add a new list to the database
* @param name Name of the lise
* @param id Id of the list
* @param json Full json string object of the list
*/
public void addList(String name, int id, String json) {
ContentValues cv = new ContentValues();
cv.put("name", name);
cv.put("id",id);
cv.put(ACCOUNT_ID,account);
cv.put("list_json",json);
SQLiteDatabase db = tdHelper.getWritableDatabase();
db.insert(TABLE_LISTS, null, cv);
db.close();
}
/**
* Delete the list with the passed ID in the DB
* @param id Id of the list to delete
* @todo Also remove statuses for the passed list
*/
public void removeList(Integer id) {
SQLiteDatabase db = tdHelper.getWritableDatabase();
db.delete(TABLE_LISTS, "id = ? AND " +ACCOUNT_ID_IS, new String[]{id.toString(),account});
db.close();
}
/**
* Store a new Status object in the DB. See {@link twitter4j.Status}
* @param id Id of the status
* @param i_reply_id Id of a status the passed one is a reply to
* @param list_id Id of a list - pseudo IDs apply --see {@link de.bsd.zwitscher.TwitterHelper#getTimeline(twitter4j.Paging, int, boolean)}
* @param status_json
*/
public void storeStatus(long id, long i_reply_id, long list_id, String status_json) {
ContentValues cv = new ContentValues(4);
cv.put("ID", id);
cv.put("I_REP_TO", i_reply_id);
cv.put("LIST_ID", list_id);
cv.put(ACCOUNT_ID,account);
cv.put(STATUS,status_json);
SQLiteDatabase db = tdHelper.getWritableDatabase();
db.insert(TABLE_STATUSES, null, cv);
db.close();
}
/**
* Update the stored TwitterResponse object. This may be necessary when e.g. the
* favorite status has been changed on it.
* @param id Id of the object
* @param status_json Json representation of it.
*/
public void updateStatus(long id, String status_json) {
ContentValues cv = new ContentValues(1);
cv.put(STATUS, status_json);
cv.put(ACCOUNT_ID,account);
SQLiteDatabase db = tdHelper.getWritableDatabase();
db.update(TABLE_STATUSES,cv,"id = ?", new String[]{String.valueOf(id)});
db.close();
}
/**
* Return the blob of one stored status by its (unique) id.
* @param statusId The id of the status
* @return The json_string if the status exists in the DB or null otherwise
*/
public String getStatusObjectById(long statusId) {
SQLiteDatabase db = tdHelper.getReadableDatabase();
String ret = null;
Cursor c;
c= db.query(TABLE_STATUSES,new String[]{STATUS},"id = ? AND " + ACCOUNT_ID_IS,new String[]{String.valueOf(statusId),account},null,null,null);
if (c.getCount()>0){
c.moveToFirst();
ret = c.getString(0);
}
c.close();
db.close();
return ret;
}
/**
* Get all statuses that are marked as a reply to the passed one.
* @param inRepyId Id of the original status
* @return List of Json_objects that represent the replies
*/
public List<String> getReplies(long inRepyId) {
SQLiteDatabase db = tdHelper.getReadableDatabase();
List<String> ret = new ArrayList<String>();
Cursor c ;
c = db.query(TABLE_STATUSES,new String[]{STATUS}, "i_rep_to = ? && " + ACCOUNT_ID_IS
,new String[]{String.valueOf(inRepyId),account},null,null,"ID DESC");
if (c.getCount()>0) {
c.moveToFirst();
do {
String json = c.getString(0);
ret.add(json);
} while (c.moveToNext());
}
c.close();
db.close();
return ret;
}
/**
* Return a list of Responses along for the passed list id.
* @param sinceId What is the oldest status to look after
* @param howMany How many entries shall be returned
* @param list_id From which list?
* @return List of JResponse objects
*/
public List<String> getStatusesObjsOlderThan(long sinceId, int howMany, long list_id) {
List<String> ret = new ArrayList<String>();
SQLiteDatabase db = tdHelper.getReadableDatabase();
Cursor c;
if (sinceId>-1)
c = db.query(TABLE_STATUSES,new String[]{STATUS},"id < ? AND list_id = ? AND " +ACCOUNT_ID_IS,new String[]{String.valueOf(sinceId),String.valueOf(list_id),account},null,null,"ID DESC",String.valueOf(howMany));
else
c = db.query(TABLE_STATUSES,new String[]{STATUS},"list_id = ? AND " + ACCOUNT_ID_IS,new String[]{String.valueOf(list_id),account},null,null,"ID DESC",String.valueOf(howMany));
if (c.getCount()>0){
c.moveToFirst();
do {
String json = c.getString(0);
ret.add(json);
} while (c.moveToNext());
}
c.close();
db.close();
return ret;
}
public List<String> getDirectsOlderThan(int sinceId, int howMany) {
List<String> ret = new ArrayList<String>();
SQLiteDatabase db = tdHelper.getReadableDatabase();
Cursor c;
if (sinceId>-1)
c = db.query(TABLE_DIRECTS,new String[]{"MESSAGE_JSON"},"id < ? AND " +ACCOUNT_ID_IS,new String[]{String.valueOf(sinceId),account},null,null,"ID DESC",String.valueOf(howMany));
else
c = db.query(TABLE_DIRECTS,new String[]{"MESSAGE_JSON"}, ACCOUNT_ID_IS,new String[]{account},null,null,"ID DESC",String.valueOf(howMany));
if (c.getCount()>0){
c.moveToFirst();
do {
String json = c.getString(0);
ret.add(json);
} while (c.moveToNext());
}
c.close();
db.close();
return ret;
}
/**
* Purge the last read table.
*/
public void resetLastRead() {
SQLiteDatabase db = tdHelper.getWritableDatabase();
db.execSQL("DELETE FROM " + TABLE_LAST_READ);
db.close();
}
/**
* Purge the statuses table.
*/
public void cleanTweets() {
SQLiteDatabase db = tdHelper.getWritableDatabase();
db.execSQL("DELETE FROM " + TABLE_STATUSES);
db.close();
}
/**
* Returns a user by its ID from the database if it exists or null.
* @param userId Id of the user
* @param accountId Id of the account to use
* @return Basic JSON string of the user info or null.
*/
public String getUserById(int userId, int accountId) {
SQLiteDatabase db = tdHelper.getReadableDatabase();
String ret = null;
Cursor c;
c = db.query(TABLE_USERS,new String[]{"user_json"},"userId = ? AND " + ACCOUNT_ID + " = ?",new String[] { String.valueOf(userId), String.valueOf(accountId)},null, null, null);
if (c.getCount()>0) {
c.moveToFirst();
ret = c.getString(0);
}
return ret;
}
/**
* Insert a user into the database.
* @param userId The Id of the user to insert
* @param json JSON representation of the User object
*/
public void insertUser(int userId, String json) {
ContentValues cv = new ContentValues(3);
cv.put("userId",userId);
cv.put(ACCOUNT_ID,account);
cv.put("user_json",json);
SQLiteDatabase db = tdHelper.getWritableDatabase();
db.insert(TABLE_USERS,null,cv);
db.close();
}
/**
* Update an existing user in the database.
* @param userId
* @param json
*/
public void updateUser(int userId, String json) {
ContentValues cv = new ContentValues(1);
cv.put("user_json",json);
SQLiteDatabase db = tdHelper.getWritableDatabase();
db.update(TABLE_USERS,cv,"userId = ? AND "+ ACCOUNT_ID + " = ?",new String[] { String.valueOf(userId),account});
db.close();
}
/**
* Insert a direct message into the DB
* @param id ID of the message
* @param json Json string of the message
*/
public void insertDirect(int id , String json) {
ContentValues cv = new ContentValues(3);
cv.put("id",id);
cv.put(ACCOUNT_ID,account);
cv.put("user_json",json);
SQLiteDatabase db = tdHelper.getWritableDatabase();
db.insert(TABLE_USERS,null,cv);
db.close();
}
/**
* Get a direct message from th DB
* @param id ID of the message to look up
* @return JSON string of the message or null if not found
*/
public String getDirectById(int id) {
SQLiteDatabase db = tdHelper.getReadableDatabase();
String ret = null;
Cursor c;
- c = db.query(TABLE_DIRECTS,new String[]{"message_json"},"userId = ? AND " + ACCOUNT_ID_IS,new String[] { String.valueOf(id), account},null, null, null);
+ c = db.query(TABLE_DIRECTS,new String[]{"message_json"},"id = ? AND " + ACCOUNT_ID_IS,new String[] { String.valueOf(id), account},null, null, null);
if (c.getCount()>0) {
c.moveToFirst();
ret = c.getString(0);
}
return ret;
}
/**
* Get the last <i>number</i> direct messages from the DB
* @param number Numer of messages to get
* @return List of messages or empyt list
*/
public List<String> getDirects(int number) {
SQLiteDatabase db = tdHelper.getReadableDatabase();
List<String > ret = new ArrayList<String>();
Cursor c;
c = db.query(TABLE_DIRECTS,new String[]{"message_json"},ACCOUNT_ID_IS,new String[] { account},null, null, "ID DESC",String.valueOf(number));
if (c.getCount()>0) {
c.moveToFirst();
do {
String json = c.getString(0);
ret.add(json);
} while ((c.moveToNext()));
}
return ret;
}
}
| true
| true
|
long getLastRead(int list_id) {
SQLiteDatabase db = tdHelper.getReadableDatabase();
Cursor c = db.query(TABLE_LAST_READ, new String[] {"last_read_id"}, "list_id = ? AND " + ACCOUNT_ID_IS, new String[] {String.valueOf(list_id),account}, null, null, null);
Long ret;
if (c.getCount()==0)
ret = -1L;
else {
c.moveToFirst();
ret = c.getLong(0);
}
c.close();
db.close();
return ret;
}
/**
* Update (or initially store) the last read information of the passed list
* @param list_id List to mark as read
* @param last_read_id Id of the last read status
*/
void updateOrInsertLastRead(int list_id, long last_read_id) {
ContentValues cv = new ContentValues();
cv.put("list_id", list_id);
cv.put("last_read_id", last_read_id);
cv.put(ACCOUNT_ID,account);
SQLiteDatabase db = tdHelper.getWritableDatabase();
int updated = db.update(TABLE_LAST_READ, cv, "list_id = ? AND " + ACCOUNT_ID_IS, new String[] {String.valueOf(list_id),account});
if (updated==0) {
// row not yet present
db.insert(TABLE_LAST_READ, null, cv);
}
db.close();
}
/**
* Return Infos about all lists in the DB
* @return
* @todo return the json object
*/
Map<String, Integer> getLists() {
SQLiteDatabase db = tdHelper.getReadableDatabase();
Map<String,Integer> ret = new HashMap<String,Integer>();
Cursor c = db.query(TABLE_LISTS, new String[] {"name","id"}, ACCOUNT_ID_IS, new String[]{account}, null, null, "name");
if (c.getCount()>0){
c.moveToFirst();
do {
String name = c.getString(0);
Integer id = c.getInt(1);
ret.put(name, id);
} while (c.moveToNext());
}
c.close();
db.close();
return ret;
}
/**
* Add a new list to the database
* @param name Name of the lise
* @param id Id of the list
* @param json Full json string object of the list
*/
public void addList(String name, int id, String json) {
ContentValues cv = new ContentValues();
cv.put("name", name);
cv.put("id",id);
cv.put(ACCOUNT_ID,account);
cv.put("list_json",json);
SQLiteDatabase db = tdHelper.getWritableDatabase();
db.insert(TABLE_LISTS, null, cv);
db.close();
}
/**
* Delete the list with the passed ID in the DB
* @param id Id of the list to delete
* @todo Also remove statuses for the passed list
*/
public void removeList(Integer id) {
SQLiteDatabase db = tdHelper.getWritableDatabase();
db.delete(TABLE_LISTS, "id = ? AND " +ACCOUNT_ID_IS, new String[]{id.toString(),account});
db.close();
}
/**
* Store a new Status object in the DB. See {@link twitter4j.Status}
* @param id Id of the status
* @param i_reply_id Id of a status the passed one is a reply to
* @param list_id Id of a list - pseudo IDs apply --see {@link de.bsd.zwitscher.TwitterHelper#getTimeline(twitter4j.Paging, int, boolean)}
* @param status_json
*/
public void storeStatus(long id, long i_reply_id, long list_id, String status_json) {
ContentValues cv = new ContentValues(4);
cv.put("ID", id);
cv.put("I_REP_TO", i_reply_id);
cv.put("LIST_ID", list_id);
cv.put(ACCOUNT_ID,account);
cv.put(STATUS,status_json);
SQLiteDatabase db = tdHelper.getWritableDatabase();
db.insert(TABLE_STATUSES, null, cv);
db.close();
}
/**
* Update the stored TwitterResponse object. This may be necessary when e.g. the
* favorite status has been changed on it.
* @param id Id of the object
* @param status_json Json representation of it.
*/
public void updateStatus(long id, String status_json) {
ContentValues cv = new ContentValues(1);
cv.put(STATUS, status_json);
cv.put(ACCOUNT_ID,account);
SQLiteDatabase db = tdHelper.getWritableDatabase();
db.update(TABLE_STATUSES,cv,"id = ?", new String[]{String.valueOf(id)});
db.close();
}
/**
* Return the blob of one stored status by its (unique) id.
* @param statusId The id of the status
* @return The json_string if the status exists in the DB or null otherwise
*/
public String getStatusObjectById(long statusId) {
SQLiteDatabase db = tdHelper.getReadableDatabase();
String ret = null;
Cursor c;
c= db.query(TABLE_STATUSES,new String[]{STATUS},"id = ? AND " + ACCOUNT_ID_IS,new String[]{String.valueOf(statusId),account},null,null,null);
if (c.getCount()>0){
c.moveToFirst();
ret = c.getString(0);
}
c.close();
db.close();
return ret;
}
/**
* Get all statuses that are marked as a reply to the passed one.
* @param inRepyId Id of the original status
* @return List of Json_objects that represent the replies
*/
public List<String> getReplies(long inRepyId) {
SQLiteDatabase db = tdHelper.getReadableDatabase();
List<String> ret = new ArrayList<String>();
Cursor c ;
c = db.query(TABLE_STATUSES,new String[]{STATUS}, "i_rep_to = ? && " + ACCOUNT_ID_IS
,new String[]{String.valueOf(inRepyId),account},null,null,"ID DESC");
if (c.getCount()>0) {
c.moveToFirst();
do {
String json = c.getString(0);
ret.add(json);
} while (c.moveToNext());
}
c.close();
db.close();
return ret;
}
/**
* Return a list of Responses along for the passed list id.
* @param sinceId What is the oldest status to look after
* @param howMany How many entries shall be returned
* @param list_id From which list?
* @return List of JResponse objects
*/
public List<String> getStatusesObjsOlderThan(long sinceId, int howMany, long list_id) {
List<String> ret = new ArrayList<String>();
SQLiteDatabase db = tdHelper.getReadableDatabase();
Cursor c;
if (sinceId>-1)
c = db.query(TABLE_STATUSES,new String[]{STATUS},"id < ? AND list_id = ? AND " +ACCOUNT_ID_IS,new String[]{String.valueOf(sinceId),String.valueOf(list_id),account},null,null,"ID DESC",String.valueOf(howMany));
else
c = db.query(TABLE_STATUSES,new String[]{STATUS},"list_id = ? AND " + ACCOUNT_ID_IS,new String[]{String.valueOf(list_id),account},null,null,"ID DESC",String.valueOf(howMany));
if (c.getCount()>0){
c.moveToFirst();
do {
String json = c.getString(0);
ret.add(json);
} while (c.moveToNext());
}
c.close();
db.close();
return ret;
}
public List<String> getDirectsOlderThan(int sinceId, int howMany) {
List<String> ret = new ArrayList<String>();
SQLiteDatabase db = tdHelper.getReadableDatabase();
Cursor c;
if (sinceId>-1)
c = db.query(TABLE_DIRECTS,new String[]{"MESSAGE_JSON"},"id < ? AND " +ACCOUNT_ID_IS,new String[]{String.valueOf(sinceId),account},null,null,"ID DESC",String.valueOf(howMany));
else
c = db.query(TABLE_DIRECTS,new String[]{"MESSAGE_JSON"}, ACCOUNT_ID_IS,new String[]{account},null,null,"ID DESC",String.valueOf(howMany));
if (c.getCount()>0){
c.moveToFirst();
do {
String json = c.getString(0);
ret.add(json);
} while (c.moveToNext());
}
c.close();
db.close();
return ret;
}
/**
* Purge the last read table.
*/
public void resetLastRead() {
SQLiteDatabase db = tdHelper.getWritableDatabase();
db.execSQL("DELETE FROM " + TABLE_LAST_READ);
db.close();
}
/**
* Purge the statuses table.
*/
public void cleanTweets() {
SQLiteDatabase db = tdHelper.getWritableDatabase();
db.execSQL("DELETE FROM " + TABLE_STATUSES);
db.close();
}
/**
* Returns a user by its ID from the database if it exists or null.
* @param userId Id of the user
* @param accountId Id of the account to use
* @return Basic JSON string of the user info or null.
*/
public String getUserById(int userId, int accountId) {
SQLiteDatabase db = tdHelper.getReadableDatabase();
String ret = null;
Cursor c;
c = db.query(TABLE_USERS,new String[]{"user_json"},"userId = ? AND " + ACCOUNT_ID + " = ?",new String[] { String.valueOf(userId), String.valueOf(accountId)},null, null, null);
if (c.getCount()>0) {
c.moveToFirst();
ret = c.getString(0);
}
return ret;
}
/**
* Insert a user into the database.
* @param userId The Id of the user to insert
* @param json JSON representation of the User object
*/
public void insertUser(int userId, String json) {
ContentValues cv = new ContentValues(3);
cv.put("userId",userId);
cv.put(ACCOUNT_ID,account);
cv.put("user_json",json);
SQLiteDatabase db = tdHelper.getWritableDatabase();
db.insert(TABLE_USERS,null,cv);
db.close();
}
/**
* Update an existing user in the database.
* @param userId
* @param json
*/
public void updateUser(int userId, String json) {
ContentValues cv = new ContentValues(1);
cv.put("user_json",json);
SQLiteDatabase db = tdHelper.getWritableDatabase();
db.update(TABLE_USERS,cv,"userId = ? AND "+ ACCOUNT_ID + " = ?",new String[] { String.valueOf(userId),account});
db.close();
}
/**
* Insert a direct message into the DB
* @param id ID of the message
* @param json Json string of the message
*/
public void insertDirect(int id , String json) {
ContentValues cv = new ContentValues(3);
cv.put("id",id);
cv.put(ACCOUNT_ID,account);
cv.put("user_json",json);
SQLiteDatabase db = tdHelper.getWritableDatabase();
db.insert(TABLE_USERS,null,cv);
db.close();
}
/**
* Get a direct message from th DB
* @param id ID of the message to look up
* @return JSON string of the message or null if not found
*/
public String getDirectById(int id) {
SQLiteDatabase db = tdHelper.getReadableDatabase();
String ret = null;
Cursor c;
c = db.query(TABLE_DIRECTS,new String[]{"message_json"},"userId = ? AND " + ACCOUNT_ID_IS,new String[] { String.valueOf(id), account},null, null, null);
if (c.getCount()>0) {
c.moveToFirst();
ret = c.getString(0);
}
return ret;
}
/**
* Get the last <i>number</i> direct messages from the DB
* @param number Numer of messages to get
* @return List of messages or empyt list
*/
public List<String> getDirects(int number) {
SQLiteDatabase db = tdHelper.getReadableDatabase();
List<String > ret = new ArrayList<String>();
Cursor c;
c = db.query(TABLE_DIRECTS,new String[]{"message_json"},ACCOUNT_ID_IS,new String[] { account},null, null, "ID DESC",String.valueOf(number));
if (c.getCount()>0) {
c.moveToFirst();
do {
String json = c.getString(0);
ret.add(json);
} while ((c.moveToNext()));
}
return ret;
}
}
|
long getLastRead(int list_id) {
SQLiteDatabase db = tdHelper.getReadableDatabase();
Cursor c = db.query(TABLE_LAST_READ, new String[] {"last_read_id"}, "list_id = ? AND " + ACCOUNT_ID_IS, new String[] {String.valueOf(list_id),account}, null, null, null);
Long ret;
if (c.getCount()==0)
ret = -1L;
else {
c.moveToFirst();
ret = c.getLong(0);
}
c.close();
db.close();
return ret;
}
/**
* Update (or initially store) the last read information of the passed list
* @param list_id List to mark as read
* @param last_read_id Id of the last read status
*/
void updateOrInsertLastRead(int list_id, long last_read_id) {
ContentValues cv = new ContentValues();
cv.put("list_id", list_id);
cv.put("last_read_id", last_read_id);
cv.put(ACCOUNT_ID,account);
SQLiteDatabase db = tdHelper.getWritableDatabase();
int updated = db.update(TABLE_LAST_READ, cv, "list_id = ? AND " + ACCOUNT_ID_IS, new String[] {String.valueOf(list_id),account});
if (updated==0) {
// row not yet present
db.insert(TABLE_LAST_READ, null, cv);
}
db.close();
}
/**
* Return Infos about all lists in the DB
* @return
* @todo return the json object
*/
Map<String, Integer> getLists() {
SQLiteDatabase db = tdHelper.getReadableDatabase();
Map<String,Integer> ret = new HashMap<String,Integer>();
Cursor c = db.query(TABLE_LISTS, new String[] {"name","id"}, ACCOUNT_ID_IS, new String[]{account}, null, null, "name");
if (c.getCount()>0){
c.moveToFirst();
do {
String name = c.getString(0);
Integer id = c.getInt(1);
ret.put(name, id);
} while (c.moveToNext());
}
c.close();
db.close();
return ret;
}
/**
* Add a new list to the database
* @param name Name of the lise
* @param id Id of the list
* @param json Full json string object of the list
*/
public void addList(String name, int id, String json) {
ContentValues cv = new ContentValues();
cv.put("name", name);
cv.put("id",id);
cv.put(ACCOUNT_ID,account);
cv.put("list_json",json);
SQLiteDatabase db = tdHelper.getWritableDatabase();
db.insert(TABLE_LISTS, null, cv);
db.close();
}
/**
* Delete the list with the passed ID in the DB
* @param id Id of the list to delete
* @todo Also remove statuses for the passed list
*/
public void removeList(Integer id) {
SQLiteDatabase db = tdHelper.getWritableDatabase();
db.delete(TABLE_LISTS, "id = ? AND " +ACCOUNT_ID_IS, new String[]{id.toString(),account});
db.close();
}
/**
* Store a new Status object in the DB. See {@link twitter4j.Status}
* @param id Id of the status
* @param i_reply_id Id of a status the passed one is a reply to
* @param list_id Id of a list - pseudo IDs apply --see {@link de.bsd.zwitscher.TwitterHelper#getTimeline(twitter4j.Paging, int, boolean)}
* @param status_json
*/
public void storeStatus(long id, long i_reply_id, long list_id, String status_json) {
ContentValues cv = new ContentValues(4);
cv.put("ID", id);
cv.put("I_REP_TO", i_reply_id);
cv.put("LIST_ID", list_id);
cv.put(ACCOUNT_ID,account);
cv.put(STATUS,status_json);
SQLiteDatabase db = tdHelper.getWritableDatabase();
db.insert(TABLE_STATUSES, null, cv);
db.close();
}
/**
* Update the stored TwitterResponse object. This may be necessary when e.g. the
* favorite status has been changed on it.
* @param id Id of the object
* @param status_json Json representation of it.
*/
public void updateStatus(long id, String status_json) {
ContentValues cv = new ContentValues(1);
cv.put(STATUS, status_json);
cv.put(ACCOUNT_ID,account);
SQLiteDatabase db = tdHelper.getWritableDatabase();
db.update(TABLE_STATUSES,cv,"id = ?", new String[]{String.valueOf(id)});
db.close();
}
/**
* Return the blob of one stored status by its (unique) id.
* @param statusId The id of the status
* @return The json_string if the status exists in the DB or null otherwise
*/
public String getStatusObjectById(long statusId) {
SQLiteDatabase db = tdHelper.getReadableDatabase();
String ret = null;
Cursor c;
c= db.query(TABLE_STATUSES,new String[]{STATUS},"id = ? AND " + ACCOUNT_ID_IS,new String[]{String.valueOf(statusId),account},null,null,null);
if (c.getCount()>0){
c.moveToFirst();
ret = c.getString(0);
}
c.close();
db.close();
return ret;
}
/**
* Get all statuses that are marked as a reply to the passed one.
* @param inRepyId Id of the original status
* @return List of Json_objects that represent the replies
*/
public List<String> getReplies(long inRepyId) {
SQLiteDatabase db = tdHelper.getReadableDatabase();
List<String> ret = new ArrayList<String>();
Cursor c ;
c = db.query(TABLE_STATUSES,new String[]{STATUS}, "i_rep_to = ? && " + ACCOUNT_ID_IS
,new String[]{String.valueOf(inRepyId),account},null,null,"ID DESC");
if (c.getCount()>0) {
c.moveToFirst();
do {
String json = c.getString(0);
ret.add(json);
} while (c.moveToNext());
}
c.close();
db.close();
return ret;
}
/**
 * Return a list of Responses for the passed list id.
 * @param sinceId Only return statuses with an id strictly smaller than this;
 *                pass a value &lt; 0 (e.g. -1) to disable the bound.
 * @param howMany How many entries shall be returned at most
 * @param list_id From which list?
 * @return List of JSON status strings, newest first (possibly empty)
 */
public List<String> getStatusesObjsOlderThan(long sinceId, int howMany, long list_id) {
    List<String> ret = new ArrayList<String>();
    SQLiteDatabase db = tdHelper.getReadableDatabase();
    try {
        Cursor c;
        if (sinceId > -1) {
            c = db.query(TABLE_STATUSES, new String[]{STATUS},
                    "id < ? AND list_id = ? AND " + ACCOUNT_ID_IS,
                    new String[]{String.valueOf(sinceId), String.valueOf(list_id), account},
                    null, null, "ID DESC", String.valueOf(howMany));
        } else {
            c = db.query(TABLE_STATUSES, new String[]{STATUS},
                    "list_id = ? AND " + ACCOUNT_ID_IS,
                    new String[]{String.valueOf(list_id), account},
                    null, null, "ID DESC", String.valueOf(howMany));
        }
        try {
            while (c.moveToNext()) {
                ret.add(c.getString(0));
            }
        } finally {
            c.close();
        }
    } finally {
        // Guarantee the handle is released even if query() throws.
        db.close();
    }
    return ret;
}
/**
 * Return direct messages for the current account, newest first.
 * @param sinceId Only return messages with an id strictly smaller than this;
 *                pass a value &lt; 0 (e.g. -1) to disable the bound.
 * @param howMany Maximum number of messages to return
 * @return List of JSON message strings (possibly empty)
 */
public List<String> getDirectsOlderThan(int sinceId, int howMany) {
    List<String> ret = new ArrayList<String>();
    SQLiteDatabase db = tdHelper.getReadableDatabase();
    try {
        Cursor c;
        if (sinceId > -1) {
            c = db.query(TABLE_DIRECTS, new String[]{"MESSAGE_JSON"},
                    "id < ? AND " + ACCOUNT_ID_IS,
                    new String[]{String.valueOf(sinceId), account},
                    null, null, "ID DESC", String.valueOf(howMany));
        } else {
            c = db.query(TABLE_DIRECTS, new String[]{"MESSAGE_JSON"},
                    ACCOUNT_ID_IS, new String[]{account},
                    null, null, "ID DESC", String.valueOf(howMany));
        }
        try {
            while (c.moveToNext()) {
                ret.add(c.getString(0));
            }
        } finally {
            c.close();
        }
    } finally {
        // Guarantee the handle is released even if query() throws.
        db.close();
    }
    return ret;
}
/**
 * Purge the last read table, removing every stored row.
 */
public void resetLastRead() {
    SQLiteDatabase database = tdHelper.getWritableDatabase();
    // Equivalent to "DELETE FROM <table>" with no WHERE clause.
    database.delete(TABLE_LAST_READ, null, null);
    database.close();
}
/**
 * Purge the statuses table, removing every cached tweet.
 */
public void cleanTweets() {
    SQLiteDatabase database = tdHelper.getWritableDatabase();
    // Equivalent to "DELETE FROM <table>" with no WHERE clause.
    database.delete(TABLE_STATUSES, null, null);
    database.close();
}
/**
 * Returns a user by its ID from the database if it exists or null.
 * @param userId Id of the user
 * @param accountId Id of the account to use
 * @return Basic JSON string of the user info or null.
 */
public String getUserById(int userId, int accountId) {
    SQLiteDatabase db = tdHelper.getReadableDatabase();
    String ret = null;
    try {
        Cursor c = db.query(TABLE_USERS, new String[]{"user_json"},
                "userId = ? AND " + ACCOUNT_ID + " = ?",
                new String[]{String.valueOf(userId), String.valueOf(accountId)},
                null, null, null);
        try {
            if (c.moveToFirst()) {
                ret = c.getString(0);
            }
        } finally {
            // BUG FIX: the original never closed the cursor, leaking it on
            // every call.
            c.close();
        }
    } finally {
        // BUG FIX: the original never closed the database handle either.
        db.close();
    }
    return ret;
}
/**
 * Insert a user into the database, keyed by user id and the current account.
 * @param userId The Id of the user to insert
 * @param json JSON representation of the User object
 */
public void insertUser(int userId, String json) {
    // Build the row: id, owning account, and the serialized user object.
    ContentValues row = new ContentValues(3);
    row.put("userId", userId);
    row.put(ACCOUNT_ID, account);
    row.put("user_json", json);
    SQLiteDatabase database = tdHelper.getWritableDatabase();
    database.insert(TABLE_USERS, null, row);
    database.close();
}
/**
 * Update an existing user in the database, scoped to the current account.
 * @param userId Id of the user row to update
 * @param json New JSON representation of the User object
 */
public void updateUser(int userId, String json) {
    ContentValues cv = new ContentValues(1);
    cv.put("user_json", json);
    SQLiteDatabase db = tdHelper.getWritableDatabase();
    try {
        db.update(TABLE_USERS, cv, "userId = ? AND " + ACCOUNT_ID + " = ?",
                new String[]{String.valueOf(userId), account});
    } finally {
        // Release the handle even if the update throws.
        db.close();
    }
}
/**
 * Insert a direct message into the DB.
 * @param id ID of the message
 * @param json Json string of the message
 */
public void insertDirect(int id, String json) {
    ContentValues cv = new ContentValues(3);
    cv.put("id", id);
    cv.put(ACCOUNT_ID, account);
    // BUG FIX: the original stored the payload under "user_json", but every
    // reader (getDirectById, getDirects, getDirectsOlderThan) queries the
    // "message_json" column.
    cv.put("message_json", json);
    SQLiteDatabase db = tdHelper.getWritableDatabase();
    try {
        // BUG FIX: the original inserted into TABLE_USERS, so direct messages
        // could never be read back from TABLE_DIRECTS.
        db.insert(TABLE_DIRECTS, null, cv);
    } finally {
        db.close();
    }
}
/**
 * Get a direct message from the DB.
 * @param id ID of the message to look up
 * @return JSON string of the message or null if not found
 */
public String getDirectById(int id) {
    SQLiteDatabase db = tdHelper.getReadableDatabase();
    String ret = null;
    try {
        Cursor c = db.query(TABLE_DIRECTS, new String[]{"message_json"},
                "id = ? AND " + ACCOUNT_ID_IS,
                new String[]{String.valueOf(id), account},
                null, null, null);
        try {
            if (c.moveToFirst()) {
                ret = c.getString(0);
            }
        } finally {
            // BUG FIX: the original never closed the cursor (leak).
            c.close();
        }
    } finally {
        // BUG FIX: the original never closed the database handle (leak).
        db.close();
    }
    return ret;
}
/**
 * Get the last <i>number</i> direct messages from the DB, newest first.
 * @param number Number of messages to get
 * @return List of messages or empty list
 */
public List<String> getDirects(int number) {
    SQLiteDatabase db = tdHelper.getReadableDatabase();
    List<String> ret = new ArrayList<String>();
    try {
        Cursor c = db.query(TABLE_DIRECTS, new String[]{"message_json"},
                ACCOUNT_ID_IS, new String[]{account},
                null, null, "ID DESC", String.valueOf(number));
        try {
            while (c.moveToNext()) {
                ret.add(c.getString(0));
            }
        } finally {
            // BUG FIX: the original never closed the cursor (leak).
            c.close();
        }
    } finally {
        // BUG FIX: the original never closed the database handle (leak).
        db.close();
    }
    return ret;
}
}
|
diff --git a/src/com/android/deskclock/Alarms.java b/src/com/android/deskclock/Alarms.java
index 1c44bdadd..7bb1ec9e9 100644
--- a/src/com/android/deskclock/Alarms.java
+++ b/src/com/android/deskclock/Alarms.java
@@ -1,630 +1,630 @@
/*
* Copyright (C) 2007 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.deskclock;
import android.app.AlarmManager;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.ContentResolver;
import android.content.ContentUris;
import android.content.ContentValues;
import android.content.Context;
import android.content.CursorLoader;
import android.content.Intent;
import android.content.SharedPreferences;
import android.database.Cursor;
import android.net.Uri;
import android.os.Parcel;
import android.provider.Settings;
import android.text.TextUtils;
import android.text.format.DateFormat;
import android.util.Pair;
import java.util.Calendar;
import java.util.HashSet;
import java.util.Set;
/**
* The Alarms provider supplies info about Alarm Clock settings
*/
public class Alarms {
static final String PREFERENCES = "AlarmClock";
// This action triggers the AlarmReceiver as well as the AlarmKlaxon. It
// is a public action used in the manifest for receiving Alarm broadcasts
// from the alarm manager.
public static final String ALARM_ALERT_ACTION = "com.android.deskclock.ALARM_ALERT";
// A public action sent by AlarmKlaxon when the alarm has stopped sounding
// for any reason (e.g. because it has been dismissed from AlarmAlertFullScreen,
// or killed due to an incoming phone call, etc).
public static final String ALARM_DONE_ACTION = "com.android.deskclock.ALARM_DONE";
// AlarmAlertFullScreen listens for this broadcast intent, so that other applications
// can snooze the alarm (after ALARM_ALERT_ACTION and before ALARM_DONE_ACTION).
public static final String ALARM_SNOOZE_ACTION = "com.android.deskclock.ALARM_SNOOZE";
// AlarmAlertFullScreen listens for this broadcast intent, so that other applications
// can dismiss the alarm (after ALARM_ALERT_ACTION and before ALARM_DONE_ACTION).
public static final String ALARM_DISMISS_ACTION = "com.android.deskclock.ALARM_DISMISS";
// A public action sent by AlarmAlertFullScreen when a snoozed alarm was dismissed due
// to it handling ALARM_DISMISS_ACTION cancelled
public static final String ALARM_SNOOZE_CANCELLED = "com.android.deskclock.ALARM_SNOOZE_CANCELLED";
// A broadcast sent every time the next alarm time is set in the system
public static final String NEXT_ALARM_TIME_SET = "com.android.deskclock.NEXT_ALARM_TIME_SET";
// This is a private action used by the AlarmKlaxon to update the UI to
// show the alarm has been killed.
public static final String ALARM_KILLED = "alarm_killed";
// Extra in the ALARM_KILLED intent to indicate to the user how long the
// alarm played before being killed.
public static final String ALARM_KILLED_TIMEOUT = "alarm_killed_timeout";
// Extra in the ALARM_KILLED intent to indicate when alarm was replaced
public static final String ALARM_REPLACED = "alarm_replaced";
// This string is used to indicate a silent alarm in the db.
public static final String ALARM_ALERT_SILENT = "silent";
// This intent is sent from the notification when the user cancels the
// snooze alert.
public static final String CANCEL_SNOOZE = "cancel_snooze";
// This string is used when passing an Alarm object through an intent.
public static final String ALARM_INTENT_EXTRA = "intent.extra.alarm";
// This extra is the raw Alarm object data. It is used in the
// AlarmManagerService to avoid a ClassNotFoundException when filling in
// the Intent extras.
public static final String ALARM_RAW_DATA = "intent.extra.alarm_raw";
private static final String PREF_SNOOZE_IDS = "snooze_ids";
private static final String PREF_SNOOZE_TIME = "snooze_time";
private final static String DM12 = "E h:mm aa";
private final static String DM24 = "E kk:mm";
private final static String M12 = "h:mm aa";
// Shared with DigitalClock
final static String M24 = "kk:mm";
final static int INVALID_ALARM_ID = -1;
/**
* Creates a new Alarm and fills in the given alarm's id.
*/
public static long addAlarm(Context context, Alarm alarm) {
ContentValues values = createContentValues(alarm);
Uri uri = context.getContentResolver().insert(
Alarm.Columns.CONTENT_URI, values);
alarm.id = (int) ContentUris.parseId(uri);
long timeInMillis = alarm.calculateAlarmTime();
if (alarm.enabled) {
clearSnoozeIfNeeded(context, timeInMillis);
}
setNextAlert(context);
return timeInMillis;
}
/**
* Removes an existing Alarm. If this alarm is snoozing, disables
* snooze. Sets next alert.
*/
public static void deleteAlarm(Context context, int alarmId) {
if (alarmId == INVALID_ALARM_ID) return;
ContentResolver contentResolver = context.getContentResolver();
/* If alarm is snoozing, lose it */
disableSnoozeAlert(context, alarmId);
Uri uri = ContentUris.withAppendedId(Alarm.Columns.CONTENT_URI, alarmId);
contentResolver.delete(uri, "", null);
setNextAlert(context);
}
public static CursorLoader getAlarmsCursorLoader(Context context) {
return new CursorLoader(context, Alarm.Columns.CONTENT_URI,
Alarm.Columns.ALARM_QUERY_COLUMNS, null, null, Alarm.Columns.DEFAULT_SORT_ORDER);
}
/**
* Queries all alarms
* @return cursor over all alarms
*/
public static Cursor getAlarmsCursor(ContentResolver contentResolver) {
return contentResolver.query(
Alarm.Columns.CONTENT_URI, Alarm.Columns.ALARM_QUERY_COLUMNS,
null, null, Alarm.Columns.DEFAULT_SORT_ORDER);
}
// Private method to get a more limited set of alarms from the database.
private static Cursor getFilteredAlarmsCursor(
ContentResolver contentResolver) {
return contentResolver.query(Alarm.Columns.CONTENT_URI,
Alarm.Columns.ALARM_QUERY_COLUMNS, Alarm.Columns.WHERE_ENABLED,
null, null);
}
private static ContentValues createContentValues(Alarm alarm) {
ContentValues values = new ContentValues(8);
// -1 means generate new id.
if (alarm.id != -1) {
values.put(Alarm.Columns._ID, alarm.id);
}
values.put(Alarm.Columns.ENABLED, alarm.enabled ? 1 : 0);
values.put(Alarm.Columns.HOUR, alarm.hour);
values.put(Alarm.Columns.MINUTES, alarm.minutes);
// We don't need this field anymore
values.put(Alarm.Columns.ALARM_TIME, 0);
values.put(Alarm.Columns.DAYS_OF_WEEK, alarm.daysOfWeek.getCoded());
values.put(Alarm.Columns.VIBRATE, alarm.vibrate);
values.put(Alarm.Columns.MESSAGE, alarm.label);
// A null alert Uri indicates a silent alarm.
values.put(Alarm.Columns.ALERT, alarm.alert == null ? ALARM_ALERT_SILENT
: alarm.alert.toString());
return values;
}
private static void clearSnoozeIfNeeded(Context context, long alarmTime) {
// If this alarm fires before the next snooze, clear the snooze to
// enable this alarm.
SharedPreferences prefs = context.getSharedPreferences(PREFERENCES, 0);
// Get the list of snoozed alarms
final Set<String> snoozedIds = prefs.getStringSet(PREF_SNOOZE_IDS, new HashSet<String>());
for (String snoozedAlarm : snoozedIds) {
final long snoozeTime = prefs.getLong(getAlarmPrefSnoozeTimeKey(snoozedAlarm), 0);
if (alarmTime < snoozeTime) {
final int alarmId = Integer.parseInt(snoozedAlarm);
clearSnoozePreference(context, prefs, alarmId);
}
}
}
/**
* Return an Alarm object representing the alarm id in the database.
* Returns null if no alarm exists.
*/
public static Alarm getAlarm(ContentResolver contentResolver, int alarmId) {
Cursor cursor = contentResolver.query(
ContentUris.withAppendedId(Alarm.Columns.CONTENT_URI, alarmId),
Alarm.Columns.ALARM_QUERY_COLUMNS,
null, null, null);
Alarm alarm = null;
if (cursor != null) {
if (cursor.moveToFirst()) {
alarm = new Alarm(cursor);
}
cursor.close();
}
return alarm;
}
/**
* A convenience method to set an alarm in the Alarms
* content provider.
* @return Time when the alarm will fire. Or < 1 if update failed.
*/
public static long setAlarm(Context context, Alarm alarm) {
ContentValues values = createContentValues(alarm);
ContentResolver resolver = context.getContentResolver();
long rowsUpdated = resolver.update(
ContentUris.withAppendedId(Alarm.Columns.CONTENT_URI, alarm.id),
values, null, null);
if (rowsUpdated < 1) {
Log.e("Error updating alarm " + alarm);
return rowsUpdated;
}
long timeInMillis = alarm.calculateAlarmTime();
if (alarm.enabled) {
// Disable the snooze if we just changed the snoozed alarm. This
// only does work if the snoozed alarm is the same as the given
// alarm.
// TODO: disableSnoozeAlert should have a better name.
disableSnoozeAlert(context, alarm.id);
// Disable the snooze if this alarm fires before the snoozed alarm.
// This works on every alarm since the user most likely intends to
// have the modified alarm fire next.
clearSnoozeIfNeeded(context, timeInMillis);
}
setNextAlert(context);
return timeInMillis;
}
/**
* A convenience method to enable or disable an alarm.
*
* @param id corresponds to the _id column
* @param enabled corresponds to the ENABLED column
*/
public static void enableAlarm(
final Context context, final int id, boolean enabled) {
enableAlarmInternal(context, id, enabled);
setNextAlert(context);
}
private static void enableAlarmInternal(final Context context,
final int id, boolean enabled) {
enableAlarmInternal(context, getAlarm(context.getContentResolver(), id),
enabled);
}
private static void enableAlarmInternal(final Context context,
final Alarm alarm, boolean enabled) {
if (alarm == null) {
return;
}
ContentResolver resolver = context.getContentResolver();
ContentValues values = new ContentValues(2);
values.put(Alarm.Columns.ENABLED, enabled ? 1 : 0);
if (!enabled) {
// Clear the snooze if the id matches.
disableSnoozeAlert(context, alarm.id);
}
resolver.update(ContentUris.withAppendedId(
Alarm.Columns.CONTENT_URI, alarm.id), values, null, null);
}
private static Pair<Alarm, Long> calculateNextAlert(final Context context) {
long minTime = Long.MAX_VALUE;
long now = System.currentTimeMillis();
final SharedPreferences prefs = context.getSharedPreferences(PREFERENCES, 0);
Set<Alarm> alarms = new HashSet<Alarm>();
// We need to to build the list of alarms from both the snoozed list and the scheduled
// list. For a non-repeating alarm, when it goes of, it becomes disabled. A snoozed
// non-repeating alarm is not in the active list in the database.
// first go through the snoozed alarms
final Set<String> snoozedIds = prefs.getStringSet(PREF_SNOOZE_IDS, new HashSet<String>());
for (String snoozedAlarm : snoozedIds) {
final int alarmId = Integer.parseInt(snoozedAlarm);
final Alarm a = getAlarm(context.getContentResolver(), alarmId);
alarms.add(a);
}
// Now add the scheduled alarms
final Cursor cursor = getFilteredAlarmsCursor(context.getContentResolver());
if (cursor != null) {
try {
if (cursor.moveToFirst()) {
do {
final Alarm a = new Alarm(cursor);
alarms.add(a);
} while (cursor.moveToNext());
}
} finally {
cursor.close();
}
}
Alarm alarm = null;
Long alarmTime = null;
for (Alarm a : alarms) {
// Update the alarm if it has been snoozed
alarmTime = hasAlarmBeenSnoozed(prefs, a.id) ?
prefs.getLong(getAlarmPrefSnoozeTimeKey(a.id), -1) :
a.calculateAlarmTime();
if (alarmTime < now) {
Log.v("Disabling expired alarm set for " + Log.formatTime(alarmTime));
// Expired alarm, disable it and move along.
enableAlarmInternal(context, a, false);
continue;
}
if (alarmTime < minTime) {
minTime = alarmTime;
alarm = a;
}
}
- return alarm != null ? Pair.create(alarm, alarmTime) : null;
+ return alarm != null ? Pair.create(alarm, minTime) : null;
}
/**
* Disables non-repeating alarms that have passed. Called at
* boot.
*/
public static void disableExpiredAlarms(final Context context) {
Cursor cur = getFilteredAlarmsCursor(context.getContentResolver());
long now = System.currentTimeMillis();
try {
if (cur.moveToFirst()) {
do {
Alarm alarm = new Alarm(cur);
// Ignore repeatable alarms
if (alarm.daysOfWeek.isRepeatSet()) {
continue;
}
long alarmTime = alarm.calculateAlarmTime();
if (alarmTime < now) {
Log.v("Disabling expired alarm set for " + Log.formatTime(alarmTime));
enableAlarmInternal(context, alarm, false);
}
} while (cur.moveToNext());
}
} finally {
cur.close();
}
}
/**
* Called at system startup, on time/timezone change, and whenever
* the user changes alarm settings. Activates snooze if set,
* otherwise loads all alarms, activates next alert.
*/
public static void setNextAlert(final Context context) {
Pair<Alarm, Long> alarm = calculateNextAlert(context);
if (alarm != null) {
enableAlert(context, alarm.first, alarm.second);
} else {
disableAlert(context);
}
Intent i = new Intent(NEXT_ALARM_TIME_SET);
context.sendBroadcast(i);
}
/**
* Sets alert in AlarmManger and StatusBar. This is what will
* actually launch the alert when the alarm triggers.
*
* @param alarm Alarm.
* @param atTimeInMillis milliseconds since epoch
*/
private static void enableAlert(Context context, final Alarm alarm,
final long atTimeInMillis) {
AlarmManager am = (AlarmManager)
context.getSystemService(Context.ALARM_SERVICE);
// Intentionally verbose: always log the alarm time to provide useful
// information in bug reports.
Log.v("Alarm set for id=" + alarm.id + " " + Log.formatTime(atTimeInMillis));
Intent intent = new Intent(ALARM_ALERT_ACTION);
// XXX: This is a slight hack to avoid an exception in the remote
// AlarmManagerService process. The AlarmManager adds extra data to
// this Intent which causes it to inflate. Since the remote process
// does not know about the Alarm class, it throws a
// ClassNotFoundException.
//
// To avoid this, we marshall the data ourselves and then parcel a plain
// byte[] array. The AlarmReceiver class knows to build the Alarm
// object from the byte[] array.
Parcel out = Parcel.obtain();
alarm.writeToParcel(out, 0);
out.setDataPosition(0);
intent.putExtra(ALARM_RAW_DATA, out.marshall());
PendingIntent sender = PendingIntent.getBroadcast(
context, 0, intent, PendingIntent.FLAG_CANCEL_CURRENT);
if (Utils.isKeyLimePieOrLater()) {
am.setExact(AlarmManager.RTC_WAKEUP, atTimeInMillis, sender);
} else {
am.set(AlarmManager.RTC_WAKEUP, atTimeInMillis, sender);
}
setStatusBarIcon(context, true);
Calendar c = Calendar.getInstance();
c.setTimeInMillis(atTimeInMillis);
String timeString = formatDayAndTime(context, c);
saveNextAlarm(context, timeString);
}
/**
* Disables alert in AlarmManager and StatusBar.
*
* @param context The context
*/
static void disableAlert(Context context) {
AlarmManager am = (AlarmManager)
context.getSystemService(Context.ALARM_SERVICE);
PendingIntent sender = PendingIntent.getBroadcast(
context, 0, new Intent(ALARM_ALERT_ACTION),
PendingIntent.FLAG_CANCEL_CURRENT);
am.cancel(sender);
setStatusBarIcon(context, false);
// Intentionally verbose: always log the lack of a next alarm to provide useful
// information in bug reports.
Log.v("No next alarm");
saveNextAlarm(context, "");
}
// Snoozes are affected by timezone changes, but this shouldn't be a problem
// for real use-cases.
static void saveSnoozeAlert(final Context context, final int id,
final long time) {
SharedPreferences prefs = context.getSharedPreferences(PREFERENCES, 0);
if (id == INVALID_ALARM_ID) {
clearAllSnoozePreferences(context, prefs);
} else {
final Set<String> snoozedIds =
prefs.getStringSet(PREF_SNOOZE_IDS, new HashSet<String>());
snoozedIds.add(Integer.toString(id));
final SharedPreferences.Editor ed = prefs.edit();
ed.putStringSet(PREF_SNOOZE_IDS, snoozedIds);
ed.putLong(getAlarmPrefSnoozeTimeKey(id), time);
ed.apply();
}
// Set the next alert after updating the snooze.
setNextAlert(context);
}
private static String getAlarmPrefSnoozeTimeKey(int id) {
return getAlarmPrefSnoozeTimeKey(Integer.toString(id));
}
private static String getAlarmPrefSnoozeTimeKey(String id) {
return PREF_SNOOZE_TIME + id;
}
/**
* Disable the snooze alert if the given id matches the snooze id.
*/
static void disableSnoozeAlert(final Context context, final int id) {
SharedPreferences prefs = context.getSharedPreferences(PREFERENCES, 0);
if (hasAlarmBeenSnoozed(prefs, id)) {
// This is the same id so clear the shared prefs.
clearSnoozePreference(context, prefs, id);
}
}
// Helper to remove the snooze preference. Do not use clear because that
// will erase the clock preferences. Also clear the snooze notification in
// the window shade.
private static void clearSnoozePreference(final Context context,
final SharedPreferences prefs, final int id) {
final String alarmStr = Integer.toString(id);
final Set<String> snoozedIds =
prefs.getStringSet(PREF_SNOOZE_IDS, new HashSet<String>());
if (snoozedIds.contains(alarmStr)) {
NotificationManager nm = (NotificationManager)
context.getSystemService(Context.NOTIFICATION_SERVICE);
nm.cancel(id);
}
final SharedPreferences.Editor ed = prefs.edit();
snoozedIds.remove(alarmStr);
ed.putStringSet(PREF_SNOOZE_IDS, snoozedIds);
ed.remove(getAlarmPrefSnoozeTimeKey(alarmStr));
ed.apply();
}
private static void clearAllSnoozePreferences(final Context context,
final SharedPreferences prefs) {
NotificationManager nm = (NotificationManager)
context.getSystemService(Context.NOTIFICATION_SERVICE);
final Set<String> snoozedIds =
prefs.getStringSet(PREF_SNOOZE_IDS, new HashSet<String>());
final SharedPreferences.Editor ed = prefs.edit();
for (String snoozeId : snoozedIds) {
nm.cancel(Integer.parseInt(snoozeId));
ed.remove(getAlarmPrefSnoozeTimeKey(snoozeId));
}
ed.remove(PREF_SNOOZE_IDS);
ed.apply();
}
private static boolean hasAlarmBeenSnoozed(final SharedPreferences prefs, final int alarmId) {
final Set<String> snoozedIds = prefs.getStringSet(PREF_SNOOZE_IDS, null);
// Return true if there a valid snoozed alarmId was saved
return snoozedIds != null && snoozedIds.contains(Integer.toString(alarmId));
}
/**
* Tells the StatusBar whether the alarm is enabled or disabled
*/
private static void setStatusBarIcon(Context context, boolean enabled) {
Intent alarmChanged = new Intent("android.intent.action.ALARM_CHANGED");
alarmChanged.putExtra("alarmSet", enabled);
context.sendBroadcast(alarmChanged);
}
/**
* Given an alarm in hours and minutes, return a time suitable for
* setting in AlarmManager.
*/
static Calendar calculateAlarm(int hour, int minute,
Alarm.DaysOfWeek daysOfWeek) {
// start with now
Calendar c = Calendar.getInstance();
c.setTimeInMillis(System.currentTimeMillis());
int nowHour = c.get(Calendar.HOUR_OF_DAY);
int nowMinute = c.get(Calendar.MINUTE);
// if alarm is behind current time, advance one day
if ((hour < nowHour || (hour == nowHour && minute <= nowMinute))) {
c.add(Calendar.DAY_OF_YEAR, 1);
}
c.set(Calendar.HOUR_OF_DAY, hour);
c.set(Calendar.MINUTE, minute);
c.set(Calendar.SECOND, 0);
c.set(Calendar.MILLISECOND, 0);
int addDays = daysOfWeek.getNextAlarm(c);
if (addDays > 0) c.add(Calendar.DAY_OF_WEEK, addDays);
return c;
}
static String formatTime(final Context context, int hour, int minute,
Alarm.DaysOfWeek daysOfWeek) {
Calendar c = calculateAlarm(hour, minute, daysOfWeek);
return formatTime(context, c);
}
/* used by AlarmAlert */
static String formatTime(final Context context, Calendar c) {
String format = get24HourMode(context) ? M24 : M12;
return (c == null) ? "" : (String)DateFormat.format(format, c);
}
/**
* Shows day and time -- used for lock screen
*/
private static String formatDayAndTime(final Context context, Calendar c) {
String format = get24HourMode(context) ? DM24 : DM12;
return (c == null) ? "" : (String)DateFormat.format(format, c);
}
/**
* Save time of the next alarm, as a formatted string, into the system
* settings so those who care can make use of it.
*/
static void saveNextAlarm(final Context context, String timeString) {
Log.v("Setting next alarm string in system to " +
(TextUtils.isEmpty(timeString) ? "null" : timeString));
Settings.System.putString(context.getContentResolver(),
Settings.System.NEXT_ALARM_FORMATTED,
timeString);
}
/**
* @return true if clock is set to 24-hour mode
*/
public static boolean get24HourMode(final Context context) {
return android.text.format.DateFormat.is24HourFormat(context);
}
}
| true
| true
|
private static Pair<Alarm, Long> calculateNextAlert(final Context context) {
long minTime = Long.MAX_VALUE;
long now = System.currentTimeMillis();
final SharedPreferences prefs = context.getSharedPreferences(PREFERENCES, 0);
Set<Alarm> alarms = new HashSet<Alarm>();
// We need to to build the list of alarms from both the snoozed list and the scheduled
// list. For a non-repeating alarm, when it goes of, it becomes disabled. A snoozed
// non-repeating alarm is not in the active list in the database.
// first go through the snoozed alarms
final Set<String> snoozedIds = prefs.getStringSet(PREF_SNOOZE_IDS, new HashSet<String>());
for (String snoozedAlarm : snoozedIds) {
final int alarmId = Integer.parseInt(snoozedAlarm);
final Alarm a = getAlarm(context.getContentResolver(), alarmId);
alarms.add(a);
}
// Now add the scheduled alarms
final Cursor cursor = getFilteredAlarmsCursor(context.getContentResolver());
if (cursor != null) {
try {
if (cursor.moveToFirst()) {
do {
final Alarm a = new Alarm(cursor);
alarms.add(a);
} while (cursor.moveToNext());
}
} finally {
cursor.close();
}
}
Alarm alarm = null;
Long alarmTime = null;
for (Alarm a : alarms) {
// Update the alarm if it has been snoozed
alarmTime = hasAlarmBeenSnoozed(prefs, a.id) ?
prefs.getLong(getAlarmPrefSnoozeTimeKey(a.id), -1) :
a.calculateAlarmTime();
if (alarmTime < now) {
Log.v("Disabling expired alarm set for " + Log.formatTime(alarmTime));
// Expired alarm, disable it and move along.
enableAlarmInternal(context, a, false);
continue;
}
if (alarmTime < minTime) {
minTime = alarmTime;
alarm = a;
}
}
return alarm != null ? Pair.create(alarm, alarmTime) : null;
}
|
private static Pair<Alarm, Long> calculateNextAlert(final Context context) {
long minTime = Long.MAX_VALUE;
long now = System.currentTimeMillis();
final SharedPreferences prefs = context.getSharedPreferences(PREFERENCES, 0);
Set<Alarm> alarms = new HashSet<Alarm>();
// We need to to build the list of alarms from both the snoozed list and the scheduled
// list. For a non-repeating alarm, when it goes of, it becomes disabled. A snoozed
// non-repeating alarm is not in the active list in the database.
// first go through the snoozed alarms
final Set<String> snoozedIds = prefs.getStringSet(PREF_SNOOZE_IDS, new HashSet<String>());
for (String snoozedAlarm : snoozedIds) {
final int alarmId = Integer.parseInt(snoozedAlarm);
final Alarm a = getAlarm(context.getContentResolver(), alarmId);
alarms.add(a);
}
// Now add the scheduled alarms
final Cursor cursor = getFilteredAlarmsCursor(context.getContentResolver());
if (cursor != null) {
try {
if (cursor.moveToFirst()) {
do {
final Alarm a = new Alarm(cursor);
alarms.add(a);
} while (cursor.moveToNext());
}
} finally {
cursor.close();
}
}
Alarm alarm = null;
Long alarmTime = null;
for (Alarm a : alarms) {
// Update the alarm if it has been snoozed
alarmTime = hasAlarmBeenSnoozed(prefs, a.id) ?
prefs.getLong(getAlarmPrefSnoozeTimeKey(a.id), -1) :
a.calculateAlarmTime();
if (alarmTime < now) {
Log.v("Disabling expired alarm set for " + Log.formatTime(alarmTime));
// Expired alarm, disable it and move along.
enableAlarmInternal(context, a, false);
continue;
}
if (alarmTime < minTime) {
minTime = alarmTime;
alarm = a;
}
}
return alarm != null ? Pair.create(alarm, minTime) : null;
}
|
diff --git a/lib/stackConfiguration/src/main/java/org/sagebionetworks/TemplatedConfiguration.java b/lib/stackConfiguration/src/main/java/org/sagebionetworks/TemplatedConfiguration.java
index 295764bd..deb5cff1 100644
--- a/lib/stackConfiguration/src/main/java/org/sagebionetworks/TemplatedConfiguration.java
+++ b/lib/stackConfiguration/src/main/java/org/sagebionetworks/TemplatedConfiguration.java
@@ -1,390 +1,390 @@
package org.sagebionetworks;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Properties;
import java.util.Set;
import org.apache.log4j.Logger;
/**
* TemplatedConfiguration should serve as the base for any configuration we need
* for any of our Java software. It encapsulates the core functionality of a
* default properties file, a property override file, checking of that override
* against a template, encrypted properties, and loading properties files from
* S3. It also exposes some properties common to all software stacks.
*
* Here's the first stab at a configuration system for our various stacks. It
* solves several problems we are currently having:
*
* (1) a proliferation of system properties for non-password and non-credential
* configuration values
*
* (2) a way to build one artifact that can run on many stacks
*
* (3) a place to encapsulate and limit the scope of property names, components
* that depend upon this retrieve values by method instead of by property name
*
* (4) standardization of property names because since they are close together,
* we can all see the naming pattern
*/
public class TemplatedConfiguration {
private static final Logger log = Logger
.getLogger(TemplatedConfiguration.class.getName());
private String defaultPropertiesFilename;
private String templatePropertiesFilename;
private Properties defaultStackProperties = null;
private Properties stackPropertyOverrides = null;
private Properties requiredProperties = null;
private String propertyFileUrl = null;
/**
* Pass in the default location for the properties file and also the
* template to use
*
* @param defaultPropertiesFilename
* @param templatePropertiesFilename
*/
public TemplatedConfiguration(String defaultPropertiesFilename,
String templatePropertiesFilename) {
this.defaultPropertiesFilename = defaultPropertiesFilename;
this.templatePropertiesFilename = templatePropertiesFilename;
}
/**
* Load stack configuration from properties files. Note that the System
* property org.sagebionetworks.stack is used to let the system know for
* which stack overrides should be loaded.
*/
public void reloadStackConfiguration() {
defaultStackProperties = new Properties();
stackPropertyOverrides = new Properties();
requiredProperties = new Properties();
// Load the default properties from the classpath.
loadPropertiesFromClasspath(defaultPropertiesFilename,
defaultStackProperties);
// Load the required properties
loadPropertiesFromClasspath(templatePropertiesFilename,
requiredProperties);
// If the system properties does not have the property file url,
// then we need to try and load the maven settings file.
if (System.getProperty(StackConstants.STACK_PROPERTY_FILE_URL) == null) {
// Try loading the settings file
addSettingsPropertiesToSystem();
}
// These three properties are required. If they are null, an exception
// will be thrown
String encryptionKey = getEncryptionKey();
String stack = getStack();
String stackInstance = getStackInstance();
propertyFileUrl = getPropertyOverridesFileURL();
- if (null != propertyFileUrl) {
+ if ((null != propertyFileUrl) && (0 < propertyFileUrl.length())) {
// Validate the property file
StackUtils.validateStackProperty(stack + stackInstance,
StackConstants.STACK_PROPERTY_FILE_URL, propertyFileUrl);
// If we have IAM id and key the load the properties using the
// Amazon
// client, else the URL should be public.
String iamId = getIAMUserId();
String iamKey = getIAMUserKey();
if (propertyFileUrl
.startsWith(StackConstants.S3_PROPERTY_FILENAME_PREFIX)
&& iamId != null && iamKey != null) {
try {
S3PropertyFileLoader.loadPropertiesFromS3(propertyFileUrl,
iamId, iamKey, stackPropertyOverrides);
} catch (IOException e) {
throw new RuntimeException(e);
}
} else {
loadPropertiesFromURL(propertyFileUrl, stackPropertyOverrides);
}
// Validate the required properties
StackUtils.validateRequiredProperties(requiredProperties,
stackPropertyOverrides, stack, stackInstance);
}
}
/**
 * Resolve a property, preferring the stack overrides over the defaults.
 * Never returns null: a missing value raises a NullPointerException with
 * context, which is more helpful than an NPE further downstream.
 */
public String getProperty(String propertyName) {
    // FYI logging: show any System-property value, since replacement syntax
    // in the props file may reference it.
    log.info(propertyName + "=" + System.getProperty(propertyName)
            + " from System properties (just FYI if replacement syntax was used in the props file)");
    final String propertyValue;
    if (stackPropertyOverrides.containsKey(propertyName)) {
        propertyValue = stackPropertyOverrides.getProperty(propertyName);
        log.info(propertyName + "=" + propertyValue
                + " from stack property overrides " + propertyFileUrl);
    } else {
        propertyValue = defaultStackProperties.getProperty(propertyName);
        log.info(propertyName + "=" + propertyValue
                + " from default stack properties " + defaultPropertiesFilename);
    }
    // Required properties are checked against the template up front, so this
    // should only trigger for optional properties that code requested anyway.
    if (null == propertyValue) {
        throw new NullPointerException(
                "no value found in StackConfiguration for property "
                        + propertyName + " propertyFileURL=" + propertyFileUrl);
    }
    return propertyValue;
}
/** @return the union of default and override property names. */
public Set<String> getAllPropertyNames() {
    Set<String> names = new HashSet<String>(
            defaultStackProperties.stringPropertyNames());
    names.addAll(stackPropertyOverrides.stringPropertyNames());
    return names;
}
/**
 * Look up an encrypted property and decrypt it with the stack encryption key.
 *
 * @param propertyName name of the encrypted property
 * @return the decrypted (clear-text) value
 * @throws RuntimeException if the encryption key or the property is missing/empty
 */
public String getDecryptedProperty(String propertyName) {
    String stackEncryptionKey = getEncryptionKey();
    if (stackEncryptionKey == null || stackEncryptionKey.length() == 0) {
        throw new RuntimeException(
                "Expected system property org.sagebionetworks.stackEncryptionKey");
    }
    String encryptedProperty = getProperty(propertyName);
    if (encryptedProperty == null || encryptedProperty.length() == 0) {
        throw new RuntimeException("Expected property for " + propertyName);
    }
    return new StringEncrypter(stackEncryptionKey).decrypt(encryptedProperty);
}
/**
 * Load a properties file from the classpath into the given Properties.
 * Fix: the original never closed the stream returned by openStream(),
 * leaking a file/JAR handle on every load.
 *
 * @param filename classpath resource name (resolved against this class)
 * @param properties destination; loaded entries are added to it
 * @throws IllegalArgumentException if an argument is null or the resource is absent
 */
private void loadPropertiesFromClasspath(String filename,
        Properties properties) {
    if (filename == null)
        throw new IllegalArgumentException("filename cannot be null");
    if (properties == null)
        throw new IllegalArgumentException("properties cannot be null");
    URL propertiesLocation = TemplatedConfiguration.class
            .getResource(filename);
    if (null == propertiesLocation) {
        throw new IllegalArgumentException(
                "Could not load property file from classpath: " + filename);
    }
    java.io.InputStream in = null;
    try {
        in = propertiesLocation.openStream();
        properties.load(in);
    } catch (Exception e) {
        throw new Error(e);
    } finally {
        if (in != null) {
            try {
                in.close();
            } catch (IOException e) {
                // best effort: nothing useful to do if close fails
            }
        }
    }
}
/**
 * Add the properties from the settings file to the system properties if
 * they are there.
 * Fix: replaced the raw-typed Iterator + unchecked cast of the original with
 * Properties.stringPropertyNames(), which is type-safe.
 */
private void addSettingsPropertiesToSystem() {
    try {
        Properties props = SettingsLoader.loadSettingsFile();
        if (props != null) {
            for (String key : props.stringPropertyNames()) {
                System.setProperty(key, props.getProperty(key));
            }
        }
    } catch (Exception e) {
        throw new Error(e);
    }
}
/**
 * Load a property file from a URL into the given Properties.
 * Fix: the original never closed the stream returned by openStream(),
 * leaking a network/file handle on every load.
 *
 * @param url source URL of the properties file
 * @param properties destination; loaded entries are added to it
 * @throws IllegalArgumentException if an argument is null or the URL is malformed
 */
private void loadPropertiesFromURL(String url, Properties properties) {
    if (url == null)
        throw new IllegalArgumentException("url cannot be null");
    if (properties == null)
        throw new IllegalArgumentException("properties cannot be null");
    URL propertiesLocation;
    try {
        propertiesLocation = new URL(url);
    } catch (MalformedURLException e1) {
        throw new IllegalArgumentException(
                "Could not load property file from url: " + url, e1);
    }
    java.io.InputStream in = null;
    try {
        in = propertiesLocation.openStream();
        properties.load(in);
    } catch (Exception e) {
        throw new Error(e);
    } finally {
        if (in != null) {
            try {
                in.close();
            } catch (IOException e) {
                // best effort: nothing useful to do if close fails
            }
        }
    }
}
/**
 * Throws the same RuntimeException whenever a required property is missing.
 *
 * @param propertyKey the primary property name
 * @param alternate the alternate property name that was also checked
 */
private void throwRequiredPropertyException(String propertyKey,
        String alternate) {
    String message = "The property: " + propertyKey
            + " or its alternate: " + alternate
            + " is required and cannot be null";
    throw new RuntimeException(message);
}
/**
 * The location of the property file that overrides configuration properties.
 * PARAM1 takes precedence over the named system property.
 *
 * @return the override file URL, or null if neither property is set
 */
public String getPropertyOverridesFileURL() {
    String url = System.getProperty(StackConstants.PARAM1);
    return (url != null) ? url
            : System.getProperty(StackConstants.STACK_PROPERTY_FILE_URL);
}
/**
 * The encryption key used to read passwords in the configuration property
 * file. PARAM2 takes precedence over the named system property.
 *
 * @return the key; a RuntimeException is raised if neither property is set
 */
public String getEncryptionKey() {
    String ek = System.getProperty(StackConstants.PARAM2);
    if (ek == null) {
        ek = System.getProperty(StackConstants.STACK_ENCRYPTION_KEY);
        if (ek == null) {
            throwRequiredPropertyException(StackConstants.STACK_ENCRYPTION_KEY,
                    StackConstants.PARAM2);
        }
    }
    return ek;
}
/**
 * The name of the stack. PARAM3 takes precedence over the named system
 * property.
 *
 * @return the stack name; a RuntimeException is raised if neither property is set
 */
public String getStack() {
    String stack = System.getProperty(StackConstants.PARAM3);
    if (stack == null) {
        stack = System.getProperty(StackConstants.STACK_PROPERTY_NAME);
        if (stack == null) {
            throwRequiredPropertyException(StackConstants.STACK_PROPERTY_NAME,
                    StackConstants.PARAM3);
        }
    }
    return stack;
}
/**
 * The stack instance (i.e 'A', or 'B'). PARAM4 takes precedence over the
 * named system property.
 *
 * @return the instance; a RuntimeException is raised if neither property is set
 */
public String getStackInstance() {
    String instance = System.getProperty(StackConstants.PARAM4);
    if (instance == null) {
        instance = System
                .getProperty(StackConstants.STACK_INSTANCE_PROPERTY_NAME);
        if (instance == null) {
            throwRequiredPropertyException(
                    StackConstants.STACK_INSTANCE_PROPERTY_NAME,
                    StackConstants.PARAM4);
        }
    }
    return instance;
}
/**
 * Get the IAM user ID (Access Key ID).
 * AWS_ACCESS_KEY_ID wins and is returned verbatim; the STACK_IAM_ID fallback
 * is trimmed and treated as absent when blank.
 *
 * @return the id, or null if not configured
 */
public String getIAMUserId() {
    // There are a few places where we can find this
    String id = System.getProperty("AWS_ACCESS_KEY_ID");
    if (id != null) {
        return id;
    }
    id = System.getProperty(StackConstants.STACK_IAM_ID);
    if (id == null) {
        return null;
    }
    String trimmed = id.trim();
    return trimmed.length() == 0 ? null : trimmed;
}
/**
 * Get the IAM user Key (Secret Access Key).
 * AWS_SECRET_KEY wins and is returned verbatim; the STACK_IAM_KEY fallback
 * is trimmed and treated as absent when blank.
 *
 * @return the key, or null if not configured
 */
public String getIAMUserKey() {
    // There are a few places to look for this
    String key = System.getProperty("AWS_SECRET_KEY");
    if (key != null) {
        return key;
    }
    key = System.getProperty(StackConstants.STACK_IAM_KEY);
    if (key == null) {
        return null;
    }
    String trimmed = key.trim();
    return trimmed.length() == 0 ? null : trimmed;
}
/** @return the configured private endpoint of the authentication service. */
public String getAuthenticationServicePrivateEndpoint() {
    return getProperty("org.sagebionetworks.authenticationservice.privateendpoint");
}
/** @return the configured public endpoint of the authentication service. */
public String getAuthenticationServicePublicEndpoint() {
    return getProperty("org.sagebionetworks.authenticationservice.publicendpoint");
}
/** @return the configured endpoint of the repository service. */
public String getRepositoryServiceEndpoint() {
    return getProperty("org.sagebionetworks.repositoryservice.endpoint");
}
/** @return the configured endpoint of the portal. */
public String getPortalEndpoint() {
    return getProperty("org.sagebionetworks.portal.endpoint");
}
/**
 * The repository Apache HttpClient connection pool properties.
 * A zero max-conns setting surfaces as confusing connection timeouts in
 * HttpClient, so that misconfiguration is rejected explicitly here.
 *
 * @return the max number of connections per route (always >= 1)
 */
public int getHttpClientMaxConnsPerRoute() {
    final String maxConnsPropertyName = "org.sagebionetworks.httpclient.connectionpool.maxconnsperroute";
    final int maxConns = Integer.parseInt(getProperty(maxConnsPropertyName));
    if (maxConns < 1) {
        throw new IllegalArgumentException(maxConnsPropertyName
                + " must be greater than zero");
    }
    return maxConns;
}
/** @return the configured filesystem path of the R script interpreter. */
public String getRScriptPath() {
    return getProperty("org.sagebionetworks.rScript.path");
}
}
| true
| true
|
public void reloadStackConfiguration() {
defaultStackProperties = new Properties();
stackPropertyOverrides = new Properties();
requiredProperties = new Properties();
// Load the default properties from the classpath.
loadPropertiesFromClasspath(defaultPropertiesFilename,
defaultStackProperties);
// Load the required properties
loadPropertiesFromClasspath(templatePropertiesFilename,
requiredProperties);
// If the system properties does not have the property file url,
// then we need to try and load the maven settings file.
if (System.getProperty(StackConstants.STACK_PROPERTY_FILE_URL) == null) {
// Try loading the settings file
addSettingsPropertiesToSystem();
}
// These three properties are required. If they are null, an exception
// will be thrown
String encryptionKey = getEncryptionKey();
String stack = getStack();
String stackInstance = getStackInstance();
propertyFileUrl = getPropertyOverridesFileURL();
if (null != propertyFileUrl) {
// Validate the property file
StackUtils.validateStackProperty(stack + stackInstance,
StackConstants.STACK_PROPERTY_FILE_URL, propertyFileUrl);
// If we have IAM id and key the load the properties using the
// Amazon
// client, else the URL should be public.
String iamId = getIAMUserId();
String iamKey = getIAMUserKey();
if (propertyFileUrl
.startsWith(StackConstants.S3_PROPERTY_FILENAME_PREFIX)
&& iamId != null && iamKey != null) {
try {
S3PropertyFileLoader.loadPropertiesFromS3(propertyFileUrl,
iamId, iamKey, stackPropertyOverrides);
} catch (IOException e) {
throw new RuntimeException(e);
}
} else {
loadPropertiesFromURL(propertyFileUrl, stackPropertyOverrides);
}
// Validate the required properties
StackUtils.validateRequiredProperties(requiredProperties,
stackPropertyOverrides, stack, stackInstance);
}
}
|
public void reloadStackConfiguration() {
defaultStackProperties = new Properties();
stackPropertyOverrides = new Properties();
requiredProperties = new Properties();
// Load the default properties from the classpath.
loadPropertiesFromClasspath(defaultPropertiesFilename,
defaultStackProperties);
// Load the required properties
loadPropertiesFromClasspath(templatePropertiesFilename,
requiredProperties);
// If the system properties does not have the property file url,
// then we need to try and load the maven settings file.
if (System.getProperty(StackConstants.STACK_PROPERTY_FILE_URL) == null) {
// Try loading the settings file
addSettingsPropertiesToSystem();
}
// These three properties are required. If they are null, an exception
// will be thrown
String encryptionKey = getEncryptionKey();
String stack = getStack();
String stackInstance = getStackInstance();
propertyFileUrl = getPropertyOverridesFileURL();
if ((null != propertyFileUrl) && (0 < propertyFileUrl.length())) {
// Validate the property file
StackUtils.validateStackProperty(stack + stackInstance,
StackConstants.STACK_PROPERTY_FILE_URL, propertyFileUrl);
// If we have IAM id and key the load the properties using the
// Amazon
// client, else the URL should be public.
String iamId = getIAMUserId();
String iamKey = getIAMUserKey();
if (propertyFileUrl
.startsWith(StackConstants.S3_PROPERTY_FILENAME_PREFIX)
&& iamId != null && iamKey != null) {
try {
S3PropertyFileLoader.loadPropertiesFromS3(propertyFileUrl,
iamId, iamKey, stackPropertyOverrides);
} catch (IOException e) {
throw new RuntimeException(e);
}
} else {
loadPropertiesFromURL(propertyFileUrl, stackPropertyOverrides);
}
// Validate the required properties
StackUtils.validateRequiredProperties(requiredProperties,
stackPropertyOverrides, stack, stackInstance);
}
}
|
diff --git a/src/littlegruz/autoruncommands/CommandMain.java b/src/littlegruz/autoruncommands/CommandMain.java
index cd4e544..ccb2f2a 100644
--- a/src/littlegruz/autoruncommands/CommandMain.java
+++ b/src/littlegruz/autoruncommands/CommandMain.java
@@ -1,604 +1,604 @@
package littlegruz.autoruncommands;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.StringTokenizer;
import java.util.Map.Entry;
import java.util.UUID;
import java.util.logging.Logger;
import littlegruz.autoruncommands.listeners.CommandBlockListener;
import littlegruz.autoruncommands.listeners.CommandEntityListener;
import littlegruz.autoruncommands.listeners.CommandPlayerListener;
import littlegruz.autoruncommands.listeners.CommandServerListener;
import org.bukkit.Location;
import org.bukkit.command.Command;
import org.bukkit.command.CommandSender;
import org.bukkit.event.Event;
import org.bukkit.plugin.PluginManager;
import org.bukkit.plugin.java.JavaPlugin;
public class CommandMain extends JavaPlugin{
Logger log = Logger.getLogger("This is MINECRAFT!");
private final CommandPlayerListener playerListener = new CommandPlayerListener(this);
private final CommandBlockListener blockListener = new CommandBlockListener(this);
private final CommandEntityListener entityListener = new CommandEntityListener(this);
private final CommandServerListener serverListener = new CommandServerListener(this);
private HashMap<String, String> playerCommandMap;
private HashMap<Location, String> blockCommandMap;
private HashMap<String, String> commandMap;
private HashMap<String, Location> playerPosMap;
private HashMap<String, String> deathCommandMap;
private File playerFile;
private File commandFile;
private File blockFile;
private File deathFile;
private File startupFile;
private boolean placeBlock;
private boolean startupDone;
private String blockCommand;
private String startupCommands;
/**
 * Persist every command map to its backing file so state survives restarts.
 * Fix: the death-command save wrote " eath" between key and value, which
 * corrupted the stored command (it would reload as "eath<command>").
 */
public void onDisable(){
   //Save player data
   try{
      BufferedWriter bw = new BufferedWriter(new FileWriter(playerFile));
      Iterator<Map.Entry<String, String>> it = playerCommandMap.entrySet().iterator();
      //Save the players and corresponding commands
      bw.write("<Player> <Command>\n");
      while(it.hasNext()){
         Entry<String, String> mp = it.next();
         bw.write(mp.getKey() + " " + mp.getValue() + "\n");
      }
      bw.close();
   }catch(IOException e){
      log.info("Error saving player command file");
   }
   //Save block data
   try{
      BufferedWriter bw = new BufferedWriter(new FileWriter(blockFile));
      Iterator<Map.Entry<Location, String>> it = blockCommandMap.entrySet().iterator();
      //Save the blocks and corresponding commands
      bw.write("<Block Location> <Command>\n");
      while(it.hasNext()){
         Entry<Location, String> mp = it.next();
         bw.write(mp.getKey().getWorld().getUID().toString() + " "
               + Double.toString(mp.getKey().getX()) + " "
               + Double.toString(mp.getKey().getY()) + " "
               + Double.toString(mp.getKey().getZ()) + " "
               + mp.getValue() + "\n");
      }
      bw.close();
   }catch(IOException e){
      log.info("Error saving block command file");
   }
   //Save player death data
   try{
      BufferedWriter bw = new BufferedWriter(new FileWriter(deathFile));
      Iterator<Map.Entry<String, String>> it = deathCommandMap.entrySet().iterator();
      //Save the players and corresponding commands
      bw.write("<Player> <Command>\n");
      while(it.hasNext()){
         Entry<String, String> mp = it.next();
         // Fixed separator: was " eath", which broke the load-time parse.
         bw.write(mp.getKey() + " " + mp.getValue() + "\n");
      }
      bw.close();
   }catch(IOException e){
      log.info("Error saving player death command file");
   }
   //Save server start up data (stored in-memory as ":"-separated tokens)
   try{
      BufferedWriter bw = new BufferedWriter(new FileWriter(startupFile));
      StringTokenizer st = new StringTokenizer(startupCommands, ":");
      while(st.countTokens() > 0){
         bw.write(st.nextToken() + "\n");
      }
      bw.close();
   }catch(IOException e){
      log.info("Error saving server start up command file");
   }
   //Save command data
   try{
      BufferedWriter bw = new BufferedWriter(new FileWriter(commandFile));
      Iterator<Map.Entry<String, String>> it = commandMap.entrySet().iterator();
      //Save the identifiers and corresponding commands
      bw.write("<Identifing name> <Command>\n");
      while(it.hasNext()){
         Entry<String, String> mp = it.next();
         bw.write(mp.getKey() + " " + mp.getValue() + "\n");
      }
      bw.close();
   }catch(IOException e){
      log.info("Error saving command file");
   }
   log.info("Autorun Commands v2.3 is melting! MELTING!");
}
public void onEnable(){
//Create the directory and files if needed
new File(getDataFolder().toString()).mkdir();
playerFile = new File(getDataFolder().toString() + "/playerList.txt");
commandFile = new File(getDataFolder().toString() + "/commands.txt");
blockFile = new File(getDataFolder().toString() + "/blockList.txt");
deathFile = new File(getDataFolder().toString() + "/deathList.txt");
startupFile = new File(getDataFolder().toString() + "/startupCommands.txt");
//Load the player file data
playerCommandMap = new HashMap<String, String>();
try{
BufferedReader br = new BufferedReader(new FileReader(playerFile));
StringTokenizer st;
String input;
String name;
String command;
while((input = br.readLine()) != null){
if(input.compareToIgnoreCase("<Player> <Command>") == 0){
continue;
}
st = new StringTokenizer(input, " ");
name = st.nextToken();
command = st.nextToken();
playerCommandMap.put(name, command);
}
}catch(FileNotFoundException e){
log.info("No original player command file, creating new one.");
}catch(IOException e){
log.info("Error reading player command file");
}catch(Exception e){
log.info("Incorrectly formatted player command file");
}
//Load the block file data
blockCommandMap = new HashMap<Location, String>();
try{
BufferedReader br = new BufferedReader(new FileReader(blockFile));
StringTokenizer st;
String input;
String command;
Location loc = null;
while((input = br.readLine()) != null){
if(input.compareToIgnoreCase("<Block Location> <Command>") == 0){
continue;
}
st = new StringTokenizer(input, " ");
loc = new Location(getServer().getWorld(UUID.fromString(st.nextToken())), Double.parseDouble(st.nextToken()), Double.parseDouble(st.nextToken()), Double.parseDouble(st.nextToken()));
command = st.nextToken();
blockCommandMap.put(loc, command);
}
}catch(FileNotFoundException e){
log.info("No original block command file, creating new one.");
}catch(IOException e){
log.info("Error reading block command file");
}catch(Exception e){
log.info("Incorrectly formatted block command file");
}
//Load the player death file data
deathCommandMap = new HashMap<String, String>();
try{
BufferedReader br = new BufferedReader(new FileReader(deathFile));
StringTokenizer st;
String input;
String name;
String command;
while((input = br.readLine()) != null){
if(input.compareToIgnoreCase("<Player> <Command>") == 0){
continue;
}
st = new StringTokenizer(input, " ");
name = st.nextToken();
command = st.nextToken();
deathCommandMap.put(name, command);
}
}catch(FileNotFoundException e){
log.info("No original player death command file, creating new one.");
}catch(IOException e){
log.info("Error reading player death command file");
}catch(Exception e){
log.info("Incorrectly formatted player death command file");
}
//Load the start up data
startupCommands = "";
try{
BufferedReader br = new BufferedReader(new FileReader(startupFile));
String input;
while((input = br.readLine()) != null){
if(input.compareToIgnoreCase("<Command>") == 0){
continue;
}
startupCommands += ":" + input;
}
}catch(FileNotFoundException e){
log.info("No original start up command file, creating new one.");
}catch(IOException e){
log.info("Error reading start up command file");
}catch(Exception e){
log.info("Incorrectly formatted start up command file");
}
//Load the command file data
commandMap = new HashMap<String, String>();
try{
BufferedReader br = new BufferedReader(new FileReader(commandFile));
StringTokenizer st;
String input;
String args;
String name;
//Assumes that the name is only one token long
while((input = br.readLine()) != null){
if(input.compareToIgnoreCase("<Identifing name> <Command>") == 0){
continue;
}
st = new StringTokenizer(input, " ");
name = st.nextToken();
args = st.nextToken();
while(st.hasMoreTokens()){
args += " " + st.nextToken();
}
commandMap.put(name, args);
}
}catch(FileNotFoundException e){
log.info("No original command file, creating new one.");
}catch(IOException e){
log.info("Error reading command file");
}catch(Exception e){
log.info("Incorrectly formatted command file");
}
placeBlock = false;
startupDone = false;
blockCommand = "";
playerPosMap = new HashMap<String, Location>();
//Set up the listeners
PluginManager pm = this.getServer().getPluginManager();
pm.registerEvent(Event.Type.PLAYER_INTERACT_ENTITY, playerListener, Event.Priority.Normal, this);
pm.registerEvent(Event.Type.PLAYER_INTERACT, playerListener, Event.Priority.Normal, this);
pm.registerEvent(Event.Type.PLAYER_MOVE, playerListener, Event.Priority.Normal, this);
pm.registerEvent(Event.Type.PLAYER_JOIN, playerListener, Event.Priority.Normal, this);
pm.registerEvent(Event.Type.PLAYER_QUIT, playerListener, Event.Priority.Normal, this);
pm.registerEvent(Event.Type.ENTITY_DEATH, entityListener, Event.Priority.Normal, this);
pm.registerEvent(Event.Type.BLOCK_BREAK, blockListener, Event.Priority.Normal, this);
pm.registerEvent(Event.Type.PLUGIN_ENABLE, serverListener, Event.Priority.Normal, this);
log.info("Autorun Commands v2.3 is enabled");
}
public boolean onCommand(CommandSender sender, Command cmd, String commandLabel, String[] args){
if(commandLabel.compareToIgnoreCase("setclickcommand") == 0){
if(sender.hasPermission("autoruncommands.setclick")){
if(args.length != 0){
String command = args[0];
String associate;
if(args.length == 2)
associate = args[1];
else
associate = sender.getName();
if(commandMap.get(command) != null){
if(playerCommandMap.get(associate) != null){
playerCommandMap.remove(associate);
}
playerCommandMap.put(associate, command);
sender.sendMessage("Command association successful");
}
else if(commandMap.get(command + "[op]") != null){
if(playerCommandMap.get(associate) != null){
playerCommandMap.remove(associate);
}
playerCommandMap.put(associate, command + "[op]");
sender.sendMessage("OP command association successful");
}
else{
sender.sendMessage("No command found with that identifier");
sender.sendMessage("Try \'/addacommand <identifier> <command> [args]\' first");
}
}
else
sender.sendMessage("Not enough arguments");
}
else
sender.sendMessage("You don't have sufficient permissions");
}
else if(commandLabel.compareToIgnoreCase("removeclickcommand") == 0){
if(sender.hasPermission("autoruncommands.removeclick")){
String associate;
if(args.length == 1)
associate = args[0];
else
associate = sender.getName();
if(playerCommandMap.get(associate) != null){
playerCommandMap.remove(associate);
sender.sendMessage("Command removed");
}
else
sender.sendMessage(associate + " has no associated command");
}
else
sender.sendMessage("You don't have sufficient permissions");
}
else if(commandLabel.compareToIgnoreCase("removedeathcommand") == 0){
if(sender.hasPermission("autoruncommands.removedeath")){
String associate;
if(args.length == 1)
associate = args[0];
else
associate = sender.getName();
if(deathCommandMap.get(associate) != null){
deathCommandMap.remove(associate);
sender.sendMessage("Command removed");
}
else
sender.sendMessage(associate + " has no associated death command");
}
else
sender.sendMessage("You don't have sufficient permissions");
}
else if(commandLabel.compareToIgnoreCase("displayclickcommand") == 0){
if(sender.hasPermission("autoruncommands.displayclick")){
if(playerCommandMap.get("GLOBAL") != null)
sender.sendMessage("Your command in use is: /" + playerCommandMap.get("GLOBAL").replace("[op]", ""));
else if(playerCommandMap.get(sender.getName()) != null)
sender.sendMessage("Your command in use is: /" + playerCommandMap.get(sender.getName()).replace("[op]", ""));
else
sender.sendMessage("You have no associated command");
}
else
sender.sendMessage("You don't have sufficient permissions");
}
else if(commandLabel.compareToIgnoreCase("displaydeathcommand") == 0){
if(sender.hasPermission("autoruncommands.displaydeath")){
if(deathCommandMap.get("GLOBAL") != null)
sender.sendMessage("Your death command in use is: /" + deathCommandMap.get("GLOBAL").replace("[op]", ""));
else if(deathCommandMap.get(sender.getName()) != null)
sender.sendMessage("Your death command in use is: /" + deathCommandMap.get(sender.getName().replace("[op]", "")));
else
sender.sendMessage("You have no associated death command");
}
else
sender.sendMessage("You don't have sufficient permissions");
}
else if(commandLabel.compareToIgnoreCase("displaystartupcommands") == 0){
if(sender.hasPermission("autoruncommands.displaydeath")){
if(!startupCommands.isEmpty()){
sender.sendMessage("The commands that run on start up are:");
StringTokenizer st = new StringTokenizer(startupCommands, ":");
while(st.countTokens() > 0)
sender.sendMessage(st.nextToken().replace("[op]", ""));
}
else
- sender.sendMessage("You have no associated death command");
+ sender.sendMessage("You have no commands set to run at start up");
}
else
sender.sendMessage("You don't have sufficient permissions");
}
else if(commandLabel.compareToIgnoreCase("setcommandblock") == 0){
if(sender.hasPermission("autoruncommands.setblock")){
if(args.length != 0){
if(commandMap.get(args[0]) != null){
blockCommand = args[0];
placeBlock = true;
sender.sendMessage("Right click with your fist to apply \'"
+ commandMap.get(args[0]) + "\'");
}
else{
sender.sendMessage("No command found with that identifier");
sender.sendMessage("Try \'/addacommand <identifier> <command> [args]\' first");
}
}
else
sender.sendMessage("No autorun command given");
}
}else if(commandLabel.compareToIgnoreCase("addacommand") == 0){
if(sender.hasPermission("autoruncommands.addcommand")){
if(args.length > 1){
String id;
String command;
id = args[0];
command = args[1];
for(int i = 2; i < args.length; i++){
command += " " + args[i];
}
if(commandMap.put(id, command) != null)
sender.sendMessage("Overwrote old command");
else
sender.sendMessage("Command added");
}
else
sender.sendMessage("An identifier and command must be given");
}
}else if(commandLabel.compareToIgnoreCase("addopcommand") == 0){
if(sender.hasPermission("autoruncommands.addopcommand")){
if(args.length > 1){
String id;
String command;
id = args[0] + "[op]";
command = args[1];
for(int i = 2; i < args.length; i++){
command += " " + args[i];
}
if(commandMap.put(id, command) != null)
sender.sendMessage("Overwrote old op command");
else
sender.sendMessage("Op command added");
}
else
sender.sendMessage("An identifier and command must be given");
}
else
sender.sendMessage("You don't have sufficient permissions");
}if(commandLabel.compareToIgnoreCase("setdeathcommand") == 0){
if(sender.hasPermission("autoruncommands.setdeath")){
if(args.length != 0){
String command = args[0];
String associate;
if(args.length == 2)
associate = args[1];
else
associate = sender.getName();
if(commandMap.get(command) != null){
if(deathCommandMap.get(associate) != null){
deathCommandMap.remove(associate);
}
deathCommandMap.put(associate, command);
sender.sendMessage("Command association successful");
}
else if(commandMap.get(command + "[op]") != null){
if(deathCommandMap.get(associate) != null){
deathCommandMap.remove(associate);
}
deathCommandMap.put(associate, command + "[op]");
sender.sendMessage("OP command association successful");
}
else{
sender.sendMessage("No command found with that identifier");
sender.sendMessage("Try \'/addacommand <identifier> <command> [args]\' first");
}
}
else
sender.sendMessage("Not enough arguments");
}
else
sender.sendMessage("You don't have sufficient permissions");
}
else if(commandLabel.compareToIgnoreCase("addstartupcommand") == 0){
if(sender.hasPermission("autoruncommands.addstartup")){
if(args.length != 0){
String command = args[0];
if(commandMap.get(command) != null)
addStartupCommand(sender, command);
else if(commandMap.get(command + "[op]") != null)
addStartupCommand(sender, command);
else{
sender.sendMessage("No command found with that identifier");
sender.sendMessage("Try \'/addacommand <identifier> <command> [args]\' first");
}
}
else
sender.sendMessage("Not enough arguments");
}
else
sender.sendMessage("You don't have sufficient permissions");
}
else if(commandLabel.compareToIgnoreCase("removestartupcommand") == 0){
if(sender.hasPermission("autoruncommands.removestartup")){
if(args.length != 0){
String command = args[0];
if(commandMap.get(command) != null)
removeStartupCommand(sender, command);
else if(commandMap.get(command + "[op]") != null)
removeStartupCommand(sender, command + "[op]");
else{
sender.sendMessage("No command found with that identifier");
sender.sendMessage("Try \'/addacommand <identifier> <command> [args]\' first");
}
}
else
sender.sendMessage("Not enough arguments");
}
else
sender.sendMessage("You don't have sufficient permissions");
}
return true;
}
/**
 * Remove a command identifier from the ":"-separated start-up command list,
 * notifying the sender of the outcome.
 */
public void removeStartupCommand(CommandSender sender, String command){
   String token = ":" + command;
   if(!startupCommands.contains(token)){
      sender.sendMessage("No command set with that identifier");
      sender.sendMessage("Check with \'/displaystartupcommands' first");
      return;
   }
   startupCommands = startupCommands.replace(token, "");
   sender.sendMessage("Command removal successful");
}
/**
 * Append a command identifier to the ":"-separated start-up command list
 * unless it is already present, notifying the sender of the outcome.
 */
public void addStartupCommand(CommandSender sender, String command){
   if(startupCommands.contains(command)){
      sender.sendMessage("That command is already present");
   }
   else{
      startupCommands = startupCommands + ":" + command;
      sender.sendMessage("Command association successful");
   }
}
/** Map of player name (or "GLOBAL") to click-command identifier. */
public HashMap<String, String> getPlayerClickMap(){
   return playerCommandMap;
}
/** Map of player name (or "GLOBAL") to death-command identifier. */
public HashMap<String, String> getPlayerDeathMap(){
   return deathCommandMap;
}
/** Map of block location to command identifier. */
public HashMap<Location, String> getBlockCommandMap(){
   return blockCommandMap;
}
/** Map of command identifier to the full command string. */
public HashMap<String, String> getCommandMap(){
   return commandMap;
}
/** True while waiting for a right-click to bind a command block. */
public boolean isPlaceBlock(){
   return placeBlock;
}
public void setPlaceBlock(boolean placeBlock){
   this.placeBlock = placeBlock;
}
/** Identifier of the command pending block placement (set by /setcommandblock). */
public String getBlockCommand(){
   return blockCommand;
}
/** Map of player name to a tracked location. */
public HashMap<String, Location> getPlayerPosMap() {
   return playerPosMap;
}
/** ":"-separated list of start-up command identifiers. */
public String getStartupCommands() {
   return startupCommands;
}
/** True once the start-up commands have been run. */
public boolean isStartupDone() {
   return startupDone;
}
public void setStartupDone(boolean startupDone) {
   this.startupDone = startupDone;
}
}
| true
| true
|
public boolean onCommand(CommandSender sender, Command cmd, String commandLabel, String[] args){
if(commandLabel.compareToIgnoreCase("setclickcommand") == 0){
if(sender.hasPermission("autoruncommands.setclick")){
if(args.length != 0){
String command = args[0];
String associate;
if(args.length == 2)
associate = args[1];
else
associate = sender.getName();
if(commandMap.get(command) != null){
if(playerCommandMap.get(associate) != null){
playerCommandMap.remove(associate);
}
playerCommandMap.put(associate, command);
sender.sendMessage("Command association successful");
}
else if(commandMap.get(command + "[op]") != null){
if(playerCommandMap.get(associate) != null){
playerCommandMap.remove(associate);
}
playerCommandMap.put(associate, command + "[op]");
sender.sendMessage("OP command association successful");
}
else{
sender.sendMessage("No command found with that identifier");
sender.sendMessage("Try \'/addacommand <identifier> <command> [args]\' first");
}
}
else
sender.sendMessage("Not enough arguments");
}
else
sender.sendMessage("You don't have sufficient permissions");
}
else if(commandLabel.compareToIgnoreCase("removeclickcommand") == 0){
if(sender.hasPermission("autoruncommands.removeclick")){
String associate;
if(args.length == 1)
associate = args[0];
else
associate = sender.getName();
if(playerCommandMap.get(associate) != null){
playerCommandMap.remove(associate);
sender.sendMessage("Command removed");
}
else
sender.sendMessage(associate + " has no associated command");
}
else
sender.sendMessage("You don't have sufficient permissions");
}
else if(commandLabel.compareToIgnoreCase("removedeathcommand") == 0){
if(sender.hasPermission("autoruncommands.removedeath")){
String associate;
if(args.length == 1)
associate = args[0];
else
associate = sender.getName();
if(deathCommandMap.get(associate) != null){
deathCommandMap.remove(associate);
sender.sendMessage("Command removed");
}
else
sender.sendMessage(associate + " has no associated death command");
}
else
sender.sendMessage("You don't have sufficient permissions");
}
else if(commandLabel.compareToIgnoreCase("displayclickcommand") == 0){
if(sender.hasPermission("autoruncommands.displayclick")){
if(playerCommandMap.get("GLOBAL") != null)
sender.sendMessage("Your command in use is: /" + playerCommandMap.get("GLOBAL").replace("[op]", ""));
else if(playerCommandMap.get(sender.getName()) != null)
sender.sendMessage("Your command in use is: /" + playerCommandMap.get(sender.getName()).replace("[op]", ""));
else
sender.sendMessage("You have no associated command");
}
else
sender.sendMessage("You don't have sufficient permissions");
}
else if(commandLabel.compareToIgnoreCase("displaydeathcommand") == 0){
if(sender.hasPermission("autoruncommands.displaydeath")){
if(deathCommandMap.get("GLOBAL") != null)
sender.sendMessage("Your death command in use is: /" + deathCommandMap.get("GLOBAL").replace("[op]", ""));
else if(deathCommandMap.get(sender.getName()) != null)
sender.sendMessage("Your death command in use is: /" + deathCommandMap.get(sender.getName().replace("[op]", "")));
else
sender.sendMessage("You have no associated death command");
}
else
sender.sendMessage("You don't have sufficient permissions");
}
else if(commandLabel.compareToIgnoreCase("displaystartupcommands") == 0){
if(sender.hasPermission("autoruncommands.displaydeath")){
if(!startupCommands.isEmpty()){
sender.sendMessage("The commands that run on start up are:");
StringTokenizer st = new StringTokenizer(startupCommands, ":");
while(st.countTokens() > 0)
sender.sendMessage(st.nextToken().replace("[op]", ""));
}
else
sender.sendMessage("You have no associated death command");
}
else
sender.sendMessage("You don't have sufficient permissions");
}
else if(commandLabel.compareToIgnoreCase("setcommandblock") == 0){
if(sender.hasPermission("autoruncommands.setblock")){
if(args.length != 0){
if(commandMap.get(args[0]) != null){
blockCommand = args[0];
placeBlock = true;
sender.sendMessage("Right click with your fist to apply \'"
+ commandMap.get(args[0]) + "\'");
}
else{
sender.sendMessage("No command found with that identifier");
sender.sendMessage("Try \'/addacommand <identifier> <command> [args]\' first");
}
}
else
sender.sendMessage("No autorun command given");
}
}else if(commandLabel.compareToIgnoreCase("addacommand") == 0){
if(sender.hasPermission("autoruncommands.addcommand")){
if(args.length > 1){
String id;
String command;
id = args[0];
command = args[1];
for(int i = 2; i < args.length; i++){
command += " " + args[i];
}
if(commandMap.put(id, command) != null)
sender.sendMessage("Overwrote old command");
else
sender.sendMessage("Command added");
}
else
sender.sendMessage("An identifier and command must be given");
}
}else if(commandLabel.compareToIgnoreCase("addopcommand") == 0){
if(sender.hasPermission("autoruncommands.addopcommand")){
if(args.length > 1){
String id;
String command;
id = args[0] + "[op]";
command = args[1];
for(int i = 2; i < args.length; i++){
command += " " + args[i];
}
if(commandMap.put(id, command) != null)
sender.sendMessage("Overwrote old op command");
else
sender.sendMessage("Op command added");
}
else
sender.sendMessage("An identifier and command must be given");
}
else
sender.sendMessage("You don't have sufficient permissions");
}if(commandLabel.compareToIgnoreCase("setdeathcommand") == 0){
if(sender.hasPermission("autoruncommands.setdeath")){
if(args.length != 0){
String command = args[0];
String associate;
if(args.length == 2)
associate = args[1];
else
associate = sender.getName();
if(commandMap.get(command) != null){
if(deathCommandMap.get(associate) != null){
deathCommandMap.remove(associate);
}
deathCommandMap.put(associate, command);
sender.sendMessage("Command association successful");
}
else if(commandMap.get(command + "[op]") != null){
if(deathCommandMap.get(associate) != null){
deathCommandMap.remove(associate);
}
deathCommandMap.put(associate, command + "[op]");
sender.sendMessage("OP command association successful");
}
else{
sender.sendMessage("No command found with that identifier");
sender.sendMessage("Try \'/addacommand <identifier> <command> [args]\' first");
}
}
else
sender.sendMessage("Not enough arguments");
}
else
sender.sendMessage("You don't have sufficient permissions");
}
else if(commandLabel.compareToIgnoreCase("addstartupcommand") == 0){
if(sender.hasPermission("autoruncommands.addstartup")){
if(args.length != 0){
String command = args[0];
if(commandMap.get(command) != null)
addStartupCommand(sender, command);
else if(commandMap.get(command + "[op]") != null)
addStartupCommand(sender, command);
else{
sender.sendMessage("No command found with that identifier");
sender.sendMessage("Try \'/addacommand <identifier> <command> [args]\' first");
}
}
else
sender.sendMessage("Not enough arguments");
}
else
sender.sendMessage("You don't have sufficient permissions");
}
else if(commandLabel.compareToIgnoreCase("removestartupcommand") == 0){
if(sender.hasPermission("autoruncommands.removestartup")){
if(args.length != 0){
String command = args[0];
if(commandMap.get(command) != null)
removeStartupCommand(sender, command);
else if(commandMap.get(command + "[op]") != null)
removeStartupCommand(sender, command + "[op]");
else{
sender.sendMessage("No command found with that identifier");
sender.sendMessage("Try \'/addacommand <identifier> <command> [args]\' first");
}
}
else
sender.sendMessage("Not enough arguments");
}
else
sender.sendMessage("You don't have sufficient permissions");
}
return true;
}
|
public boolean onCommand(CommandSender sender, Command cmd, String commandLabel, String[] args){
if(commandLabel.compareToIgnoreCase("setclickcommand") == 0){
if(sender.hasPermission("autoruncommands.setclick")){
if(args.length != 0){
String command = args[0];
String associate;
if(args.length == 2)
associate = args[1];
else
associate = sender.getName();
if(commandMap.get(command) != null){
if(playerCommandMap.get(associate) != null){
playerCommandMap.remove(associate);
}
playerCommandMap.put(associate, command);
sender.sendMessage("Command association successful");
}
else if(commandMap.get(command + "[op]") != null){
if(playerCommandMap.get(associate) != null){
playerCommandMap.remove(associate);
}
playerCommandMap.put(associate, command + "[op]");
sender.sendMessage("OP command association successful");
}
else{
sender.sendMessage("No command found with that identifier");
sender.sendMessage("Try \'/addacommand <identifier> <command> [args]\' first");
}
}
else
sender.sendMessage("Not enough arguments");
}
else
sender.sendMessage("You don't have sufficient permissions");
}
else if(commandLabel.compareToIgnoreCase("removeclickcommand") == 0){
if(sender.hasPermission("autoruncommands.removeclick")){
String associate;
if(args.length == 1)
associate = args[0];
else
associate = sender.getName();
if(playerCommandMap.get(associate) != null){
playerCommandMap.remove(associate);
sender.sendMessage("Command removed");
}
else
sender.sendMessage(associate + " has no associated command");
}
else
sender.sendMessage("You don't have sufficient permissions");
}
else if(commandLabel.compareToIgnoreCase("removedeathcommand") == 0){
if(sender.hasPermission("autoruncommands.removedeath")){
String associate;
if(args.length == 1)
associate = args[0];
else
associate = sender.getName();
if(deathCommandMap.get(associate) != null){
deathCommandMap.remove(associate);
sender.sendMessage("Command removed");
}
else
sender.sendMessage(associate + " has no associated death command");
}
else
sender.sendMessage("You don't have sufficient permissions");
}
else if(commandLabel.compareToIgnoreCase("displayclickcommand") == 0){
if(sender.hasPermission("autoruncommands.displayclick")){
if(playerCommandMap.get("GLOBAL") != null)
sender.sendMessage("Your command in use is: /" + playerCommandMap.get("GLOBAL").replace("[op]", ""));
else if(playerCommandMap.get(sender.getName()) != null)
sender.sendMessage("Your command in use is: /" + playerCommandMap.get(sender.getName()).replace("[op]", ""));
else
sender.sendMessage("You have no associated command");
}
else
sender.sendMessage("You don't have sufficient permissions");
}
else if(commandLabel.compareToIgnoreCase("displaydeathcommand") == 0){
if(sender.hasPermission("autoruncommands.displaydeath")){
if(deathCommandMap.get("GLOBAL") != null)
sender.sendMessage("Your death command in use is: /" + deathCommandMap.get("GLOBAL").replace("[op]", ""));
else if(deathCommandMap.get(sender.getName()) != null)
sender.sendMessage("Your death command in use is: /" + deathCommandMap.get(sender.getName().replace("[op]", "")));
else
sender.sendMessage("You have no associated death command");
}
else
sender.sendMessage("You don't have sufficient permissions");
}
else if(commandLabel.compareToIgnoreCase("displaystartupcommands") == 0){
if(sender.hasPermission("autoruncommands.displaydeath")){
if(!startupCommands.isEmpty()){
sender.sendMessage("The commands that run on start up are:");
StringTokenizer st = new StringTokenizer(startupCommands, ":");
while(st.countTokens() > 0)
sender.sendMessage(st.nextToken().replace("[op]", ""));
}
else
sender.sendMessage("You have no commands set to run at start up");
}
else
sender.sendMessage("You don't have sufficient permissions");
}
else if(commandLabel.compareToIgnoreCase("setcommandblock") == 0){
if(sender.hasPermission("autoruncommands.setblock")){
if(args.length != 0){
if(commandMap.get(args[0]) != null){
blockCommand = args[0];
placeBlock = true;
sender.sendMessage("Right click with your fist to apply \'"
+ commandMap.get(args[0]) + "\'");
}
else{
sender.sendMessage("No command found with that identifier");
sender.sendMessage("Try \'/addacommand <identifier> <command> [args]\' first");
}
}
else
sender.sendMessage("No autorun command given");
}
}else if(commandLabel.compareToIgnoreCase("addacommand") == 0){
if(sender.hasPermission("autoruncommands.addcommand")){
if(args.length > 1){
String id;
String command;
id = args[0];
command = args[1];
for(int i = 2; i < args.length; i++){
command += " " + args[i];
}
if(commandMap.put(id, command) != null)
sender.sendMessage("Overwrote old command");
else
sender.sendMessage("Command added");
}
else
sender.sendMessage("An identifier and command must be given");
}
}else if(commandLabel.compareToIgnoreCase("addopcommand") == 0){
if(sender.hasPermission("autoruncommands.addopcommand")){
if(args.length > 1){
String id;
String command;
id = args[0] + "[op]";
command = args[1];
for(int i = 2; i < args.length; i++){
command += " " + args[i];
}
if(commandMap.put(id, command) != null)
sender.sendMessage("Overwrote old op command");
else
sender.sendMessage("Op command added");
}
else
sender.sendMessage("An identifier and command must be given");
}
else
sender.sendMessage("You don't have sufficient permissions");
}if(commandLabel.compareToIgnoreCase("setdeathcommand") == 0){
if(sender.hasPermission("autoruncommands.setdeath")){
if(args.length != 0){
String command = args[0];
String associate;
if(args.length == 2)
associate = args[1];
else
associate = sender.getName();
if(commandMap.get(command) != null){
if(deathCommandMap.get(associate) != null){
deathCommandMap.remove(associate);
}
deathCommandMap.put(associate, command);
sender.sendMessage("Command association successful");
}
else if(commandMap.get(command + "[op]") != null){
if(deathCommandMap.get(associate) != null){
deathCommandMap.remove(associate);
}
deathCommandMap.put(associate, command + "[op]");
sender.sendMessage("OP command association successful");
}
else{
sender.sendMessage("No command found with that identifier");
sender.sendMessage("Try \'/addacommand <identifier> <command> [args]\' first");
}
}
else
sender.sendMessage("Not enough arguments");
}
else
sender.sendMessage("You don't have sufficient permissions");
}
else if(commandLabel.compareToIgnoreCase("addstartupcommand") == 0){
if(sender.hasPermission("autoruncommands.addstartup")){
if(args.length != 0){
String command = args[0];
if(commandMap.get(command) != null)
addStartupCommand(sender, command);
else if(commandMap.get(command + "[op]") != null)
addStartupCommand(sender, command);
else{
sender.sendMessage("No command found with that identifier");
sender.sendMessage("Try \'/addacommand <identifier> <command> [args]\' first");
}
}
else
sender.sendMessage("Not enough arguments");
}
else
sender.sendMessage("You don't have sufficient permissions");
}
else if(commandLabel.compareToIgnoreCase("removestartupcommand") == 0){
if(sender.hasPermission("autoruncommands.removestartup")){
if(args.length != 0){
String command = args[0];
if(commandMap.get(command) != null)
removeStartupCommand(sender, command);
else if(commandMap.get(command + "[op]") != null)
removeStartupCommand(sender, command + "[op]");
else{
sender.sendMessage("No command found with that identifier");
sender.sendMessage("Try \'/addacommand <identifier> <command> [args]\' first");
}
}
else
sender.sendMessage("Not enough arguments");
}
else
sender.sendMessage("You don't have sufficient permissions");
}
return true;
}
|
diff --git a/cspi-webui/src/main/java/org/collectionspace/chain/csp/webui/record/RecordSearchList.java b/cspi-webui/src/main/java/org/collectionspace/chain/csp/webui/record/RecordSearchList.java
index 9bf0b54a..6f9483da 100644
--- a/cspi-webui/src/main/java/org/collectionspace/chain/csp/webui/record/RecordSearchList.java
+++ b/cspi-webui/src/main/java/org/collectionspace/chain/csp/webui/record/RecordSearchList.java
@@ -1,436 +1,436 @@
/* Copyright 2010 University of Cambridge
* Licensed under the Educational Community License (ECL), Version 2.0. You may not use this file except in
* compliance with this License.
*
* You may obtain a copy of the ECL 2.0 License at https://source.collectionspace.org/collection-space/LICENSE.txt
*/
package org.collectionspace.chain.csp.webui.record;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang.StringUtils;
import org.collectionspace.chain.csp.schema.FieldParent;
import org.collectionspace.chain.csp.schema.FieldSet;
import org.collectionspace.chain.csp.schema.Record;
import org.collectionspace.chain.csp.schema.Repeat;
import org.collectionspace.chain.csp.schema.Spec;
import org.collectionspace.chain.csp.webui.main.Request;
import org.collectionspace.chain.csp.webui.main.WebMethod;
import org.collectionspace.chain.csp.webui.main.WebUI;
import org.collectionspace.chain.csp.webui.misc.Generic;
import org.collectionspace.chain.csp.webui.misc.GenericSearch;
import org.collectionspace.csp.api.persistence.ExistException;
import org.collectionspace.csp.api.persistence.Storage;
import org.collectionspace.csp.api.persistence.UnderlyingStorageException;
import org.collectionspace.csp.api.persistence.UnimplementedException;
import org.collectionspace.csp.api.ui.UIException;
import org.collectionspace.csp.api.ui.UIRequest;
import org.collectionspace.csp.api.ui.UISession;
import org.collectionspace.services.common.api.RefName;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class RecordSearchList implements WebMethod {
public static final int MODE_LIST = 0;
public static final int MODE_SEARCH = 1;
public static final int MODE_SEARCH_RELATED = 2;
private static final Logger log=LoggerFactory.getLogger(RecordSearchList.class);
private int mode;
private String base;
private Spec spec;
private Record r;
private Map<String,String> type_to_url=new HashMap<String,String>();
private String searchAllGroup;
private final static String UNKNOWN_RECORD_TYPE = "UNKNOWN";
public RecordSearchList(Record r, int mode) {
this(r, mode, null);
}
public RecordSearchList(Record r, int mode, String searchAllGroup) {
this.r = r;
this.spec=r.getSpec();
this.base=r.getID();
this.mode=mode;
this.searchAllGroup = searchAllGroup;
}
/**
* Retrieve the mini summary information e.g. summary and number and append the csid and recordType to it
* @param {Storage} storage Type of storage (e.g. AuthorizationStorage, RecordStorage,...)
* @param {String} type The type of record requested (e.g. permission)
* @param {String} csid The csid of the record
* @return {JSONObject} The JSON string containing the mini record
* @throws ExistException
* @throws UnimplementedException
* @throws UnderlyingStorageException
* @throws JSONException
*/
private JSONObject generateMiniRecord(Storage storage,String type,String csid) throws JSONException {
String postfix = "list";
if(this.mode==MODE_SEARCH){
postfix = "search";
}
JSONObject restrictions = new JSONObject();
JSONObject out = new JSONObject();
try {
if(csid == null || csid.equals("")){
return out;
}
out = storage.retrieveJSON(type+"/"+csid+"/view/"+postfix,restrictions);
out.put("csid",csid);
String recordtype = null;
if(!r.isType("searchall")) {
recordtype = type_to_url.get(type);
} else {
JSONObject summarylist = out.getJSONObject("summarylist");
String uri = summarylist.getString("uri");
if(uri!=null && uri.startsWith("/"))
uri=uri.substring(1);
String[] parts=uri.split("/");
String recordurl = parts[0];
- Record itemr = r.getSpec().getRecordByServicesUrl(recordurl);
+ Record itemr = r.getSpec().getRecordByServicesUrl(recordurl);
if (itemr == null) {
- String docType = summarylist.getString("docType");
+ String docType = summarylist.getString("docType");
itemr = r.getSpec().getRecordByServicesDocType(docType);
}
if (itemr == null) {
recordtype = UNKNOWN_RECORD_TYPE;
log.warn("Could not get record type for record with services URL " + recordurl);
} else {
recordtype = type_to_url.get(itemr.getID());
// Include the vocabulary name ("namespace") value for each authority item record in the list
String refName = null;
RefName.AuthorityItem item = null;
if(summarylist.has("refName")) {
refName = summarylist.getString("refName");
}
- if(refName!=null) {
+ if(refName!=null) {
item = RefName.AuthorityItem.parse(refName);
}
if(item!=null) {
out.put("namespace",item.getParentShortIdentifier());
} else {
log.warn("Could not get vocabulary namespace for record with services URL " + recordurl);
}
}
- }
+ }
out.put("recordtype", recordtype);
// CSPACE-2894
if(this.r.getID().equals("permission")){
String summary = out.getString("summary");
String name = Generic.ResourceNameUI(this.r.getSpec(), summary);
if(name.contains(WORKFLOW_SUB_RESOURCE)){
return null;
}
out.put("summary", name);
out.put("display", Generic.getPermissionView(this.r.getSpec(), summary));
}
} catch (ExistException e) {
out.put("csid",csid);
out.put("isError", true);
JSONObject msg = new JSONObject();
msg.put("severity", "error");
msg.put("message", "Exist Exception:"+e.getMessage());
JSONArray msgs = new JSONArray();
msgs.put(msg);
out.put("messages", msgs);
} catch (UnimplementedException e) {
out.put("csid",csid);
out.put("isError", true);
JSONObject msg = new JSONObject();
msg.put("severity", "error");
msg.put("message", "Exist Exception:"+e.getMessage());
JSONArray msgs = new JSONArray();
msgs.put(msg);
out.put("messages", msgs);
} catch (UnderlyingStorageException e) {
out.put("csid",csid);
out.put("isError", true);
JSONObject msg = new JSONObject();
msg.put("severity", "error");
msg.put("message", "Exist Exception:"+e.getMessage());
JSONArray msgs = new JSONArray();
msgs.put(msg);
out.put("messages", msgs);
}
return out;
}
/**
* Intermediate function to generateMiniRecord. This function only exists for if someone would like to create different types
* of records e.g. MiniRecordA, MiniRecordB,...
* @param {Storage} storage The type of storage (e.g. AuthorizationStorage,RecordStorage,...)
* @param {String} base The type of record (e.g. permission)
* @param {String} member The csid of the object
* @return {JSONObject} A JSONObject containing the mini record.
* @throws JSONException
* @throws ExistException
* @throws UnimplementedException
* @throws UnderlyingStorageException
*/
private JSONObject generateEntry(Storage storage,String base,String member) throws JSONException, ExistException, UnimplementedException, UnderlyingStorageException {
return generateMiniRecord(storage,base,member);
}
/**
* Creates a list of results containing:summary, number, recordType, csid
* @param {Storage} storage The type of storage (e.g. AuthorizationStorage,RecordStorage,...)
* @param {String} base The type of record (e.g. permission)
* @param {String[]} paths The list of csids from the records that were requested
* @param {String} key The surrounding key for the results (e.g. {"key":{...}})
* @return {JSONObject} The JSONObject that is sent back to the UI Layer
* @throws JSONException
* @throws ExistException
* @throws UnimplementedException
* @throws UnderlyingStorageException
*/
private JSONObject pathsToJSON(Storage storage,String base,String[] paths,String key, JSONObject pagination) throws JSONException, ExistException, UnimplementedException, UnderlyingStorageException {
JSONObject out=new JSONObject();
JSONArray members=new JSONArray();
for(String p : paths){
JSONObject temp = generateEntry(storage,base,p);
if(temp !=null){
members.put(temp);
}
}
out.put(key,members);
if(pagination!=null){
out.put("pagination",pagination);
}
return out;
}
/**
* This function is the general function that calls the correct funtions to get all the data that the UI requested and get it in the
* correct format for the UI.
* @param {Storage} storage The type of storage requested (e.g. RecordStorage, AuthorizationStorage,...)
* @param {UIRequest} ui The request from the ui to which we send a response.
* @param {String} param If a querystring has been added to the URL(e.g.'?query='), it will be in this param
* @param {String} pageSize The amount of results per page requested.
* @param {String} pageNum The amount of pages requested.
* @throws UIException
*/
private void search_or_list(Storage storage,UIRequest ui,String path) throws UIException {
try {
JSONObject restrictedkey = GenericSearch.setRestricted(ui,null,null,null,(mode==MODE_SEARCH),this.r);
JSONObject restriction = restrictedkey.getJSONObject("restriction");
String key = restrictedkey.getString("key");
JSONObject results = getResults(ui, storage, restriction, key, path);
//cache for record traverser
if(results.has("pagination") && results.getJSONObject("pagination").has("separatelists")){
GenericSearch.createTraverser(ui, this.r.getID(), "", results, restriction, key, 1);
}
ui.sendJSONResponse(results);
} catch (JSONException e) {
throw new UIException("JSONException during search_or_list",e);
} catch (ExistException e) {
throw new UIException("ExistException during search_or_list",e);
} catch (UnimplementedException e) {
throw new UIException("UnimplementedException during search_or_list",e);
} catch (UnderlyingStorageException x) {
UIException uiexception = new UIException(x.getMessage(),x.getStatus(),x.getUrl(),x);
ui.sendJSONResponse(uiexception.getJSON());
}
}
/**
* This function is the general function that calls the correct funtions to get all the data that the UI requested and get it in the
* correct format for the UI.
* @param {Storage} storage The type of storage requested (e.g. RecordStorage, AuthorizationStorage,...)
* @param {UIRequest} ui The request from the ui to which we send a response.
* @param {String} param If a querystring has been added to the URL(e.g.'?query='), it will be in this param
* @param {String} pageSize The amount of results per page requested.
* @param {String} pageNum The amount of pages requested.
* @throws UIException
* @throws UnderlyingStorageException
* @throws UnimplementedException
* @throws ExistException
* @throws JSONException
*/
protected JSONObject getResults(UIRequest request,Storage storage,JSONObject restriction, String key, String path) throws UIException, JSONException, ExistException, UnimplementedException, UnderlyingStorageException {
JSONObject results = new JSONObject();
if(this.r.getID().equals("permission")){
//pagination isn't properly implemented in permissions so just keep looping til we get everything
int pgnum = 0;
if(restriction.has("pageNum")){ //just get teh page specified
results = getJSON(storage,restriction,key,base);
}
else{ // if not specified page then loop over them all.
JSONArray newitems =new JSONArray();
results = getJSON(storage,restriction,key,base);
while(results.has(key) && results.getJSONArray(key).length()>0){
JSONArray items = results.getJSONArray(key);
for(int i=0;i<items.length();i++){
newitems.put(items.get(i));
}
pgnum++;
restriction.put("pageNum", Integer.toString(pgnum));
results = getJSON(storage,restriction,key,base);
}
results.put(key, newitems);
}
}
else if(r.getID().equals("reports")){
String type= "";
if(path!=null && !path.equals("")){
restriction.put("queryTerm", "doctype");
restriction.put("queryString", spec.getRecordByWebUrl(path).getServicesTenantSg());
}
if(restriction.has("queryTerm") && restriction.getString("queryTerm").equals("doctype")){
type = restriction.getString("queryString");
results = getJSON(storage,restriction,key,base);
results = showReports(results, type, key);
if(request!=null) {
int cacheMaxAgeSeconds = spec.getAdminData().getReportListCacheAge();
if(cacheMaxAgeSeconds > 0) {
request.setCacheMaxAgeSeconds(cacheMaxAgeSeconds);
}
}
}
else{
JSONObject reporting = new JSONObject();
for(Record r2 : spec.getAllRecords()) {
if(r2.isInRecordList()){
type = r2.getServicesTenantSg();
restriction.put("queryTerm","doctype");
restriction.put("queryString",type);
JSONObject rdata = getJSON(storage,restriction,key,base);
JSONObject procedurereports = showReports(rdata, type, key);
reporting.put(r2.getWebURL(), procedurereports);
}
}
results.put("reporting", reporting);
}
}
else{
if((mode==MODE_SEARCH_RELATED) && !path.isEmpty()) {
// This is a related to case
restriction.put(GenericSearch.SEARCH_RELATED_TO_CSID_AS_SUBJECT, path);
}
if((searchAllGroup != null) && r.isType("searchall")) { // Add a new service group name to
restriction.put(GenericSearch.SEARCH_ALL_GROUP, searchAllGroup);
}
results = getJSON(storage,restriction,key,base);
}
return results;
}
private JSONObject showReports(JSONObject data, String type, String key) throws JSONException{
JSONObject results = new JSONObject();
JSONArray list = new JSONArray();
JSONArray names = new JSONArray();
if(data.has(key)){
JSONArray ja = data.getJSONArray(key);
for(int j=0;j<ja.length();j++){
list.put(ja.getJSONObject(j).getString("csid"));
names.put(ja.getJSONObject(j).getString("number"));
}
results.put("reportlist", list);
results.put("reportnames", names);
}
return results;
}
private void advancedSearch(Storage storage,UIRequest ui,String path, JSONObject params) throws UIException{
try {
JSONObject results = new JSONObject();
JSONObject restrictedkey = GenericSearch.setRestricted(ui,null,null,null,true,this.r);
JSONObject restriction = restrictedkey.getJSONObject("restriction");
String key = restrictedkey.getString("key");
GenericSearch.buildQuery(this.r,params, restriction);
key="results";
results = getJSON(storage,restriction,key,base);
//cache for record traverser
if(results.has("pagination") && results.getJSONObject("pagination").has("separatelists")){
GenericSearch.createTraverser(ui, this.r.getID(), "", results, restriction, key, 1);
}
ui.sendJSONResponse(results);
} catch (JSONException e) {
throw new UIException("JSONException during advancedSearch "+e.getMessage(),e);
} catch (ExistException e) {
throw new UIException("ExistException during search_or_list",e);
} catch (UnimplementedException e) {
throw new UIException("UnimplementedException during search_or_list",e);
} catch (UnderlyingStorageException x) {
UIException uiexception = new UIException(x.getMessage(),x.getStatus(),x.getUrl(),x);
ui.sendJSONResponse(uiexception.getJSON());
}
}
public void searchtype(Storage storage,UIRequest ui,String path) throws UIException{
if(ui.getBody() == null || StringUtils.isBlank(ui.getBody())){
search_or_list(storage,ui,path);
}
else{
//advanced search
advancedSearch(storage,ui,path, ui.getJSONBody());
}
}
/* Wrapper exists to be used inRead, hence not private */
public JSONObject getJSON(Storage storage,JSONObject restriction, String key, String mybase)
throws JSONException, UIException, ExistException, UnimplementedException, UnderlyingStorageException{
JSONObject out = new JSONObject();
JSONObject data = storage.getPathsJSON(mybase,restriction);
String[] paths = (String[]) data.get("listItems");
JSONObject pagination = new JSONObject();
if(data.has("pagination")){
pagination = data.getJSONObject("pagination");
}
for(int i=0;i<paths.length;i++) {
if(paths[i].startsWith(mybase+"/")){
paths[i]=paths[i].substring((mybase+"/").length());
}
}
out = pathsToJSON(storage,mybase,paths,key,pagination);
return out;
}
public void run(Object in,String[] tail) throws UIException {
Request q=(Request)in;
searchtype(q.getStorage(),q.getUIRequest(),StringUtils.join(tail,"/"));
}
public void configure(Spec spec) {
configure(null, spec);
}
public void configure(WebUI ui,Spec spec) {
for(Record r : spec.getAllRecords()) {
type_to_url.put(r.getID(),r.getWebURL());
}
}
}
| false
| true
|
private JSONObject generateMiniRecord(Storage storage,String type,String csid) throws JSONException {
String postfix = "list";
if(this.mode==MODE_SEARCH){
postfix = "search";
}
JSONObject restrictions = new JSONObject();
JSONObject out = new JSONObject();
try {
if(csid == null || csid.equals("")){
return out;
}
out = storage.retrieveJSON(type+"/"+csid+"/view/"+postfix,restrictions);
out.put("csid",csid);
String recordtype = null;
if(!r.isType("searchall")) {
recordtype = type_to_url.get(type);
} else {
JSONObject summarylist = out.getJSONObject("summarylist");
String uri = summarylist.getString("uri");
if(uri!=null && uri.startsWith("/"))
uri=uri.substring(1);
String[] parts=uri.split("/");
String recordurl = parts[0];
Record itemr = r.getSpec().getRecordByServicesUrl(recordurl);
if (itemr == null) {
String docType = summarylist.getString("docType");
itemr = r.getSpec().getRecordByServicesDocType(docType);
}
if (itemr == null) {
recordtype = UNKNOWN_RECORD_TYPE;
log.warn("Could not get record type for record with services URL " + recordurl);
} else {
recordtype = type_to_url.get(itemr.getID());
// Include the vocabulary name ("namespace") value for each authority item record in the list
String refName = null;
RefName.AuthorityItem item = null;
if(summarylist.has("refName")) {
refName = summarylist.getString("refName");
}
if(refName!=null) {
item = RefName.AuthorityItem.parse(refName);
}
if(item!=null) {
out.put("namespace",item.getParentShortIdentifier());
} else {
log.warn("Could not get vocabulary namespace for record with services URL " + recordurl);
}
}
}
out.put("recordtype", recordtype);
// CSPACE-2894
if(this.r.getID().equals("permission")){
String summary = out.getString("summary");
String name = Generic.ResourceNameUI(this.r.getSpec(), summary);
if(name.contains(WORKFLOW_SUB_RESOURCE)){
return null;
}
out.put("summary", name);
out.put("display", Generic.getPermissionView(this.r.getSpec(), summary));
}
} catch (ExistException e) {
out.put("csid",csid);
out.put("isError", true);
JSONObject msg = new JSONObject();
msg.put("severity", "error");
msg.put("message", "Exist Exception:"+e.getMessage());
JSONArray msgs = new JSONArray();
msgs.put(msg);
out.put("messages", msgs);
} catch (UnimplementedException e) {
out.put("csid",csid);
out.put("isError", true);
JSONObject msg = new JSONObject();
msg.put("severity", "error");
msg.put("message", "Exist Exception:"+e.getMessage());
JSONArray msgs = new JSONArray();
msgs.put(msg);
out.put("messages", msgs);
} catch (UnderlyingStorageException e) {
out.put("csid",csid);
out.put("isError", true);
JSONObject msg = new JSONObject();
msg.put("severity", "error");
msg.put("message", "Exist Exception:"+e.getMessage());
JSONArray msgs = new JSONArray();
msgs.put(msg);
out.put("messages", msgs);
}
return out;
}
|
private JSONObject generateMiniRecord(Storage storage,String type,String csid) throws JSONException {
String postfix = "list";
if(this.mode==MODE_SEARCH){
postfix = "search";
}
JSONObject restrictions = new JSONObject();
JSONObject out = new JSONObject();
try {
if(csid == null || csid.equals("")){
return out;
}
out = storage.retrieveJSON(type+"/"+csid+"/view/"+postfix,restrictions);
out.put("csid",csid);
String recordtype = null;
if(!r.isType("searchall")) {
recordtype = type_to_url.get(type);
} else {
JSONObject summarylist = out.getJSONObject("summarylist");
String uri = summarylist.getString("uri");
if(uri!=null && uri.startsWith("/"))
uri=uri.substring(1);
String[] parts=uri.split("/");
String recordurl = parts[0];
Record itemr = r.getSpec().getRecordByServicesUrl(recordurl);
if (itemr == null) {
String docType = summarylist.getString("docType");
itemr = r.getSpec().getRecordByServicesDocType(docType);
}
if (itemr == null) {
recordtype = UNKNOWN_RECORD_TYPE;
log.warn("Could not get record type for record with services URL " + recordurl);
} else {
recordtype = type_to_url.get(itemr.getID());
// Include the vocabulary name ("namespace") value for each authority item record in the list
String refName = null;
RefName.AuthorityItem item = null;
if(summarylist.has("refName")) {
refName = summarylist.getString("refName");
}
if(refName!=null) {
item = RefName.AuthorityItem.parse(refName);
}
if(item!=null) {
out.put("namespace",item.getParentShortIdentifier());
} else {
log.warn("Could not get vocabulary namespace for record with services URL " + recordurl);
}
}
}
out.put("recordtype", recordtype);
// CSPACE-2894
if(this.r.getID().equals("permission")){
String summary = out.getString("summary");
String name = Generic.ResourceNameUI(this.r.getSpec(), summary);
if(name.contains(WORKFLOW_SUB_RESOURCE)){
return null;
}
out.put("summary", name);
out.put("display", Generic.getPermissionView(this.r.getSpec(), summary));
}
} catch (ExistException e) {
out.put("csid",csid);
out.put("isError", true);
JSONObject msg = new JSONObject();
msg.put("severity", "error");
msg.put("message", "Exist Exception:"+e.getMessage());
JSONArray msgs = new JSONArray();
msgs.put(msg);
out.put("messages", msgs);
} catch (UnimplementedException e) {
out.put("csid",csid);
out.put("isError", true);
JSONObject msg = new JSONObject();
msg.put("severity", "error");
msg.put("message", "Exist Exception:"+e.getMessage());
JSONArray msgs = new JSONArray();
msgs.put(msg);
out.put("messages", msgs);
} catch (UnderlyingStorageException e) {
out.put("csid",csid);
out.put("isError", true);
JSONObject msg = new JSONObject();
msg.put("severity", "error");
msg.put("message", "Exist Exception:"+e.getMessage());
JSONArray msgs = new JSONArray();
msgs.put(msg);
out.put("messages", msgs);
}
return out;
}
|
diff --git a/classes/test/SynchronizedStart.java b/classes/test/SynchronizedStart.java
index f1b85488..db1dcd62 100644
--- a/classes/test/SynchronizedStart.java
+++ b/classes/test/SynchronizedStart.java
@@ -1,25 +1,25 @@
package classes.test;
/*
This test replicates a bug encountered when running the
Eclipse Java Compiler, when calling the constructor for
org.eclipse.jdt.internal.compiler.ProcessTaskManager
*/
public class SynchronizedStart implements Runnable {
public SynchronizedStart() {
synchronized (this) {
System.out.println("1: inside ctor synchronized block");
- new Thread(this, "thread").start();
+ new Thread(this, "runner").start();
}
System.out.println("2: outside ctor synchronized block");
}
public void run() {
synchronized (this) {
System.out.println("3: inside run synchronized block");
}
System.out.println("4: outside run synchronized block");
}
public static void main(String[] args) {
new SynchronizedStart();
}
}
| true
| true
|
public SynchronizedStart() {
synchronized (this) {
System.out.println("1: inside ctor synchronized block");
new Thread(this, "thread").start();
}
System.out.println("2: outside ctor synchronized block");
}
|
public SynchronizedStart() {
synchronized (this) {
System.out.println("1: inside ctor synchronized block");
new Thread(this, "runner").start();
}
System.out.println("2: outside ctor synchronized block");
}
|
diff --git a/src/java/org/wings/DefaultReloadManager.java b/src/java/org/wings/DefaultReloadManager.java
index fadfb4c4..e966a804 100644
--- a/src/java/org/wings/DefaultReloadManager.java
+++ b/src/java/org/wings/DefaultReloadManager.java
@@ -1,385 +1,385 @@
package org.wings;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wings.plaf.ComponentCG;
import org.wings.plaf.Update;
import org.wings.util.SStringBuilder;
/**
* Default implementation of the reload manager.
*
* @author Stephan Schuster
*/
public class DefaultReloadManager implements ReloadManager {
private final transient static Log log = LogFactory.getLog(DefaultReloadManager.class);
private int updateCount = 0;
private boolean updateMode = false;
private boolean acceptChanges = true;
protected final Map fullReplaceUpdates = new HashMap();
protected final Map fineGrainedUpdates = new HashMap();
protected final List componentsToReload = new ArrayList();
public void reload(SComponent component) {
if (component == null)
throw new IllegalArgumentException("Component must not be null!");
if (updateMode) {
addUpdate(component, null);
} else {
if (!componentsToReload.contains(component)) {
componentsToReload.add(component);
}
}
}
public void addUpdate(SComponent component, Update update) {
if (component == null)
throw new IllegalArgumentException("Component must not be null!");
if (update == null) {
update = component.getCG().getComponentUpdate(component);
if (update == null) {
SFrame frame = component.getParentFrame();
if (frame != null)
fullReplaceUpdates.put(frame, null);
return;
}
}
component = update.getComponent();
if (acceptChanges) {
PotentialUpdate potentialUpdate = new PotentialUpdate(update);
if ((update.getProperty() & Update.FULL_REPLACE_UPDATE) == Update.FULL_REPLACE_UPDATE) {
fullReplaceUpdates.put(component, potentialUpdate);
} else {
Set potentialUpdates = getFineGrainedUpdates(component);
potentialUpdates.remove(potentialUpdate);
potentialUpdates.add(potentialUpdate);
fineGrainedUpdates.put(component, potentialUpdates);
}
} else if (log.isDebugEnabled()) {
//log.debug("Component " + component + " changed after invalidation of frames.");
}
}
public List getUpdates() {
if (!componentsToReload.isEmpty()) {
for (Iterator i = componentsToReload.iterator(); i.hasNext();) {
boolean tmp = acceptChanges;
acceptChanges = true;
addUpdate((SComponent) i.next(), null);
acceptChanges = tmp;
}
}
filterUpdates();
List filteredUpdates = new ArrayList(fullReplaceUpdates.values());
for (Iterator i = fineGrainedUpdates.values().iterator(); i.hasNext();) {
filteredUpdates.addAll((Set) i.next());
}
Collections.sort(filteredUpdates, getUpdateComparator());
return filteredUpdates;
}
public Set getDirtyComponents() {
final Set dirtyComponents = new HashSet();
dirtyComponents.addAll(fullReplaceUpdates.keySet());
dirtyComponents.addAll(fineGrainedUpdates.keySet());
dirtyComponents.addAll(componentsToReload);
return dirtyComponents;
}
public Set getDirtyFrames() {
final Set dirtyFrames = new HashSet(5);
for (Iterator i = getDirtyComponents().iterator(); i.hasNext();) {
SFrame parentFrame = ((SComponent) i.next()).getParentFrame();
if (parentFrame != null)
dirtyFrames.add(parentFrame);
}
return dirtyFrames;
}
public void invalidateFrames() {
Iterator i = getDirtyFrames().iterator();
while (i.hasNext()) {
((SFrame) i.next()).invalidate();
i.remove();
}
acceptChanges = false;
}
public void notifyCGs() {
for (Iterator i = getDirtyComponents().iterator(); i.hasNext();) {
SComponent component = (SComponent) i.next();
ComponentCG componentCG = component.getCG();
if (componentCG != null)
componentCG.componentChanged(component);
}
}
public void clear() {
updateCount = 0;
updateMode = false;
acceptChanges = true;
fullReplaceUpdates.clear();
fineGrainedUpdates.clear();
componentsToReload.clear();
}
public boolean isUpdateMode() {
return updateMode;
}
public void setUpdateMode(boolean updateMode) {
this.updateMode = updateMode;
}
public boolean isReloadRequired(SFrame frame) {
if (updateMode)
return fullReplaceUpdates.containsKey(frame);
else
return true;
}
protected Set getFineGrainedUpdates(SComponent component) {
Set potentialUpdates = (Set) fineGrainedUpdates.get(component);
if (potentialUpdates == null) {
potentialUpdates = new HashSet(5);
}
return potentialUpdates;
}
protected void filterUpdates() {
if (log.isDebugEnabled())
printAllUpdates("Potential updates:");
fineGrainedUpdates.keySet().removeAll(fullReplaceUpdates.keySet());
SortedMap componentHierarchy = new TreeMap(new PathComparator());
for (Iterator i = getDirtyComponents().iterator(); i.hasNext();) {
SComponent component = (SComponent) i.next();
if ((!component.isRecursivelyVisible() && !(component instanceof SMenu)) ||
component.getParentFrame() == null) {
fullReplaceUpdates.remove(component);
fineGrainedUpdates.remove(component);
} else {
componentHierarchy.put(getPath(component).toString(), component);
}
}
for (Iterator i = componentHierarchy.keySet().iterator(); i.hasNext();) {
String topPath = (String) i.next();
if (fullReplaceUpdates.containsKey(componentHierarchy.get(topPath))) {
while (i.hasNext()) {
String subPath = (String) i.next();
- if (subPath.startsWith(topPath)) {
+ if (subPath.startsWith(topPath + "/")) {
fullReplaceUpdates.remove(componentHierarchy.get(subPath));
fineGrainedUpdates.remove(componentHierarchy.get(subPath));
i.remove();
}
}
}
i = componentHierarchy.tailMap(topPath + "\0").keySet().iterator();
}
if (log.isDebugEnabled())
printAllUpdates("Effective updates:");
}
private SStringBuilder getPath(SComponent component) {
if (component == null) {
return new SStringBuilder();
} else {
if (component instanceof SMenuItem) {
SMenuItem menuItem = (SMenuItem) component;
return getPath(menuItem.getParentMenu()).append("/").append(component.getName());
} else if (component instanceof SSpinner.DefaultEditor) {
SSpinner.DefaultEditor defaultEditor = (SSpinner.DefaultEditor) component;
return getPath(defaultEditor.getSpinner()).append("/").append(component.getName());
} else {
return getPath(component.getParent()).append("/").append(component.getName());
}
}
}
private void printAllUpdates(String header) {
log.debug(header);
int numberOfUpdates = 0;
for (Iterator i = getDirtyComponents().iterator(); i.hasNext();) {
SStringBuilder output = new SStringBuilder();
SComponent component = (SComponent) i.next();
output.append(" ").append(component + ":");
if (fullReplaceUpdates.containsKey(component)) {
output.append(" " + fullReplaceUpdates.get(component));
if (fullReplaceUpdates.get(component) == null)
output.append(" [no component update supported --> reload frame!!!]");
++numberOfUpdates;
}
for (Iterator j = getFineGrainedUpdates(component).iterator(); j.hasNext();) {
output.append(" " + j.next());
++numberOfUpdates;
}
log.debug(output.toString());
}
log.debug(" --> " + numberOfUpdates + " updates");
}
private final class PotentialUpdate implements Update {
private Update update;
private int position;
public PotentialUpdate(Update update) {
this.update = update;
this.position = updateCount++;
}
public SComponent getComponent() {
return update.getComponent();
}
public Handler getHandler() {
return update.getHandler();
}
public int getProperty() {
return update.getProperty();
}
public int getPriority() {
return update.getPriority();
}
public int getPosition() {
return position;
}
public boolean equals(Object object) {
if (object == this)
return true;
if (object == null || object.getClass() != this.getClass())
return false;
PotentialUpdate other = (PotentialUpdate) object;
return update.equals(other.update);
}
public int hashCode() {
return update.hashCode();
}
public String toString() {
String clazz = update.getClass().getName();
int index = clazz.lastIndexOf("$");
if (index < 0)
index = clazz.lastIndexOf(".");
return clazz.substring(++index) + "[" + getPriority() + "|" + getPosition() + "]";
}
}
private Comparator getUpdateComparator() {
return
new CombinedComparator(
new InverseComparator(new PriorityComparator()),
new PositionComparator()
);
}
private static class PathComparator implements Comparator {
public int compare(Object object1, Object object2) {
String path1 = (String) object1;
String path2 = (String) object2;
int depthOfPath1 = path1.split("/").length;
int depthOfPath2 = path2.split("/").length;
if (depthOfPath1 < depthOfPath2) return -1;
if (depthOfPath1 > depthOfPath2) return 1;
return path1.compareTo(path2);
}
}
private static class PositionComparator implements Comparator {
public int compare(Object object1, Object object2) {
PotentialUpdate update1 = (PotentialUpdate) object1;
PotentialUpdate update2 = (PotentialUpdate) object2;
if (update1.getPosition() < update2.getPosition()) return -1;
if (update1.getPosition() > update2.getPosition()) return 1;
return 0;
}
}
private static class PriorityComparator implements Comparator {
public int compare(Object object1, Object object2) {
PotentialUpdate update1 = (PotentialUpdate) object1;
PotentialUpdate update2 = (PotentialUpdate) object2;
if (update1.getPriority() < update2.getPriority()) return -1;
if (update1.getPriority() > update2.getPriority()) return 1;
return 0;
}
}
private static class CombinedComparator implements Comparator {
private Comparator comparator1;
private Comparator comparator2;
public CombinedComparator(Comparator c1, Comparator c2) {
this.comparator1 = c1;
this.comparator2 = c2;
}
public int compare(Object object1, Object object2) {
int result = comparator1.compare(object1, object2);
if (result == 0)
return comparator2.compare(object1, object2);
else
return result;
}
}
private static class InverseComparator implements Comparator {
private Comparator comparator;
public InverseComparator(Comparator c) {
this.comparator = c;
}
public int compare(Object object1, Object object2) {
return -comparator.compare(object1, object2);
}
}
}
| true
| true
|
protected void filterUpdates() {
if (log.isDebugEnabled())
printAllUpdates("Potential updates:");
fineGrainedUpdates.keySet().removeAll(fullReplaceUpdates.keySet());
SortedMap componentHierarchy = new TreeMap(new PathComparator());
for (Iterator i = getDirtyComponents().iterator(); i.hasNext();) {
SComponent component = (SComponent) i.next();
if ((!component.isRecursivelyVisible() && !(component instanceof SMenu)) ||
component.getParentFrame() == null) {
fullReplaceUpdates.remove(component);
fineGrainedUpdates.remove(component);
} else {
componentHierarchy.put(getPath(component).toString(), component);
}
}
for (Iterator i = componentHierarchy.keySet().iterator(); i.hasNext();) {
String topPath = (String) i.next();
if (fullReplaceUpdates.containsKey(componentHierarchy.get(topPath))) {
while (i.hasNext()) {
String subPath = (String) i.next();
if (subPath.startsWith(topPath)) {
fullReplaceUpdates.remove(componentHierarchy.get(subPath));
fineGrainedUpdates.remove(componentHierarchy.get(subPath));
i.remove();
}
}
}
i = componentHierarchy.tailMap(topPath + "\0").keySet().iterator();
}
if (log.isDebugEnabled())
printAllUpdates("Effective updates:");
}
|
protected void filterUpdates() {
if (log.isDebugEnabled())
printAllUpdates("Potential updates:");
fineGrainedUpdates.keySet().removeAll(fullReplaceUpdates.keySet());
SortedMap componentHierarchy = new TreeMap(new PathComparator());
for (Iterator i = getDirtyComponents().iterator(); i.hasNext();) {
SComponent component = (SComponent) i.next();
if ((!component.isRecursivelyVisible() && !(component instanceof SMenu)) ||
component.getParentFrame() == null) {
fullReplaceUpdates.remove(component);
fineGrainedUpdates.remove(component);
} else {
componentHierarchy.put(getPath(component).toString(), component);
}
}
for (Iterator i = componentHierarchy.keySet().iterator(); i.hasNext();) {
String topPath = (String) i.next();
if (fullReplaceUpdates.containsKey(componentHierarchy.get(topPath))) {
while (i.hasNext()) {
String subPath = (String) i.next();
if (subPath.startsWith(topPath + "/")) {
fullReplaceUpdates.remove(componentHierarchy.get(subPath));
fineGrainedUpdates.remove(componentHierarchy.get(subPath));
i.remove();
}
}
}
i = componentHierarchy.tailMap(topPath + "\0").keySet().iterator();
}
if (log.isDebugEnabled())
printAllUpdates("Effective updates:");
}
|
diff --git a/src/me/sd5/pvplogger/PLConfig.java b/src/me/sd5/pvplogger/PLConfig.java
index 0c8f19c..ebbf4b4 100644
--- a/src/me/sd5/pvplogger/PLConfig.java
+++ b/src/me/sd5/pvplogger/PLConfig.java
@@ -1,26 +1,26 @@
package me.sd5.pvplogger;
import org.bukkit.configuration.file.FileConfiguration;
public class PLConfig {
private static FileConfiguration config;
//Settings
public static String dbUrl = null;
public static String dbUser = null;
public static String dbPassword = null;
public static String dbTable = null;
public static void load(FileConfiguration c) {
config = c;
config.options().copyDefaults(true);
config.options().header("PVPLogger config file.");
dbUrl = "jdbc:mysql://" + config.getString("mysql-connection.host") + ":" + config.getString("mysql-connection.port") + "/" + config.getString("mysql-connection.database");
dbUser = config.getString("mysql-connection.user");
- dbPassword = config.getString("mysql-connection.passwort");
+ dbPassword = config.getString("mysql-connection.password");
dbTable = config.getString("mysql-connection.table");
}
}
| true
| true
|
public static void load(FileConfiguration c) {
config = c;
config.options().copyDefaults(true);
config.options().header("PVPLogger config file.");
dbUrl = "jdbc:mysql://" + config.getString("mysql-connection.host") + ":" + config.getString("mysql-connection.port") + "/" + config.getString("mysql-connection.database");
dbUser = config.getString("mysql-connection.user");
dbPassword = config.getString("mysql-connection.passwort");
dbTable = config.getString("mysql-connection.table");
}
|
public static void load(FileConfiguration c) {
config = c;
config.options().copyDefaults(true);
config.options().header("PVPLogger config file.");
dbUrl = "jdbc:mysql://" + config.getString("mysql-connection.host") + ":" + config.getString("mysql-connection.port") + "/" + config.getString("mysql-connection.database");
dbUser = config.getString("mysql-connection.user");
dbPassword = config.getString("mysql-connection.password");
dbTable = config.getString("mysql-connection.table");
}
|
diff --git a/src/com/redhat/qe/tools/RemoteFileTasks.java b/src/com/redhat/qe/tools/RemoteFileTasks.java
index a7f3214..2e7c087 100644
--- a/src/com/redhat/qe/tools/RemoteFileTasks.java
+++ b/src/com/redhat/qe/tools/RemoteFileTasks.java
@@ -1,171 +1,171 @@
package com.redhat.qe.tools;
import java.io.File;
import java.io.IOException;
import java.util.logging.Level;
import java.util.logging.LogRecord;
import java.util.logging.Logger;
import com.redhat.qe.auto.testng.LogMessageUtil;
import com.redhat.qe.auto.testopia.Assert;
import com.trilead.ssh2.Connection;
import com.trilead.ssh2.SCPClient;
public class RemoteFileTasks {
protected static Logger log = Logger.getLogger(RemoteFileTasks.class.getName());
public static final String stdoutFile = "/tmp/stdout";
public static final String stderrFile = "/tmp/stderr";
/**
* Create a file on a remote machine with given contents
* @param conn - A connection object already created to connect to ssh server
* @param filePath - path to the file you want to create (including dir and filename)
* @param contents - contents of the file you want to create
* @throws IOException
* @author jweiss
*/
public static void createFile(Connection conn, String filePath, String contents, String mode) throws IOException {
String dir = new File(filePath).getParent();
String fn = new File(filePath).getName();
log.log(Level.INFO, "Creating " + fn + " in " + dir + " on " + conn.getHostname(), LogMessageUtil.Style.Action);
SCPClient scp = new SCPClient(conn);
scp.put(contents.getBytes(), fn, dir, mode);
}
public static void createFile(Connection conn, String filePath, String contents) throws IOException {
createFile(conn, filePath, contents, "0755");
}
/**
* Use echo to create a file with the given contents. Then use chmod to give permissions to the file.
* @param runner
* @param filePath - absolute path to the file create
* @param contents - contents of the file
* @param perms - optional chmod options to apply to the filePath (e.g. "a+x")
* @return - exit code
* @author jsefler
*/
public static int createFile(SSHCommandRunner runner, String filePath, String contents, String perms) {
int exitCode = runCommandAndWait(runner, "echo -n -e '"+contents+"' > "+filePath, LogMessageUtil.action());
if (exitCode==0 && perms!=null) exitCode = runCommandAndWait(runner, "chmod "+perms+" "+filePath, LogMessageUtil.action());
return exitCode;
}
/**
* Copy file(s) onto a remote machine
* @param conn - A connection object already created to connect to ssh server
* @param dest - path where the file(s) should go on the remote machine (must be dir)
* @param source - one or more paths to the file(s) you want to copy to the remote dir
* @throws IOException
* @author jweiss
*/
public static void copyFile(Connection conn, String dest, String... sources ) throws IOException {
for (String source: sources)
log.log(Level.INFO, "Copying " + source + " to " + dest + " on " + conn.getHostname(), LogMessageUtil.Style.Action);
SCPClient scp = new SCPClient(conn);
scp.put(sources, dest);
}
/**
* Use sed to search and replace content within a file.<br>
* sed -i 's/regexp/replacement/g' filePath
* @param runner
* @param filePath - absolute path to the file to be searched and replaced
* @param regexp - the regular expression used to match a pattern for replacement
* @param replacement - the replacement content
* @return - exit code from sed
*/
public static int searchReplaceFile (SSHCommandRunner runner, String filePath, String regexp, String replacement) {
return runCommandAndWait(runner, "sed -i 's/"+regexp+"/"+replacement+"/g' " + filePath, LogMessageUtil.action());
}
/**
* Use grep to search for the existence of an extended regular expression within a file.<br>
* grep -E 'searchTerm' filePath
* @param runner
* @param filePath - absolute path to the file to be searched
* @param pattern - an extended regular expression (man grep for help)
* @return - exit code from grep
*/
public static int grepFile (SSHCommandRunner runner, String filePath, String pattern) {
return runCommandAndWait(runner, "grep -E '" + pattern + "' " + filePath, LogMessageUtil.info());
}
/**
* Use sed to delete lines from a file.<br>
* sed -i '/containingText/d' filePath
* @param runner
* @param filePath - absolute path to the file from which lines will be deleted
* @param containingText - delete lines containing a match to this text
* @return - exit code from sed
* @author jsefler
*/
public static int deleteLines (SSHCommandRunner runner, String filePath, String containingText) {
return runCommandAndWait(runner, "sed -i '/"+containingText+"/d' " + filePath, LogMessageUtil.action());
}
/**
* Test for the existence of a file.<br>
* test -e filePath && echo 1 || echo 0
* @param runner
* @param filePath - absolute path to the file to test for existence
* @return 1 (file exists), 0 (file does not exist), -1 (could not determine existence)
* @author jsefler
*/
public static int testFileExists (SSHCommandRunner runner, String filePath) {
runCommandAndWait(runner, "test -e "+filePath+" && echo 1 || echo 0", LogMessageUtil.info());
if (runner.getStdout().trim().equals("1")) return 1;
if (runner.getStdout().trim().equals("0")) return 0;
return -1;
}
public static int runCommandAndWait(SSHCommandRunner runner, String command, LogRecord logRecord){
return runner.runCommandAndWait(command,logRecord);
//return runner.runCommandAndWait(command,Long.valueOf(30000),logRecord); // timeout after 30 sec
}
public static int runAugeasCommand(SSHCommandRunner runner, String command, LogRecord logRecord){
return runCommandAndWait(runner, String.format("echo -e \"%s\nsave\n\" | augtool", command), logRecord);
}
public static int updateAugeasConfig(SSHCommandRunner runner, String augeusPath, String newValue){
if (newValue == null)
return runAugeasCommand(runner, String.format("rm %s", augeusPath), LogMessageUtil.action());
else
return runAugeasCommand(runner, String.format("set %s '%s'", augeusPath, newValue), LogMessageUtil.action());
}
/**
* Use the sshCommandRunner to execute the given command and verify the output
* contains an expected grep expression. Moreover, the stdout and stderr strings are
* redirected to this.stdoutFile and this.stderrFile which you can subsequently
* use for further post processing before the next call to runCommandAndAssert(...).
* @param command - command to execute with options
* @param stdoutGrepExpression - if !null, stdout is asserted to contain a match to this grep expression
* @param stderrGrepExpression - if !null, stderr is asserted to contain a match to this grep expression
* @param expectedExitCode - expected exit code from the command (usually 0 on success, non-0 on failure)
* @author jsefler
*/
public static void runCommandAndAssert(SSHCommandRunner sshCommandRunner, String command, String stdoutGrepExpression, String stderrGrepExpression, int expectedExitCode) {
//String runCommand = String.format("(%s | tee %s) 3>&1 1>&2 2>&3 | tee %s", command, stdoutFile, stderrFile); // the problem with this is that the exit code is lost
String runCommand = String.format("%s 1>%s 2>%s", command, stdoutFile, stderrFile);
int exitCode = sshCommandRunner.runCommandAndWait(runCommand);
if (exitCode!=expectedExitCode) {
sshCommandRunner.runCommandAndWait("echo 'Stdout from: "+command+"'; cat "+stdoutFile); // cheap way to log stdoutFile
sshCommandRunner.runCommandAndWait("echo 'Stderr from: "+command+"'; cat "+stderrFile); // cheap way to log stderrFile
- } Assert.assertEquals(sshCommandRunner.runCommandAndWait(runCommand),Integer.valueOf(expectedExitCode));
+ } Assert.assertEquals(exitCode,expectedExitCode);
if (stdoutGrepExpression!=null) {
sshCommandRunner.runCommandAndWait("echo 'Stdout from: "+command+"'; cat "+stdoutFile); // cheap way to log stdoutFile
Assert.assertEquals(RemoteFileTasks.grepFile(sshCommandRunner, stdoutFile, stdoutGrepExpression),0,"Stdout contains a match grepping for extended regular expression '"+stdoutGrepExpression+"' (0 means match)");
}
if (stderrGrepExpression!=null) {
sshCommandRunner.runCommandAndWait("echo 'Stderr from: "+command+"'; cat "+stderrFile); // cheap way to log stderrFile
Assert.assertEquals(RemoteFileTasks.grepFile(sshCommandRunner, stderrFile, stderrGrepExpression),0,"Stderr contains a match grepping for extended regular expression '"+stderrGrepExpression+"' (0 means match)");
}
}
}
| true
| true
|
public static void runCommandAndAssert(SSHCommandRunner sshCommandRunner, String command, String stdoutGrepExpression, String stderrGrepExpression, int expectedExitCode) {
//String runCommand = String.format("(%s | tee %s) 3>&1 1>&2 2>&3 | tee %s", command, stdoutFile, stderrFile); // the problem with this is that the exit code is lost
String runCommand = String.format("%s 1>%s 2>%s", command, stdoutFile, stderrFile);
int exitCode = sshCommandRunner.runCommandAndWait(runCommand);
if (exitCode!=expectedExitCode) {
sshCommandRunner.runCommandAndWait("echo 'Stdout from: "+command+"'; cat "+stdoutFile); // cheap way to log stdoutFile
sshCommandRunner.runCommandAndWait("echo 'Stderr from: "+command+"'; cat "+stderrFile); // cheap way to log stderrFile
} Assert.assertEquals(sshCommandRunner.runCommandAndWait(runCommand),Integer.valueOf(expectedExitCode));
if (stdoutGrepExpression!=null) {
sshCommandRunner.runCommandAndWait("echo 'Stdout from: "+command+"'; cat "+stdoutFile); // cheap way to log stdoutFile
Assert.assertEquals(RemoteFileTasks.grepFile(sshCommandRunner, stdoutFile, stdoutGrepExpression),0,"Stdout contains a match grepping for extended regular expression '"+stdoutGrepExpression+"' (0 means match)");
}
if (stderrGrepExpression!=null) {
sshCommandRunner.runCommandAndWait("echo 'Stderr from: "+command+"'; cat "+stderrFile); // cheap way to log stderrFile
Assert.assertEquals(RemoteFileTasks.grepFile(sshCommandRunner, stderrFile, stderrGrepExpression),0,"Stderr contains a match grepping for extended regular expression '"+stderrGrepExpression+"' (0 means match)");
}
}
|
public static void runCommandAndAssert(SSHCommandRunner sshCommandRunner, String command, String stdoutGrepExpression, String stderrGrepExpression, int expectedExitCode) {
//String runCommand = String.format("(%s | tee %s) 3>&1 1>&2 2>&3 | tee %s", command, stdoutFile, stderrFile); // the problem with this is that the exit code is lost
String runCommand = String.format("%s 1>%s 2>%s", command, stdoutFile, stderrFile);
int exitCode = sshCommandRunner.runCommandAndWait(runCommand);
if (exitCode!=expectedExitCode) {
sshCommandRunner.runCommandAndWait("echo 'Stdout from: "+command+"'; cat "+stdoutFile); // cheap way to log stdoutFile
sshCommandRunner.runCommandAndWait("echo 'Stderr from: "+command+"'; cat "+stderrFile); // cheap way to log stderrFile
} Assert.assertEquals(exitCode,expectedExitCode);
if (stdoutGrepExpression!=null) {
sshCommandRunner.runCommandAndWait("echo 'Stdout from: "+command+"'; cat "+stdoutFile); // cheap way to log stdoutFile
Assert.assertEquals(RemoteFileTasks.grepFile(sshCommandRunner, stdoutFile, stdoutGrepExpression),0,"Stdout contains a match grepping for extended regular expression '"+stdoutGrepExpression+"' (0 means match)");
}
if (stderrGrepExpression!=null) {
sshCommandRunner.runCommandAndWait("echo 'Stderr from: "+command+"'; cat "+stderrFile); // cheap way to log stderrFile
Assert.assertEquals(RemoteFileTasks.grepFile(sshCommandRunner, stderrFile, stderrGrepExpression),0,"Stderr contains a match grepping for extended regular expression '"+stderrGrepExpression+"' (0 means match)");
}
}
|
diff --git a/src/main/java/com/mike724/email/EmailManager.java b/src/main/java/com/mike724/email/EmailManager.java
index 70cf5c1..a75b9b7 100644
--- a/src/main/java/com/mike724/email/EmailManager.java
+++ b/src/main/java/com/mike724/email/EmailManager.java
@@ -1,63 +1,64 @@
package com.mike724.email;
import java.io.File;
import java.io.FileWriter;
import java.io.PrintWriter;
import java.util.Set;
import org.bukkit.configuration.file.FileConfiguration;
public class EmailManager {
private ConfigAccessor configA;
private FileConfiguration config;
private String root = "emails.";
private Email plugin;
public EmailManager(Email plugin) {
this.plugin = plugin;
configA = new ConfigAccessor(plugin, "emails.yml");
config = configA.getConfig();
}
public void setPlayerEmail(String name, String email) {
config.set(root+name, email);
configA.saveConfig();
}
public String getPlayerEmail(String name) {
return config.getString(root+name);
}
public void removePlayerEmail(String name) {
config.set(root+name, null);
configA.saveConfig();
}
public void export(int type) {
- if(type != 1 || type !=2) {
+ if(!(type == 1 || type == 2)) {
+ plugin.getLogger().info("Incorrect export type");
return;
}
File file = new File(plugin.getDataFolder(), "export-type1.txt");
try {
PrintWriter pw = new PrintWriter(new FileWriter(file));
Set<String> keys = config.getConfigurationSection("emails").getKeys(false);
for(String key : keys) {
String line = "";
if(type == 1) {
line = key+","+config.getString(root+key);
} else if(type == 2) {
line = config.getString(root+key);
}
pw.println(line);
}
pw.close();
plugin.getLogger().info("Export file created at "+file.getPath());
} catch (Exception e) {
plugin.getLogger().severe("Could not export emails");
e.printStackTrace();
return;
}
}
}
| true
| true
|
public void export(int type) {
if(type != 1 || type !=2) {
return;
}
File file = new File(plugin.getDataFolder(), "export-type1.txt");
try {
PrintWriter pw = new PrintWriter(new FileWriter(file));
Set<String> keys = config.getConfigurationSection("emails").getKeys(false);
for(String key : keys) {
String line = "";
if(type == 1) {
line = key+","+config.getString(root+key);
} else if(type == 2) {
line = config.getString(root+key);
}
pw.println(line);
}
pw.close();
plugin.getLogger().info("Export file created at "+file.getPath());
} catch (Exception e) {
plugin.getLogger().severe("Could not export emails");
e.printStackTrace();
return;
}
}
|
public void export(int type) {
if(!(type == 1 || type == 2)) {
plugin.getLogger().info("Incorrect export type");
return;
}
File file = new File(plugin.getDataFolder(), "export-type1.txt");
try {
PrintWriter pw = new PrintWriter(new FileWriter(file));
Set<String> keys = config.getConfigurationSection("emails").getKeys(false);
for(String key : keys) {
String line = "";
if(type == 1) {
line = key+","+config.getString(root+key);
} else if(type == 2) {
line = config.getString(root+key);
}
pw.println(line);
}
pw.close();
plugin.getLogger().info("Export file created at "+file.getPath());
} catch (Exception e) {
plugin.getLogger().severe("Could not export emails");
e.printStackTrace();
return;
}
}
|
diff --git a/okapi/core/src/main/java/net/sf/okapi/common/skeleton/ResourceConverter.java b/okapi/core/src/main/java/net/sf/okapi/common/skeleton/ResourceConverter.java
index dca00dcc3..9e70c988a 100644
--- a/okapi/core/src/main/java/net/sf/okapi/common/skeleton/ResourceConverter.java
+++ b/okapi/core/src/main/java/net/sf/okapi/common/skeleton/ResourceConverter.java
@@ -1,353 +1,353 @@
/*===========================================================================
Copyright (C) 2008-2010 by the Okapi Framework contributors
-----------------------------------------------------------------------------
This library is free software; you can redistribute it and/or modify it
under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation; either version 2.1 of the License, or (at
your option) any later version.
This library is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser
General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this library; if not, write to the Free Software Foundation,
Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
See also the full LGPL text here: http://www.gnu.org/copyleft/lesser.html
===========================================================================*/
package net.sf.okapi.common.skeleton;
import java.security.InvalidParameterException;
import java.util.List;
import java.util.logging.Logger;
import net.sf.okapi.common.Event;
import net.sf.okapi.common.EventType;
import net.sf.okapi.common.IResource;
import net.sf.okapi.common.ISkeleton;
import net.sf.okapi.common.LocaleId;
import net.sf.okapi.common.resource.DocumentPart;
import net.sf.okapi.common.resource.IReferenceable;
import net.sf.okapi.common.resource.MultiEvent;
import net.sf.okapi.common.resource.StartDocument;
import net.sf.okapi.common.resource.TextUnit;
/**
* Simplifies events, i.e. splits the generic skeleton of a given event resource into parts to contain no references.
* The skeleton parts are attached to newly created DOCUMENT_PART events.
* Original references are converted either to skeleton parts, or TEXT_UNIT events.
* The sequence of DOCUMENT_PART and TEXT_UNIT events is packed into a single MULTI_EVENT event.
*/
public class ResourceConverter {
private final Logger logger = Logger.getLogger(getClass().getName());
private boolean isMultilingual;
private LocaleId trgLoc;
private GenericSkeletonWriter writer;
private GenericSkeleton newSkel;
public ResourceConverter(boolean isMultilingual, LocaleId trgLoc, String outEncoding) {
super();
this.isMultilingual = isMultilingual;
this.trgLoc = trgLoc;
writer = new GenericSkeletonWriter();
newSkel = new GenericSkeleton();
// StartDocument sd = new StartDocument("");
// sd.setMultilingual(false); // !!!
// writer.processStartDocument(trgLoc, outEncoding, null, null, sd); // sets writer fields
}
// public void setMultilingual(boolean isMultilingual) {
// this.isMultilingual = isMultilingual;
// }
//
// public void setTargetLocale(LocaleId trgLoc) {
// this.trgLoc = trgLoc;
// }
//
// private void addEvent(MultiEvent me, Event event) {
// // TODO merge adjacent DocumentParts
// }
//
// private void addDP(MultiEvent me, String id, GenericSkeleton skel) {
// DocumentPart dp = new DocumentPart(id, false, skel);
// addEvent(me, new Event(EventType.DOCUMENT_PART, dp));
// }
//
/**
* Merges adjacent document parts into one. Will work for simple resources only.
*/
public static MultiEvent packMultiEvent(MultiEvent me) {
Event prevEvent = null;
MultiEvent newME = new MultiEvent();
newME.setId(me.getId());
for (Event event : me) {
if (prevEvent != null &&
event != null &&
prevEvent.getEventType() == EventType.DOCUMENT_PART &&
event.getEventType() == EventType.DOCUMENT_PART) {
// Append to prev event's skeleton
IResource res = event.getResource();
ISkeleton skel = res.getSkeleton();
if (skel instanceof GenericSkeleton) {
IResource prevRes = prevEvent.getResource();
ISkeleton prevSkel = prevRes.getSkeleton();
if (prevSkel instanceof GenericSkeleton)
((GenericSkeleton) prevSkel).add((GenericSkeleton) skel);
}
}
else {
newME.addEvent(event);
prevEvent = event;
}
}
return newME;
}
/**
* Converts a given event into a multi-event if it contains references in its skeleton, or passes it on if
* either the skeleton is no instance of GenericSkeleton, contains no references, or the resource is referent.
* @param event the given event
* @return the given event or a newly created multi-event
*/
public Event convert(Event event) {
if (event == null)
throw new InvalidParameterException("Event cannot be null");
IResource res = event.getResource();
if (res == null)
return event;
ISkeleton skel = res.getSkeleton();
- if (!(skel == null)) {
+ if (skel == null) {
return event;
}
if (!(skel instanceof GenericSkeleton)) {
return event;
}
if (res instanceof IReferenceable) {
if (((IReferenceable) res).isReferent()) {
writer.addToReferents(event);
// The referent is not processed at this point (only later from an event referencing it)
return event;
}
}
// switch (event.getEventType()) {
// case START_DOCUMENT:
// case END_DOCUMENT:
// case START_SUBDOCUMENT:
// case END_SUBDOCUMENT:
// case START_GROUP:
// case END_GROUP:
// case TEXT_UNIT:
// case DOCUMENT_PART:
// break;
// default:
// return event; // All other events with a skeleton are not processed
// }
// Process the resource's skeleton
MultiEvent me = new MultiEvent();
processResource(res, me);
// Different event types are processed differently
switch (event.getEventType()) {
case START_DOCUMENT:
StartDocument sd = (StartDocument) res;
sd.setMultilingual(false); // Simple resources
// No break here
case END_DOCUMENT:
case START_SUBDOCUMENT:
case END_SUBDOCUMENT:
case START_GROUP:
case END_GROUP:
// The original event (the skeleton should be deleted) precedes in the resulting multi-event DPs/TUs
// created from its original skeleton parts
res.setSkeleton(null);
me.addEvent(event, 0);
break;
case TEXT_UNIT:
case DOCUMENT_PART:
break;
default:
return event;
}
return new Event(EventType.MULTI_EVENT, packMultiEvent(me));
}
// public Event toMultiEvent(Event event, LocaleId targetLocale) {
// if (event == null)
// throw new InvalidParameterException("Event cannot be null");
//
// IResource res = event.getResource();
// if (res == null)
// return wrapEvent(event);
//
// ISkeleton skel = res.getSkeleton();
// if (!(skel instanceof GenericSkeleton)) {
// // TODO log
// return wrapEvent(event);
// }
//
// MultiEvent me = new MultiEvent();
// List<GenericSkeletonPart> parts = ((GenericSkeleton) skel).getParts();
//
// switch (event.getEventType()) {
// case TEXT_UNIT:
// }
//
//
//
// return wrapEvent(event); // TODO replace with real stuff
// }
//
// public static Event fromMultiEvent(Event event) {
// if (event == null)
// throw new InvalidParameterException("Event cannot be null");
// if (event.getEventType() != EventType.MULTI_EVENT)
// throw new InvalidParameterException("MULTI_EVENT type is expected");
// MultiEvent me = (MultiEvent) event.getResource();
// //if (me.iterator().)
//
// return null;
// }
//
// private Event wrapEvent(Event event) {
// MultiEvent me = new MultiEvent();
// me.addEvent(event);
// return new Event(EventType.MULTI_EVENT, me);
// }
private void flushSkeleton(String resId, int dpIndex, MultiEvent me) {
if (newSkel.isEmpty()) return;
me.addEvent(new Event(EventType.DOCUMENT_PART, new DocumentPart(String.format("%s_%d", resId, dpIndex), false, newSkel)));
newSkel = new GenericSkeleton(); // newSkel.clear() would damage an already sent skeleton
}
private void addTU(MultiEvent me, String resId, int tuIndex, TextUnit tu) {
String id = null;
if (tuIndex == 1)
id = resId;
else {
logger.warning("Duplicate TU: " + resId);
id = String.format("%s_%d", resId, tuIndex);
}
TextUnit newTU = tu.clone();
newTU.setId(id);
newTU.setSkeleton(null);
me.addEvent(new Event(EventType.TEXT_UNIT, newTU));
}
/**
* Creates events from skeleton parts of a given resource, adds created events to a given multi-event resource.
*/
private void processResource(IResource resource, MultiEvent me) {
if (resource == null)
throw new InvalidParameterException("Resource cannot be null");
if (me == null)
throw new InvalidParameterException("MultiEvent object cannot be null");
ISkeleton skel = resource.getSkeleton();
if (!(skel instanceof GenericSkeleton)) return;
List<GenericSkeletonPart> parts = ((GenericSkeleton) skel).getParts();
int dpCounter = 0;
int tuCounter = 0;
String resId = resource.getId();
// if (resource instanceof INameable)
// mimeType = ((INameable) resource).getMimeType();
for (GenericSkeletonPart part : parts) {
if (SkeletonUtil.isText(part)) {
//newSkel.add(part.toString());
newSkel.add(writer.getString(part, 1));
}
else if (SkeletonUtil.isReference(part)) {
flushSkeleton(resId, ++dpCounter, me);
IReferenceable referent = writer.getReference(SkeletonUtil.getRefId(part));
if (referent instanceof IResource)
processResource((IResource) referent, me);
}
else if (SkeletonUtil.isSourcePlaceholder(part, resource)) {
processSourcePlaceholder(part, resource, me, resId, tuCounter, dpCounter);
}
else if (SkeletonUtil.isTargetPlaceholder(part, resource)) {
processTargetPlaceholder(part, resource, me, resId, tuCounter, dpCounter);
}
else if (SkeletonUtil.isValuePlaceholder(part, resource)) {
// For both isMultilingual true/false
newSkel.add(writer.getString(part, 1));
}
else if (SkeletonUtil.isExtSourcePlaceholder(part, resource)) {
checkExtParent(part.getParent(), resId);
processSourcePlaceholder(part, resource, me, resId, tuCounter, dpCounter);
}
else if (SkeletonUtil.isExtTargetPlaceholder(part, resource)) {
checkExtParent(part.getParent(), resId);
processTargetPlaceholder(part, resource, me, resId, tuCounter, dpCounter);
}
else if (SkeletonUtil.isExtValuePlaceholder(part, resource)) {
// For both isMultilingual true/false
checkExtParent(part.getParent(), resId);
newSkel.add(writer.getString(part, 1));
}
}
flushSkeleton(resId, ++dpCounter, me); // Flush remaining skeleton tail
}
private void processSourcePlaceholder(GenericSkeletonPart part, IResource resource,
MultiEvent me, String resId, int tuCounter, int dpCounter) {
if (isMultilingual) {
if (part.parent instanceof TextUnit)
newSkel.add(writer.getContent((TextUnit) part.parent, null, 0)); // Source goes to skeleton
else {
logger.warning("The self-reference must be a text-unit: " + resId);
newSkel.add(part.parent.toString());
}
}
else {
flushSkeleton(resId, ++dpCounter, me);
addTU(me, resId, ++tuCounter, (TextUnit) resource);
}
}
private void processTargetPlaceholder(GenericSkeletonPart part, IResource resource,
MultiEvent me, String resId, int tuCounter, int dpCounter) {
// For both isMultilingual true/false
if (part.getLocale() == trgLoc) {
flushSkeleton(resId, ++dpCounter, me);
addTU(me, resId, ++tuCounter, (TextUnit) resource);
}
else {
newSkel.add(writer.getContent((TextUnit) resource, trgLoc, 1));
}
}
private boolean checkExtParent(IResource parent, String resId) {
if (parent instanceof IReferenceable) {
IReferenceable r = (IReferenceable) parent;
if (!r.isReferent()) {
logger.warning("Referent flag is not set in parent: " + resId);
return false;
}
return true;
}
else {
logger.warning("Invalid parent type: " + resId);
return false;
}
}
}
| true
| true
|
public Event convert(Event event) {
if (event == null)
throw new InvalidParameterException("Event cannot be null");
IResource res = event.getResource();
if (res == null)
return event;
ISkeleton skel = res.getSkeleton();
if (!(skel == null)) {
return event;
}
if (!(skel instanceof GenericSkeleton)) {
return event;
}
if (res instanceof IReferenceable) {
if (((IReferenceable) res).isReferent()) {
writer.addToReferents(event);
// The referent is not processed at this point (only later from an event referencing it)
return event;
}
}
// switch (event.getEventType()) {
// case START_DOCUMENT:
// case END_DOCUMENT:
// case START_SUBDOCUMENT:
// case END_SUBDOCUMENT:
// case START_GROUP:
// case END_GROUP:
// case TEXT_UNIT:
// case DOCUMENT_PART:
// break;
// default:
// return event; // All other events with a skeleton are not processed
// }
// Process the resource's skeleton
MultiEvent me = new MultiEvent();
processResource(res, me);
// Different event types are processed differently
switch (event.getEventType()) {
case START_DOCUMENT:
StartDocument sd = (StartDocument) res;
sd.setMultilingual(false); // Simple resources
// No break here
case END_DOCUMENT:
case START_SUBDOCUMENT:
case END_SUBDOCUMENT:
case START_GROUP:
case END_GROUP:
// The original event (the skeleton should be deleted) precedes in the resulting multi-event DPs/TUs
// created from its original skeleton parts
res.setSkeleton(null);
me.addEvent(event, 0);
break;
case TEXT_UNIT:
case DOCUMENT_PART:
break;
default:
return event;
}
return new Event(EventType.MULTI_EVENT, packMultiEvent(me));
}
|
public Event convert(Event event) {
if (event == null)
throw new InvalidParameterException("Event cannot be null");
IResource res = event.getResource();
if (res == null)
return event;
ISkeleton skel = res.getSkeleton();
if (skel == null) {
return event;
}
if (!(skel instanceof GenericSkeleton)) {
return event;
}
if (res instanceof IReferenceable) {
if (((IReferenceable) res).isReferent()) {
writer.addToReferents(event);
// The referent is not processed at this point (only later from an event referencing it)
return event;
}
}
// switch (event.getEventType()) {
// case START_DOCUMENT:
// case END_DOCUMENT:
// case START_SUBDOCUMENT:
// case END_SUBDOCUMENT:
// case START_GROUP:
// case END_GROUP:
// case TEXT_UNIT:
// case DOCUMENT_PART:
// break;
// default:
// return event; // All other events with a skeleton are not processed
// }
// Process the resource's skeleton
MultiEvent me = new MultiEvent();
processResource(res, me);
// Different event types are processed differently
switch (event.getEventType()) {
case START_DOCUMENT:
StartDocument sd = (StartDocument) res;
sd.setMultilingual(false); // Simple resources
// No break here
case END_DOCUMENT:
case START_SUBDOCUMENT:
case END_SUBDOCUMENT:
case START_GROUP:
case END_GROUP:
// The original event (the skeleton should be deleted) precedes in the resulting multi-event DPs/TUs
// created from its original skeleton parts
res.setSkeleton(null);
me.addEvent(event, 0);
break;
case TEXT_UNIT:
case DOCUMENT_PART:
break;
default:
return event;
}
return new Event(EventType.MULTI_EVENT, packMultiEvent(me));
}
|
diff --git a/net.sourceforge.vrapper.core/src/net/sourceforge/vrapper/vim/commands/PasteAfterCommand.java b/net.sourceforge.vrapper.core/src/net/sourceforge/vrapper/vim/commands/PasteAfterCommand.java
index 7c9a98e6..1f65d91b 100644
--- a/net.sourceforge.vrapper.core/src/net/sourceforge/vrapper/vim/commands/PasteAfterCommand.java
+++ b/net.sourceforge.vrapper.core/src/net/sourceforge/vrapper/vim/commands/PasteAfterCommand.java
@@ -1,85 +1,87 @@
package net.sourceforge.vrapper.vim.commands;
import net.sourceforge.vrapper.platform.CursorService;
import net.sourceforge.vrapper.platform.TextContent;
import net.sourceforge.vrapper.utils.ContentType;
import net.sourceforge.vrapper.utils.LineInformation;
import net.sourceforge.vrapper.utils.Position;
import net.sourceforge.vrapper.utils.StringUtils;
import net.sourceforge.vrapper.utils.VimUtils;
import net.sourceforge.vrapper.vim.EditorAdaptor;
import net.sourceforge.vrapper.vim.commands.motions.StickyColumnPolicy;
import net.sourceforge.vrapper.vim.register.RegisterContent;
public class PasteAfterCommand extends CountAwareCommand {
public static final PasteAfterCommand CURSOR_ON_TEXT = new PasteAfterCommand(false);
public static final PasteAfterCommand CURSOR_AFTER_TEXT = new PasteAfterCommand(true);
private boolean placeCursorAfter;
private PasteAfterCommand(boolean placeCursorAfter) {
this.placeCursorAfter = placeCursorAfter;
}
@Override
public void execute(EditorAdaptor editorAdaptor, int count) {
if (count == NO_COUNT_GIVEN) {
count = 1;
}
final CursorService cursorService = editorAdaptor.getCursorService();
RegisterContent registerContent = editorAdaptor.getRegisterManager().getActiveRegister().getContent();
String text = registerContent.getText();
text = VimUtils.replaceNewLines(text, editorAdaptor.getConfiguration().getNewLine());
TextContent content = editorAdaptor.getModelContent();
int offset = editorAdaptor.getPosition().getModelOffset();
LineInformation line = content.getLineInformationOfOffset(offset);
int lineNo = line.getNumber() + 1;
int position;
switch (registerContent.getPayloadType()) {
case LINES:
// FIXME: position calculation for count > 1
if (lineNo < content.getNumberOfLines()) {
offset = content.getLineInformation(lineNo).getBeginOffset();
position = offset;
} else {
offset = content.getTextLength();
String newLine = editorAdaptor.getConfiguration().getNewLine();
text = newLine + VimUtils.stripLastNewline(text);
position = offset + newLine.length();
}
break;
case TEXT:
offset = Math.min(line.getEndOffset(), offset + 1);
position = offset + text.length() * count;
- if (!placeCursorAfter || text.length() == 0)
- position -= 1;
+ // Move cursor back, unless we should be after the pasted text or if line is empty.
+ if ( ( ! placeCursorAfter || text.length() == 0) && line.getLength() > 0) {
+ position -= 1;
+ }
break;
case TEXT_RECTANGLE:
BlockPasteHelper.execute(editorAdaptor, count, 1, placeCursorAfter);
return;
default:
return;
}
try {
editorAdaptor.getHistory().beginCompoundChange();
content.replace(offset, 0, StringUtils.multiply(text, count));
int followingLine = lineNo + count;
if (registerContent.getPayloadType() == ContentType.LINES && placeCursorAfter
&& followingLine < content.getNumberOfLines()) {
position = content.getLineInformation(followingLine).getBeginOffset();
}
Position destination = cursorService.newPositionForModelOffset(position);
editorAdaptor.setPosition(destination, StickyColumnPolicy.ON_CHANGE);
} finally {
editorAdaptor.getHistory().endCompoundChange();
}
}
@Override
public CountAwareCommand repetition() {
return this;
}
}
| true
| true
|
public void execute(EditorAdaptor editorAdaptor, int count) {
if (count == NO_COUNT_GIVEN) {
count = 1;
}
final CursorService cursorService = editorAdaptor.getCursorService();
RegisterContent registerContent = editorAdaptor.getRegisterManager().getActiveRegister().getContent();
String text = registerContent.getText();
text = VimUtils.replaceNewLines(text, editorAdaptor.getConfiguration().getNewLine());
TextContent content = editorAdaptor.getModelContent();
int offset = editorAdaptor.getPosition().getModelOffset();
LineInformation line = content.getLineInformationOfOffset(offset);
int lineNo = line.getNumber() + 1;
int position;
switch (registerContent.getPayloadType()) {
case LINES:
// FIXME: position calculation for count > 1
if (lineNo < content.getNumberOfLines()) {
offset = content.getLineInformation(lineNo).getBeginOffset();
position = offset;
} else {
offset = content.getTextLength();
String newLine = editorAdaptor.getConfiguration().getNewLine();
text = newLine + VimUtils.stripLastNewline(text);
position = offset + newLine.length();
}
break;
case TEXT:
offset = Math.min(line.getEndOffset(), offset + 1);
position = offset + text.length() * count;
if (!placeCursorAfter || text.length() == 0)
position -= 1;
break;
case TEXT_RECTANGLE:
BlockPasteHelper.execute(editorAdaptor, count, 1, placeCursorAfter);
return;
default:
return;
}
try {
editorAdaptor.getHistory().beginCompoundChange();
content.replace(offset, 0, StringUtils.multiply(text, count));
int followingLine = lineNo + count;
if (registerContent.getPayloadType() == ContentType.LINES && placeCursorAfter
&& followingLine < content.getNumberOfLines()) {
position = content.getLineInformation(followingLine).getBeginOffset();
}
Position destination = cursorService.newPositionForModelOffset(position);
editorAdaptor.setPosition(destination, StickyColumnPolicy.ON_CHANGE);
} finally {
editorAdaptor.getHistory().endCompoundChange();
}
}
|
public void execute(EditorAdaptor editorAdaptor, int count) {
if (count == NO_COUNT_GIVEN) {
count = 1;
}
final CursorService cursorService = editorAdaptor.getCursorService();
RegisterContent registerContent = editorAdaptor.getRegisterManager().getActiveRegister().getContent();
String text = registerContent.getText();
text = VimUtils.replaceNewLines(text, editorAdaptor.getConfiguration().getNewLine());
TextContent content = editorAdaptor.getModelContent();
int offset = editorAdaptor.getPosition().getModelOffset();
LineInformation line = content.getLineInformationOfOffset(offset);
int lineNo = line.getNumber() + 1;
int position;
switch (registerContent.getPayloadType()) {
case LINES:
// FIXME: position calculation for count > 1
if (lineNo < content.getNumberOfLines()) {
offset = content.getLineInformation(lineNo).getBeginOffset();
position = offset;
} else {
offset = content.getTextLength();
String newLine = editorAdaptor.getConfiguration().getNewLine();
text = newLine + VimUtils.stripLastNewline(text);
position = offset + newLine.length();
}
break;
case TEXT:
offset = Math.min(line.getEndOffset(), offset + 1);
position = offset + text.length() * count;
// Move cursor back, unless we should be after the pasted text or if line is empty.
if ( ( ! placeCursorAfter || text.length() == 0) && line.getLength() > 0) {
position -= 1;
}
break;
case TEXT_RECTANGLE:
BlockPasteHelper.execute(editorAdaptor, count, 1, placeCursorAfter);
return;
default:
return;
}
try {
editorAdaptor.getHistory().beginCompoundChange();
content.replace(offset, 0, StringUtils.multiply(text, count));
int followingLine = lineNo + count;
if (registerContent.getPayloadType() == ContentType.LINES && placeCursorAfter
&& followingLine < content.getNumberOfLines()) {
position = content.getLineInformation(followingLine).getBeginOffset();
}
Position destination = cursorService.newPositionForModelOffset(position);
editorAdaptor.setPosition(destination, StickyColumnPolicy.ON_CHANGE);
} finally {
editorAdaptor.getHistory().endCompoundChange();
}
}
|
diff --git a/src/com/db/ncsu/user/User.java b/src/com/db/ncsu/user/User.java
index 96ad7c3..0ff83f4 100644
--- a/src/com/db/ncsu/user/User.java
+++ b/src/com/db/ncsu/user/User.java
@@ -1,43 +1,43 @@
package com.db.ncsu.user;
import com.db.ncsu.command.Command;
import com.db.ncsu.command.CreateAccount;
import com.db.ncsu.command.CreateEmployee;
import com.db.ncsu.command.*;
public class User {
private Command[] salesCommands;
private Command[] billingCommands;
private Command[] managerCommands;
public User()
{
- salesCommands = new Command[]{ new InsertSpecialOrder(), new UpdateMerchandise(), new UpdateAccount(), new CreateAccount(), new CreateEmployee(), new ShowEmployees(), new CreateVendor(), new CreateStore(), new CreateMerchandise(), new AddVendorPayment(), new AddStoreItem(), new ReviewStoreInventory(), new CheckItemAtStore(), new CheckMerchandiseAvailability(), new ShowStore(), new ReviewBills(), new ViewAllCustomerSpecialOrders(), new GenerateCustomerPurchaseHistory(), new GenerateVendorPurchaseHistory(), new ReviewASpecialOrder(), new VendorStorePurchaseHistory(), new CustomerStorePurchaseHistory(), new ShowTotalInventory(), new ShowAllCustomerBills(), new ShowAllVendorBills(), new UpdateCustomerBillingCycle(), new UpdateSpecialOrderItemStatus(), new UpdateVendor(), new UpdateEmployee()};
+ salesCommands = new Command[]{ new InsertSpecialOrder(), new UpdateMerchandise(), new UpdateAccount(), new CreateAccount(), new CreateEmployee(), new ShowEmployees(), new CreateVendor(), new CreateStore(), new CreateMerchandise(), new AddVendorPayment(), new AddStoreItem(), new ReviewStoreInventory(), new CheckItemAtStore(), new CheckMerchandiseAvailability(), new ShowStore(), new ReviewBills(), new ViewAllCustomerSpecialOrders(), new GenerateCustomerPurchaseHistory(), new GenerateVendorPurchaseHistory(), new ReviewASpecialOrder(), new VendorStorePurchaseHistory(), new CustomerStorePurchaseHistory(), new ShowTotalInventory(), new ShowAllCustomerBills(), new ShowAllVendorBills(), new UpdateCustomerBillingCycle(), new UpdateSpecialOrderItemStatus(), new UpdateVendor(), new UpdateEmployee(), new CreateBillingCycle()};
billingCommands = new Command[]{ new CreateAccount()};
managerCommands = new Command[]{ new CreateEmployee()};
}
public Command[] getSalesCommands() {
return salesCommands;
}
public Command[] billingCommands() {
return billingCommands;
}
public Command[] managerCommands() {
return managerCommands;
}
public Command[] franchiseManagerCommands() {
return salesCommands;
}
public Command[] stockingCommands() {
return salesCommands;
}
}
| true
| true
|
public User()
{
salesCommands = new Command[]{ new InsertSpecialOrder(), new UpdateMerchandise(), new UpdateAccount(), new CreateAccount(), new CreateEmployee(), new ShowEmployees(), new CreateVendor(), new CreateStore(), new CreateMerchandise(), new AddVendorPayment(), new AddStoreItem(), new ReviewStoreInventory(), new CheckItemAtStore(), new CheckMerchandiseAvailability(), new ShowStore(), new ReviewBills(), new ViewAllCustomerSpecialOrders(), new GenerateCustomerPurchaseHistory(), new GenerateVendorPurchaseHistory(), new ReviewASpecialOrder(), new VendorStorePurchaseHistory(), new CustomerStorePurchaseHistory(), new ShowTotalInventory(), new ShowAllCustomerBills(), new ShowAllVendorBills(), new UpdateCustomerBillingCycle(), new UpdateSpecialOrderItemStatus(), new UpdateVendor(), new UpdateEmployee()};
billingCommands = new Command[]{ new CreateAccount()};
managerCommands = new Command[]{ new CreateEmployee()};
}
|
public User()
{
salesCommands = new Command[]{ new InsertSpecialOrder(), new UpdateMerchandise(), new UpdateAccount(), new CreateAccount(), new CreateEmployee(), new ShowEmployees(), new CreateVendor(), new CreateStore(), new CreateMerchandise(), new AddVendorPayment(), new AddStoreItem(), new ReviewStoreInventory(), new CheckItemAtStore(), new CheckMerchandiseAvailability(), new ShowStore(), new ReviewBills(), new ViewAllCustomerSpecialOrders(), new GenerateCustomerPurchaseHistory(), new GenerateVendorPurchaseHistory(), new ReviewASpecialOrder(), new VendorStorePurchaseHistory(), new CustomerStorePurchaseHistory(), new ShowTotalInventory(), new ShowAllCustomerBills(), new ShowAllVendorBills(), new UpdateCustomerBillingCycle(), new UpdateSpecialOrderItemStatus(), new UpdateVendor(), new UpdateEmployee(), new CreateBillingCycle()};
billingCommands = new Command[]{ new CreateAccount()};
managerCommands = new Command[]{ new CreateEmployee()};
}
|
diff --git a/core/src/main/java/uk/ac/imperial/presage2/core/cli/run/SubProcessExecutor.java b/core/src/main/java/uk/ac/imperial/presage2/core/cli/run/SubProcessExecutor.java
index 24dd782..69896c3 100644
--- a/core/src/main/java/uk/ac/imperial/presage2/core/cli/run/SubProcessExecutor.java
+++ b/core/src/main/java/uk/ac/imperial/presage2/core/cli/run/SubProcessExecutor.java
@@ -1,149 +1,148 @@
/**
* Copyright (C) 2011 Sam Macbeth <sm1106 [at] imperial [dot] ac [dot] uk>
*
* This file is part of Presage2.
*
* Presage2 is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Presage2 is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser Public License for more details.
*
* You should have received a copy of the GNU Lesser Public License
* along with Presage2. If not, see <http://www.gnu.org/licenses/>.
*/
package uk.ac.imperial.presage2.core.cli.run;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Timer;
import java.util.TimerTask;
import org.apache.log4j.Logger;
import uk.ac.imperial.presage2.core.cli.Presage2CLI;
import com.google.inject.Singleton;
/**
* A {@link SimulationExecutor} which runs each simulation in separate JVM as a
* sub process.
*
* @author Sam Macbeth
*
*/
@Singleton
public class SubProcessExecutor implements SimulationExecutor {
private final Logger logger = Logger.getLogger(SubProcessExecutor.class);
final int MAX_PROCESSES;
List<Process> running;
Timer processMonitor;
public SubProcessExecutor() {
this(1);
}
SubProcessExecutor(int mAX_PROCESSES) {
super();
MAX_PROCESSES = mAX_PROCESSES;
this.running = Collections.synchronizedList(new ArrayList<Process>(
MAX_PROCESSES));
this.processMonitor = new Timer(true);
this.processMonitor.schedule(new TimerTask() {
@Override
public void run() {
List<Process> completed = new LinkedList<Process>();
for (Process p : running) {
try {
// check exit value to see if process has exited.
int val = p.exitValue();
logger.info("Simulation completed, returned " + val);
completed.add(p);
} catch (IllegalThreadStateException e) {
// process is still running.
}
}
running.removeAll(completed);
}
}, 1000, 1000);
}
@Override
public synchronized void run(long simId)
throws InsufficientResourcesException {
// don't launch more than maxConcurrent processes.
if (this.running() >= maxConcurrent())
throw new InsufficientResourcesException(
"Max number of concurrent processes, " + maxConcurrent()
+ " has been reached");
// set up processbuilder
// see
// http://stackoverflow.com/questions/636367/java-executing-a-java-application-in-a-separate-process/723914#723914
String javaHome = System.getProperty("java.home");
String javaBin = javaHome + File.separator + "bin" + File.separator
+ "java";
String classpath = System.getProperty("java.class.path");
String className = Presage2CLI.class.getCanonicalName();
// if the system classpath only contains classworlds.jar we must rebuild
// classpath from this class's classloader.
if (classpath.split(":").length == 1
&& classpath.matches(".*classworlds.jar.*")) {
ClassLoader sysClassLoader = this.getClass().getClassLoader();
URL[] urls = ((URLClassLoader) sysClassLoader).getURLs();
String separator = System.getProperty("path.separator", ":");
classpath = "";
for (int i = 0; i < urls.length; i++) {
classpath += urls[i].getFile();
if (i >= urls.length - 1)
break;
classpath += separator;
}
}
ProcessBuilder builder = new ProcessBuilder(javaBin, "-cp", classpath,
className, "run", Long.toString(simId));
builder.redirectErrorStream(true);
// start process and gobble streams
try {
logger.info("Starting simulation ID: " + simId
+ " in a new process.");
Process process = builder.start();
StreamGobbler gobbler = new StreamGobbler(process.getInputStream());
gobbler.start();
running.add(process);
} catch (IOException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
+ logger.warn("Error launching process", e);
}
}
@Override
public int running() {
return this.running.size();
}
@Override
public int maxConcurrent() {
return MAX_PROCESSES;
}
@Override
public String toString() {
return "SubProcessExecutor @ localhost";
}
}
| true
| true
|
public synchronized void run(long simId)
throws InsufficientResourcesException {
// don't launch more than maxConcurrent processes.
if (this.running() >= maxConcurrent())
throw new InsufficientResourcesException(
"Max number of concurrent processes, " + maxConcurrent()
+ " has been reached");
// set up processbuilder
// see
// http://stackoverflow.com/questions/636367/java-executing-a-java-application-in-a-separate-process/723914#723914
String javaHome = System.getProperty("java.home");
String javaBin = javaHome + File.separator + "bin" + File.separator
+ "java";
String classpath = System.getProperty("java.class.path");
String className = Presage2CLI.class.getCanonicalName();
// if the system classpath only contains classworlds.jar we must rebuild
// classpath from this class's classloader.
if (classpath.split(":").length == 1
&& classpath.matches(".*classworlds.jar.*")) {
ClassLoader sysClassLoader = this.getClass().getClassLoader();
URL[] urls = ((URLClassLoader) sysClassLoader).getURLs();
String separator = System.getProperty("path.separator", ":");
classpath = "";
for (int i = 0; i < urls.length; i++) {
classpath += urls[i].getFile();
if (i >= urls.length - 1)
break;
classpath += separator;
}
}
ProcessBuilder builder = new ProcessBuilder(javaBin, "-cp", classpath,
className, "run", Long.toString(simId));
builder.redirectErrorStream(true);
// start process and gobble streams
try {
logger.info("Starting simulation ID: " + simId
+ " in a new process.");
Process process = builder.start();
StreamGobbler gobbler = new StreamGobbler(process.getInputStream());
gobbler.start();
running.add(process);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
|
public synchronized void run(long simId)
throws InsufficientResourcesException {
// don't launch more than maxConcurrent processes.
if (this.running() >= maxConcurrent())
throw new InsufficientResourcesException(
"Max number of concurrent processes, " + maxConcurrent()
+ " has been reached");
// set up processbuilder
// see
// http://stackoverflow.com/questions/636367/java-executing-a-java-application-in-a-separate-process/723914#723914
String javaHome = System.getProperty("java.home");
String javaBin = javaHome + File.separator + "bin" + File.separator
+ "java";
String classpath = System.getProperty("java.class.path");
String className = Presage2CLI.class.getCanonicalName();
// if the system classpath only contains classworlds.jar we must rebuild
// classpath from this class's classloader.
if (classpath.split(":").length == 1
&& classpath.matches(".*classworlds.jar.*")) {
ClassLoader sysClassLoader = this.getClass().getClassLoader();
URL[] urls = ((URLClassLoader) sysClassLoader).getURLs();
String separator = System.getProperty("path.separator", ":");
classpath = "";
for (int i = 0; i < urls.length; i++) {
classpath += urls[i].getFile();
if (i >= urls.length - 1)
break;
classpath += separator;
}
}
ProcessBuilder builder = new ProcessBuilder(javaBin, "-cp", classpath,
className, "run", Long.toString(simId));
builder.redirectErrorStream(true);
// start process and gobble streams
try {
logger.info("Starting simulation ID: " + simId
+ " in a new process.");
Process process = builder.start();
StreamGobbler gobbler = new StreamGobbler(process.getInputStream());
gobbler.start();
running.add(process);
} catch (IOException e) {
logger.warn("Error launching process", e);
}
}
|
diff --git a/task1/test/ee/ut/math/tvt/BSS/StockItemTest.java b/task1/test/ee/ut/math/tvt/BSS/StockItemTest.java
index f530cdb..0d89371 100644
--- a/task1/test/ee/ut/math/tvt/BSS/StockItemTest.java
+++ b/task1/test/ee/ut/math/tvt/BSS/StockItemTest.java
@@ -1,44 +1,44 @@
package ee.ut.math.tvt.BSS;
import static org.junit.Assert.assertEquals;
import org.junit.Before;
import org.junit.Test;
import ee.ut.math.tvt.salessystem.domain.data.StockItem;
public class StockItemTest {
private StockItem item1;
@Before
public void setUp() {
long id = 10;
item1 = new StockItem(id, "testItem", "testDescription", 12.0, 5);
}
@Test
public void testClone() {
StockItem item2 = (StockItem) item1.clone();
assertEquals(item2.getId(), 10, 0.0001);
assertEquals(item2.getName(), "testItem");
assertEquals(item2.getPrice(), 12.0, 0.001);
assertEquals(item2.getQuantity(), 5);
assertEquals(item2.getDescription(), "testDescription");
}
@Test
public void testGetColumn() {
long id = ((Long) item1.getColumn(0)).longValue();
assertEquals(id, 10, 0.0001);
String name = (String) item1.getColumn(1);
assertEquals(name, "testItem");
- double price = (double) item1.getColumn(2);
- assertEquals(price, 12.0, 0.001);
- int quantity = (int) item1.getColumn(3);
- assertEquals(quantity, 5);
+ double price = ((Double) item1.getColumn(2)).doubleValue();
+ assertEquals(price, 12.0, 0.0001);
+ int quantity = ((Integer) item1.getColumn(3)).intValue();
+ assertEquals(quantity, 5, 0.0001);
}
@Test (expected = RuntimeException.class)
public void testGetColumnException() {
String other = (String) item1.getColumn(4);
}
}
| true
| true
|
public void testGetColumn() {
long id = ((Long) item1.getColumn(0)).longValue();
assertEquals(id, 10, 0.0001);
String name = (String) item1.getColumn(1);
assertEquals(name, "testItem");
double price = (double) item1.getColumn(2);
assertEquals(price, 12.0, 0.001);
int quantity = (int) item1.getColumn(3);
assertEquals(quantity, 5);
}
|
public void testGetColumn() {
long id = ((Long) item1.getColumn(0)).longValue();
assertEquals(id, 10, 0.0001);
String name = (String) item1.getColumn(1);
assertEquals(name, "testItem");
double price = ((Double) item1.getColumn(2)).doubleValue();
assertEquals(price, 12.0, 0.0001);
int quantity = ((Integer) item1.getColumn(3)).intValue();
assertEquals(quantity, 5, 0.0001);
}
|
diff --git a/examples/org.eclipse.ocl.examples.pivot/src/org/eclipse/ocl/examples/pivot/attributes/DataTypeAttribution.java b/examples/org.eclipse.ocl.examples.pivot/src/org/eclipse/ocl/examples/pivot/attributes/DataTypeAttribution.java
index 9805a3de13..a0874fb6ca 100644
--- a/examples/org.eclipse.ocl.examples.pivot/src/org/eclipse/ocl/examples/pivot/attributes/DataTypeAttribution.java
+++ b/examples/org.eclipse.ocl.examples.pivot/src/org/eclipse/ocl/examples/pivot/attributes/DataTypeAttribution.java
@@ -1,43 +1,44 @@
/**
* <copyright>
*
* Copyright (c) 2012 E.D.Willink and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* E.D.Willink - initial API and implementation
*
* </copyright>
*/
package org.eclipse.ocl.examples.pivot.attributes;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.ocl.examples.pivot.DataType;
import org.eclipse.ocl.examples.pivot.Type;
import org.eclipse.ocl.examples.pivot.scoping.AbstractAttribution;
import org.eclipse.ocl.examples.pivot.scoping.EnvironmentView;
import org.eclipse.ocl.examples.pivot.scoping.Attribution;
import org.eclipse.ocl.examples.pivot.scoping.ScopeView;
import org.eclipse.ocl.examples.pivot.utilities.PivotUtil;
public class DataTypeAttribution extends AbstractAttribution
{
public static final DataTypeAttribution INSTANCE = new DataTypeAttribution();
@Override
public ScopeView computeLookup(EObject target, EnvironmentView environmentView, ScopeView scopeView) {
DataType targetElement = (DataType) target;
Type behavioralType = targetElement.getBehavioralType();
Attribution attribution;
if (behavioralType != null) {
attribution = PivotUtil.getAttribution(behavioralType);
}
else {
attribution = ClassAttribution.INSTANCE;
}
+ environmentView.addElements(PivotUtil.getTypeTemplateParameterables(targetElement));
return attribution.computeLookup(target, environmentView, scopeView);
}
}
| true
| true
|
public ScopeView computeLookup(EObject target, EnvironmentView environmentView, ScopeView scopeView) {
DataType targetElement = (DataType) target;
Type behavioralType = targetElement.getBehavioralType();
Attribution attribution;
if (behavioralType != null) {
attribution = PivotUtil.getAttribution(behavioralType);
}
else {
attribution = ClassAttribution.INSTANCE;
}
return attribution.computeLookup(target, environmentView, scopeView);
}
|
public ScopeView computeLookup(EObject target, EnvironmentView environmentView, ScopeView scopeView) {
DataType targetElement = (DataType) target;
Type behavioralType = targetElement.getBehavioralType();
Attribution attribution;
if (behavioralType != null) {
attribution = PivotUtil.getAttribution(behavioralType);
}
else {
attribution = ClassAttribution.INSTANCE;
}
environmentView.addElements(PivotUtil.getTypeTemplateParameterables(targetElement));
return attribution.computeLookup(target, environmentView, scopeView);
}
|
diff --git a/flexmojos-maven-plugin/src/main/java/org/sonatype/flexmojos/compiler/TestCompilerMojo.java b/flexmojos-maven-plugin/src/main/java/org/sonatype/flexmojos/compiler/TestCompilerMojo.java
index ee0fc28a9..2282668b6 100644
--- a/flexmojos-maven-plugin/src/main/java/org/sonatype/flexmojos/compiler/TestCompilerMojo.java
+++ b/flexmojos-maven-plugin/src/main/java/org/sonatype/flexmojos/compiler/TestCompilerMojo.java
@@ -1,363 +1,363 @@
/**
* Flexmojos is a set of maven goals to allow maven users to compile, optimize and test Flex SWF, Flex SWC, Air SWF and Air SWC.
* Copyright (C) 2008-2012 Marvin Froeder <marvin@flexmojos.net>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.sonatype.flexmojos.compiler;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import org.apache.commons.io.IOUtils;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.codehaus.plexus.util.DirectoryScanner;
import org.sonatype.flexmojos.utilities.MavenUtils;
/**
* Goal to compile the Flex test sources.
*
* @author Marvin Herman Froeder (velo.br@gmail.com)
* @since 1.0
* @goal test-compile
* @requiresDependencyResolution
* @phase test
*/
public class TestCompilerMojo
extends ApplicationMojo
{
/**
* Set this to 'true' to bypass unit tests entirely. Its use is NOT RECOMMENDED, but quite convenient on occasion.
*
* @parameter expression="${maven.test.skip}"
*/
private boolean skipTests;
/**
* @parameter
*/
private File testRunnerTemplate;
/**
* File to be tested. If not defined assumes Test*.as and *Test.as
*
* @parameter
*/
private String[] includeTestFiles;
/**
* Files to exclude from testing. If not defined, assumes no exclusions
*
* @parameter
*/
private String[] excludeTestFiles;
/**
* @parameter expression="${project.build.testSourceDirectory}"
* @readonly
*/
private File testFolder;
/**
* Socket connect port for flex/java communication to transfer tests results
*
* @parameter default-value="13539" expression="${testPort}"
*/
private int testPort;
/**
* Socket connect port for flex/java communication to control if flashplayer is alive
*
* @parameter default-value="13540" expression="${testControlPort}"
*/
private int testControlPort;
@Override
public void execute()
throws MojoExecutionException, MojoFailureException
{
getLog().info(
"flexmojos " + MavenUtils.getFlexMojosVersion()
+ " - GNU GPL License (NO WARRANTY) - See COPYRIGHT file" );
if ( skipTests )
{
getLog().warn( "Skipping test phase." );
return;
}
else if ( !testFolder.exists() )
{
getLog().warn( "Test folder not found" + testFolder );
return;
}
setUp();
run();
tearDown();
}
@Override
public void setUp()
throws MojoExecutionException, MojoFailureException
{
isSetProjectFile = false;
linkReport = false;
loadExterns = null;
if ( includeTestFiles == null || includeTestFiles.length == 0 )
{
includeTestFiles = new String[] { "**/Test*.as", "**/*Test.as" };
}
else
{
for ( int i = 0; i < includeTestFiles.length; i++ )
{
String pattern = includeTestFiles[i];
- if ( pattern.endsWith( ".java" ) )
+ if ( pattern.endsWith( ".as" ) )
{
- pattern = pattern.substring( 0, pattern.length() - 5 );
+ pattern = pattern.substring( 0, pattern.length() - 3 );
}
// Allow paths delimited by '.' or '/'
pattern = pattern.replace( '.', '/' );
includeTestFiles[i] = "**/" + pattern + ".as";
}
}
File outputFolder = new File( build.getTestOutputDirectory() );
if ( !outputFolder.exists() )
{
outputFolder.mkdirs();
}
List<String> testClasses = getTestClasses();
File testSourceFile;
try
{
testSourceFile = generateTester( testClasses );
}
catch ( Exception e )
{
throw new MojoExecutionException( "Unable to generate tester class.", e );
}
sourceFile = null;
source = testSourceFile;
super.setUp();
}
private List<String> getTestClasses()
{
getLog().debug(
"Scanning for tests at " + testFolder + " for " + Arrays.toString( includeTestFiles ) + " but "
+ Arrays.toString( excludeTestFiles ) );
DirectoryScanner scanner = new DirectoryScanner();
scanner.setIncludes( includeTestFiles );
scanner.setExcludes( excludeTestFiles );
scanner.addDefaultExcludes();
scanner.setBasedir( testFolder );
scanner.scan();
getLog().debug( "Test files: " + scanner.getIncludedFiles() );
List<String> testClasses = new ArrayList<String>();
for ( String testClass : scanner.getIncludedFiles() )
{
int endPoint = testClass.lastIndexOf( '.' );
testClass = testClass.substring( 0, endPoint ); // remove extension
testClass = testClass.replace( '/', '.' ); // Unix OS
testClass = testClass.replace( '\\', '.' ); // Windows OS
testClasses.add( testClass );
}
getLog().debug( "Test classes: " + testClasses );
return testClasses;
}
private File generateTester( List<String> testClasses )
throws Exception
{
// can't use velocity, got:
// java.io.InvalidClassException:
// org.apache.velocity.runtime.parser.node.ASTprocess; class invalid for
// deserialization
StringBuilder imports = new StringBuilder();
for ( String testClass : testClasses )
{
imports.append( "import " );
imports.append( testClass );
imports.append( ";" );
imports.append( '\n' );
}
StringBuilder classes = new StringBuilder();
for ( String testClass : testClasses )
{
testClass = testClass.substring( testClass.lastIndexOf( '.' ) + 1 );
classes.append( "addTest( " );
classes.append( testClass );
classes.append( ");" );
classes.append( '\n' );
}
InputStream templateSource = getTemplate();
String sourceString = IOUtils.toString( templateSource );
sourceString = sourceString.replace( "$imports", imports );
sourceString = sourceString.replace( "$testClasses", classes );
sourceString = sourceString.replace( "$port", String.valueOf( testPort ) );
sourceString = sourceString.replace( "$controlPort", String.valueOf( testControlPort ) );
File testSourceFile = new File( build.getTestOutputDirectory(), "TestRunner.mxml" );
FileWriter fileWriter = new FileWriter( testSourceFile );
IOUtils.write( sourceString, fileWriter );
fileWriter.flush();
fileWriter.close();
return testSourceFile;
}
private InputStream getTemplate()
throws MojoExecutionException
{
if ( testRunnerTemplate == null )
{
return getClass().getResourceAsStream( "/templates/test/TestRunner.vm" );
}
else if ( !testRunnerTemplate.exists() )
{
throw new MojoExecutionException( "Template file not found: " + testRunnerTemplate );
}
else
{
try
{
return new FileInputStream( testRunnerTemplate );
}
catch ( FileNotFoundException e )
{
// Never should happen
throw new MojoExecutionException( "Error reading template file", e );
}
}
}
@Override
protected void configure()
throws MojoExecutionException, MojoFailureException
{
compiledLocales = getLocales();
runtimeLocales = null;
super.configure();
// test launcher is at testOutputDirectory
configuration.addSourcePath( new File[] { new File( build.getTestOutputDirectory() ) } );
configuration.addSourcePath( getValidSourceRoots( project.getTestCompileSourceRoots() ).toArray( new File[0] ) );
if ( getResource( compiledLocales[0] ) != null )
{
configuration.addSourcePath( new File[] { new File( resourceBundlePath ) } );
}
configuration.allowSourcePathOverlap( true );
configuration.enableDebugging( true, super.debugPassword );
}
private File getResource( String locale )
{
try
{
return MavenUtils.getLocaleResourcePath( resourceBundlePath, locale );
}
catch ( MojoExecutionException e )
{
return null;
}
}
@Override
protected void resolveDependencies()
throws MojoExecutionException, MojoFailureException
{
configuration.setExternalLibraryPath( getGlobalDependency() );
// Set all dependencies as merged
configuration.setLibraryPath( getDependenciesPath( "compile" ) );
configuration.addLibraryPath( getDependenciesPath( "merged" ) );
configuration.addLibraryPath( merge( getResourcesBundles( getDefaultLocale() ),
getResourcesBundles( runtimeLocales ),
getResourcesBundles( compiledLocales ) ) );
// and add test libraries
configuration.includeLibraries( merge( getDependenciesPath( "internal" ), getDependenciesPath( "test" ),
getDependenciesPath( "rsl" ), getDependenciesPath( "caching" ),
getDependenciesPath( "external" ) ) );
configuration.setTheme( getThemes() );
}
private String[] getLocales()
{
if ( runtimeLocales == null && compiledLocales == null )
{
return new String[] { getDefaultLocale() };
}
Set<String> locales = new LinkedHashSet<String>();
if ( runtimeLocales != null )
{
locales.addAll( Arrays.asList( runtimeLocales ) );
}
if ( compiledLocales != null )
{
locales.addAll( Arrays.asList( compiledLocales ) );
}
return locales.toArray( new String[0] );
}
private File[] merge( File[]... filesSets )
{
List<File> files = new ArrayList<File>();
for ( File[] fileSet : filesSets )
{
files.addAll( Arrays.asList( fileSet ) );
}
return files.toArray( new File[0] );
}
@Override
protected File getOutput()
{
return new File( build.getTestOutputDirectory(), "TestRunner.swf" );
}
@Override
protected void compileModules()
throws MojoFailureException, MojoExecutionException
{
// modules are ignored on unit tests
}
}
| false
| true
|
public void setUp()
throws MojoExecutionException, MojoFailureException
{
isSetProjectFile = false;
linkReport = false;
loadExterns = null;
if ( includeTestFiles == null || includeTestFiles.length == 0 )
{
includeTestFiles = new String[] { "**/Test*.as", "**/*Test.as" };
}
else
{
for ( int i = 0; i < includeTestFiles.length; i++ )
{
String pattern = includeTestFiles[i];
if ( pattern.endsWith( ".java" ) )
{
pattern = pattern.substring( 0, pattern.length() - 5 );
}
// Allow paths delimited by '.' or '/'
pattern = pattern.replace( '.', '/' );
includeTestFiles[i] = "**/" + pattern + ".as";
}
}
File outputFolder = new File( build.getTestOutputDirectory() );
if ( !outputFolder.exists() )
{
outputFolder.mkdirs();
}
List<String> testClasses = getTestClasses();
File testSourceFile;
try
{
testSourceFile = generateTester( testClasses );
}
catch ( Exception e )
{
throw new MojoExecutionException( "Unable to generate tester class.", e );
}
sourceFile = null;
source = testSourceFile;
super.setUp();
}
|
public void setUp()
throws MojoExecutionException, MojoFailureException
{
isSetProjectFile = false;
linkReport = false;
loadExterns = null;
if ( includeTestFiles == null || includeTestFiles.length == 0 )
{
includeTestFiles = new String[] { "**/Test*.as", "**/*Test.as" };
}
else
{
for ( int i = 0; i < includeTestFiles.length; i++ )
{
String pattern = includeTestFiles[i];
if ( pattern.endsWith( ".as" ) )
{
pattern = pattern.substring( 0, pattern.length() - 3 );
}
// Allow paths delimited by '.' or '/'
pattern = pattern.replace( '.', '/' );
includeTestFiles[i] = "**/" + pattern + ".as";
}
}
File outputFolder = new File( build.getTestOutputDirectory() );
if ( !outputFolder.exists() )
{
outputFolder.mkdirs();
}
List<String> testClasses = getTestClasses();
File testSourceFile;
try
{
testSourceFile = generateTester( testClasses );
}
catch ( Exception e )
{
throw new MojoExecutionException( "Unable to generate tester class.", e );
}
sourceFile = null;
source = testSourceFile;
super.setUp();
}
|
diff --git a/src/gtna/networks/util/ReadableFolder.java b/src/gtna/networks/util/ReadableFolder.java
index fef2d21e..e757c965 100644
--- a/src/gtna/networks/util/ReadableFolder.java
+++ b/src/gtna/networks/util/ReadableFolder.java
@@ -1,96 +1,100 @@
/* ===========================================================
* GTNA : Graph-Theoretic Network Analyzer
* ===========================================================
*
* (C) Copyright 2009-2011, by Benjamin Schiller (P2P, TU Darmstadt)
* and Contributors
*
* Project Info: http://www.p2p.tu-darmstadt.de/research/gtna/
*
* GTNA is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* GTNA is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* ---------------------------------------
* ReadableFolder.java
* ---------------------------------------
* (C) Copyright 2009-2011, by Benjamin Schiller (P2P, TU Darmstadt)
* and Contributors
*
* Original Author: "Benjamin Schiller";
* Contributors: -;
*
* Changes since 2011-05-17
* ---------------------------------------
*
*/
package gtna.networks.util;
import gtna.graph.Graph;
import gtna.io.GraphReader;
import gtna.networks.Network;
import gtna.networks.NetworkImpl;
import gtna.routing.RoutingAlgorithm;
import gtna.transformation.Transformation;
import gtna.util.Config;
import java.io.File;
/**
* @author "Benjamin Schiller"
*
*/
public class ReadableFolder extends NetworkImpl implements Network {
private int type;
private String[] files;
private int index;
public ReadableFolder(String name, String folder, String src, int type,
RoutingAlgorithm ra, Transformation[] t) {
super(key(name, folder), Integer.MIN_VALUE, new String[] {},
new String[] {}, ra, t);
this.type = type;
File d = new File(src);
if (!d.exists()) {
this.files = new String[0];
} else {
File[] f = d.listFiles();
this.files = new String[f.length];
for (int i = 0; i < f.length; i++) {
this.files[i] = f[i].getAbsolutePath();
}
}
this.index = -1;
- super.setNodes(GraphReader.nodes(this.files[0], this.type));
+ if (this.files.length == 0) {
+ super.setNodes(0);
+ } else {
+ super.setNodes(GraphReader.nodes(this.files[0], this.type));
+ }
}
public static String key(String name, String folder) {
Config.overwrite("READABLE_FOLDER_" + folder + "_NAME", name);
Config.overwrite("READABLE_FOLDER_" + folder + "_FOLDER", folder);
return "READABLE_FOLDER_" + folder;
}
public Graph generate() {
if (this.files.length == 0) {
return null;
}
this.index = (index + 1) % this.files.length;
return GraphReader.read(this.files[this.index], this.type, this
.description());
}
public String[] getFiles() {
return this.files;
}
}
| true
| true
|
public ReadableFolder(String name, String folder, String src, int type,
RoutingAlgorithm ra, Transformation[] t) {
super(key(name, folder), Integer.MIN_VALUE, new String[] {},
new String[] {}, ra, t);
this.type = type;
File d = new File(src);
if (!d.exists()) {
this.files = new String[0];
} else {
File[] f = d.listFiles();
this.files = new String[f.length];
for (int i = 0; i < f.length; i++) {
this.files[i] = f[i].getAbsolutePath();
}
}
this.index = -1;
super.setNodes(GraphReader.nodes(this.files[0], this.type));
}
|
public ReadableFolder(String name, String folder, String src, int type,
RoutingAlgorithm ra, Transformation[] t) {
super(key(name, folder), Integer.MIN_VALUE, new String[] {},
new String[] {}, ra, t);
this.type = type;
File d = new File(src);
if (!d.exists()) {
this.files = new String[0];
} else {
File[] f = d.listFiles();
this.files = new String[f.length];
for (int i = 0; i < f.length; i++) {
this.files[i] = f[i].getAbsolutePath();
}
}
this.index = -1;
if (this.files.length == 0) {
super.setNodes(0);
} else {
super.setNodes(GraphReader.nodes(this.files[0], this.type));
}
}
|
diff --git a/library/src/com/inqbarna/tablefixheaders/TableFixHeaders.java b/library/src/com/inqbarna/tablefixheaders/TableFixHeaders.java
index c994b72..f8b68d8 100644
--- a/library/src/com/inqbarna/tablefixheaders/TableFixHeaders.java
+++ b/library/src/com/inqbarna/tablefixheaders/TableFixHeaders.java
@@ -1,524 +1,524 @@
package com.inqbarna.tablefixheaders;
import java.util.ArrayList;
import java.util.List;
import com.inqbarna.tablefixheaders.adapters.TableAdapter;
import android.annotation.SuppressLint;
import android.content.Context;
import android.database.DataSetObserver;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
public class TableFixHeaders extends ViewGroup {
private final static int CLICK_SENSIVILITY = 2;
private int currentX;
private int currentY;
private TableAdapter adapter;
private int scrollX;
private int scrollY;
private int firstRow;
private int firstColumn;
private int[] widths;
private int[] heights;
@SuppressWarnings("unused")
private View headView;
private List<View> rowViewList;
private List<View> columnViewList;
private List<List<View>> bodyViewTable;
private int rowCount;
private int columnCount;
private int width;
private int height;
private Recycler recycler;
private TableAdapterDataSetObserver tableAdapterDataSetObserver;
private boolean needRelayout;
private ImageView[] shadows;
public TableFixHeaders(Context context) {
this(context, null);
}
public TableFixHeaders(Context context, AttributeSet attrs) {
super(context, attrs);
this.firstRow = 0;
this.firstColumn = 0;
this.scrollX = 0;
this.scrollY = 0;
this.headView = null;
this.rowViewList = new ArrayList<View>();
this.columnViewList = new ArrayList<View>();
this.bodyViewTable = new ArrayList<List<View>>();
this.needRelayout = true;
this.shadows = new ImageView[4];
this.shadows[0] = new ImageView(context);
this.shadows[0].setImageResource(R.drawable.shadow_left);
this.shadows[1] = new ImageView(context);
this.shadows[1].setImageResource(R.drawable.shadow_top);
this.shadows[2] = new ImageView(context);
this.shadows[2].setImageResource(R.drawable.shadow_right);
this.shadows[3] = new ImageView(context);
this.shadows[3].setImageResource(R.drawable.shadow_bottom);
}
public TableAdapter getAdapter() {
return adapter;
}
public void setAdapter(TableAdapter adapter) {
if (this.adapter != null) {
this.adapter.unregisterDataSetObserver(tableAdapterDataSetObserver);
}
this.adapter = adapter;
tableAdapterDataSetObserver = new TableAdapterDataSetObserver();
this.adapter.registerDataSetObserver(tableAdapterDataSetObserver);
this.recycler = new Recycler(adapter.getViewTypeCount());
this.rowCount = adapter.getRowCount();
this.columnCount = adapter.getColumnCount();
needRelayout = true;
requestLayout();
}
@Override
public boolean onInterceptTouchEvent(MotionEvent event) {
boolean intercept = false;
switch (event.getAction()) {
case MotionEvent.ACTION_DOWN: {
currentX = (int) event.getRawX();
currentY = (int) event.getRawY();
break;
}
case MotionEvent.ACTION_MOVE: {
int x2 = currentX - (int) event.getRawX();
int y2 = currentY - (int) event.getRawY();
if (x2 < -CLICK_SENSIVILITY || x2 > CLICK_SENSIVILITY || y2 < -CLICK_SENSIVILITY || y2 > CLICK_SENSIVILITY) {
intercept = true;
}
break;
}
}
return intercept;
}
@Override
public boolean onTouchEvent(MotionEvent event) {
switch (event.getAction()) {
case MotionEvent.ACTION_DOWN: {
currentX = (int) event.getRawX();
currentY = (int) event.getRawY();
break;
}
case MotionEvent.ACTION_MOVE: {
final int x2 = (int) event.getRawX();
final int y2 = (int) event.getRawY();
final int diffX = currentX - x2;
final int diffY = currentY - y2;
currentX = x2;
currentY = y2;
scrollX += diffX;
scrollY += diffY;
final Boolean left = diffX == 0 ? null : diffX <= 0;
final Boolean up = diffY == 0 ? null : diffY <= 0;
// scroll bounds
if (scrollX == 0) {
// no op
} else if (scrollX < 0) {
scrollX = Math.max(scrollX, -sumArray(widths, 1, firstColumn));
} else {
scrollX = Math.min(scrollX, sumArray(widths, firstColumn + 1, columnCount - firstColumn) + widths[0] - width);
}
if (scrollY == 0) {
// no op
} else if (scrollY < 0) {
scrollY = Math.max(scrollY, -sumArray(heights, 1, firstRow));
} else {
scrollY = Math.min(scrollY, Math.max(0, sumArray(heights, firstRow + 1, rowCount - firstRow) + heights[0] - height));
}
// add or remove views
if (left == null) {
// no op
- } else if (!left) {
+ } else if (!left && scrollX != 0) {
while (widths[firstColumn + 1] < scrollX) {
removeLeft();
scrollX -= widths[firstColumn + 1];
firstColumn++;
}
while (getFilledWidth() < width) {
addRight();
}
- } else {
+ } else if (left) {
while (getFilledWidth() - widths[firstColumn + rowViewList.size() - 1] >= width) {
removeRight();
}
while (0 > scrollX) {
addLeft();
firstColumn--;
scrollX += widths[firstColumn + 1];
}
}
if (up == null) {
// no op
- } else if (!up) {
+ } else if (!up && scrollY != 0) {
while (heights[firstRow + 1] < scrollY) {
removeTop();
scrollY -= heights[firstRow + 1];
firstRow++;
}
while (getFilledHeight() < height) {
addBottom();
}
- } else {
+ } else if (up) {
while (getFilledHeight() - heights[firstRow + columnViewList.size() - 1] >= height) {
removeBottom();
}
while (0 > scrollY) {
addTop();
firstRow--;
scrollY += heights[firstRow + 1];
}
}
repositionViews();
break;
}
}
return true;
}
// Width in px currently covered by attached column views: the sticky first
// column plus the visible body columns, minus the horizontal scroll offset.
private int getFilledWidth() {
    return widths[0] + sumArray(widths, firstColumn + 1, rowViewList.size()) - scrollX;
}
// Height in px currently covered by attached row views: the sticky header
// row plus the visible body rows, minus the vertical scroll offset.
private int getFilledHeight() {
    return heights[0] + sumArray(heights, firstRow + 1, columnViewList.size()) - scrollY;
}
/** Attaches one column of views on the left edge of the visible window. */
private void addLeft() {
    // Removed leftover System.out.println debug output.
    addLeftOrRight(firstColumn - 1, 0);
}
/** Attaches one row of views on the top edge of the visible window. */
private void addTop() {
    // Removed leftover System.out.println debug output.
    addTopAndBottom(firstRow - 1, 0);
}
/** Attaches one column of views on the right edge of the visible window. */
private void addRight() {
    // Removed leftover System.out.println debug output.
    final int size = rowViewList.size();
    addLeftOrRight(firstColumn + size, size);
}
/** Attaches one row of views on the bottom edge of the visible window. */
private void addBottom() {
    // Removed leftover System.out.println debug output.
    final int size = columnViewList.size();
    addTopAndBottom(firstRow + size, size);
}
// Creates and attaches a full column of views -- the column-header cell plus
// one body cell per currently visible row -- for the given absolute column,
// inserting each at position `index` within the per-row view lists.
private void addLeftOrRight(int column, int index) {
    // Column header cell (row == -1). The widths/heights arrays are offset
    // by one: slot 0 holds the sticky header dimensions.
    View view = makeView(-1, column, widths[column + 1], heights[0]);
    rowViewList.add(index, view);
    // One body cell per visible row, in row order starting at firstRow.
    int i = firstRow;
    for (List<View> list : bodyViewTable) {
        view = makeView(i, column, widths[column + 1], heights[i + 1]);
        list.add(index, view);
        i++;
    }
}
// Creates and attaches a full row of views -- the row-header cell plus one
// body cell per currently visible column -- for the given absolute row,
// inserting the new row at position `index` in the body table.
private void addTopAndBottom(int row, int index) {
    // Row header cell (column == -1).
    View view = makeView(row, -1, widths[0], heights[row + 1]);
    columnViewList.add(index, view);
    // Build the body cells for every visible column.
    List<View> list = new ArrayList<View>();
    final int size = rowViewList.size() + firstColumn;
    for (int i = firstColumn; i < size; i++) {
        view = makeView(row, i, widths[i + 1], heights[row + 1]);
        list.add(view);
    }
    bodyViewTable.add(index, list);
}
/** Detaches the leftmost visible column of views. */
private void removeLeft() {
    // Removed leftover System.out.println debug output.
    removeLeftOrRight(0);
}
/** Detaches the topmost visible row of views. */
private void removeTop() {
    // Removed leftover System.out.println debug output.
    removeTopOrBottom(0);
}
/** Detaches the rightmost visible column of views. */
private void removeRight() {
    // Removed leftover System.out.println debug output.
    removeLeftOrRight(rowViewList.size() - 1);
}
/** Detaches the bottommost visible row of views. */
private void removeBottom() {
    // Removed leftover System.out.println debug output.
    removeTopOrBottom(columnViewList.size() - 1);
}
// Detaches the column-header cell and every body cell of the visible column
// at the given index (0 = leftmost attached column).
private void removeLeftOrRight(int position) {
    removeView(rowViewList.remove(position));
    for (List<View> list : bodyViewTable) {
        removeView(list.remove(position));
    }
}
/**
 * Detaches the row-header cell and every body cell of the visible row at
 * the given index (0 = topmost attached row).
 */
private void removeTopOrBottom(int position) {
    // Drop the sticky row-header view first...
    removeView(columnViewList.remove(position));
    // ...then every body cell that belonged to the same row.
    final List<View> removedRow = bodyViewTable.remove(position);
    for (int i = 0; i < removedRow.size(); i++) {
        removeView(removedRow.get(i));
    }
}
@Override
public void removeView(View view) {
    super.removeView(view);
    // Hand the detached view to the recycler so makeView can reuse it.
    // NOTE(review): always recycles under view type 0 even though makeView
    // obtains recycled views via adapter.getItemViewType -- confirm the
    // adapter really uses a single view type.
    recycler.addRecycledView(view, 0);
}
// Lays out every attached view at its current position, applying the
// scrollX/scrollY offsets: first the column headers, then the row headers,
// then the body grid. Called after each scroll/add/remove pass.
private void repositionViews() {
    int left, top, right, bottom, i;
    // Column headers: start after the sticky corner, shifted by scrollX.
    left = widths[0] - scrollX;
    i = firstColumn;
    for (View view : rowViewList) {
        right = left + widths[++i];
        view.layout(left, 0, right, heights[0]);
        left = right;
    }
    // Row headers: start below the sticky corner, shifted by scrollY.
    top = heights[0] - scrollY;
    i = firstRow;
    for (View view : columnViewList) {
        bottom = top + heights[++i];
        view.layout(0, top, widths[0], bottom);
        top = bottom;
    }
    // Body cells: walk rows then columns, accumulating edges.
    top = heights[0] - scrollY;
    i = firstRow;
    for (List<View> list : bodyViewTable) {
        bottom = top + heights[++i];
        left = widths[0] - scrollX;
        int j = firstColumn;
        for (View view : list) {
            right = left + widths[++j];
            view.layout(left, top, right, bottom);
            left = right;
        }
        top = bottom;
    }
    invalidate();
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
    final int widthMode = MeasureSpec.getMode(widthMeasureSpec);
    final int heightMode = MeasureSpec.getMode(heightMeasureSpec);
    final int widthSize = MeasureSpec.getSize(widthMeasureSpec);
    final int heightSize = MeasureSpec.getSize(heightMeasureSpec);
    final int w;
    final int h;
    if (adapter != null) {
        // Rebuild the per-column/per-row size tables. Index 0 holds the
        // sticky header's size (adapter index -1), hence the +1 offsets.
        widths = new int[columnCount + 1];
        for (int i = -1; i < columnCount; i++) {
            widths[i + 1] += adapter.getWidth(i);
        }
        heights = new int[rowCount + 1];
        for (int i = -1; i < rowCount; i++) {
            heights[i + 1] += adapter.getHeight(i);
        }
        if (widthMode == MeasureSpec.AT_MOST) {
            w = Math.min(widthSize, sumArray(widths));
        } else if (widthMode == MeasureSpec.UNSPECIFIED) {
            w = sumArray(widths);
        } else {
            // EXACTLY: if the content is narrower than the available width,
            // stretch the body columns proportionally and give the header
            // column the rounding remainder so the total matches exactly.
            w = widthSize;
            int sumArray = sumArray(widths);
            if (sumArray < widthSize) {
                final float factor = widthSize / (float) sumArray;
                for (int i = 1; i < widths.length; i++) {
                    widths[i] = Math.round(widths[i] * factor);
                }
                widths[0] = widthSize - sumArray(widths, 1, widths.length - 1);
            }
        }
        if (heightMode == MeasureSpec.AT_MOST) {
            h = Math.min(heightSize, sumArray(heights));
        } else if (heightMode == MeasureSpec.UNSPECIFIED) {
            h = sumArray(heights);
        } else {
            h = heightSize;
        }
    } else {
        // No adapter yet: measure as empty unless an exact size is forced.
        // NOTE(review): this condition mixes heightMode (AT_MOST) with
        // widthMode (UNSPECIFIED) -- looks like it was meant to test the
        // same axis on both sides; confirm against the intended behavior.
        if (heightMode == MeasureSpec.AT_MOST || widthMode == MeasureSpec.UNSPECIFIED) {
            w = 0;
            h = 0;
        } else {
            w = widthSize;
            h = heightSize;
        }
    }
    setMeasuredDimension(w, h);
}
// Sums every element of the array.
private int sumArray(int array[]) {
    return sumArray(array, 0, array.length);
}
/**
 * Sums {@code count} consecutive elements of {@code array} starting at
 * {@code firstIndex}.
 */
private int sumArray(int array[], int firstIndex, int count) {
    int total = 0;
    final int end = firstIndex + count;
    for (int index = firstIndex; index < end; index++) {
        total += array[index];
    }
    return total;
}
@SuppressLint("DrawAllocation")
@Override
protected void onLayout(boolean changed, int l, int t, int r, int b) {
    // Removed leftover System.out.println of the layout parameters.
    // Full (re)build of the table: only done when the adapter changed
    // (needRelayout) or the view bounds changed.
    if (needRelayout || changed) {
        needRelayout = false;
        resetTable();
        if (adapter != null) {
            width = r - l;
            height = b - t;
            int left, top, right, bottom;
            final int shadowSize = getResources().getDimensionPixelSize(R.dimen.shadow_size);
            right = Math.min(width, sumArray(widths));
            bottom = Math.min(height, sumArray(heights));
            // Edge shadows: right of the header column, below the header
            // row, and along the right/bottom content edges.
            addShadow(shadows[0], widths[0], 0, widths[0] + shadowSize, bottom);
            addShadow(shadows[1], 0, heights[0], right, heights[0] + shadowSize);
            addShadow(shadows[2], right - shadowSize, 0, right, bottom);
            addShadow(shadows[3], 0, bottom - shadowSize, right, bottom);
            // Sticky corner cell (row -1, column -1).
            headView = makeAndSetup(-1, -1, 0, 0, widths[0], heights[0]);
            // Column headers for every column visible at the current scrollX.
            left = widths[0] - scrollX;
            for (int i = firstColumn; i < columnCount && left < width; i++) {
                right = left + widths[i + 1];
                final View view = makeAndSetup(-1, i, left, 0, right, heights[0]);
                rowViewList.add(view);
                left = right;
            }
            // Row headers for every row visible at the current scrollY.
            top = heights[0] - scrollY;
            for (int i = firstRow; i < rowCount && top < height; i++) {
                bottom = top + heights[i + 1];
                final View view = makeAndSetup(i, -1, 0, top, widths[0], bottom);
                columnViewList.add(view);
                top = bottom;
            }
            // Body grid: one list of cell views per visible row.
            top = heights[0] - scrollY;
            for (int i = firstRow; i < rowCount && top < height; i++) {
                bottom = top + heights[i + 1];
                left = widths[0] - scrollX;
                List<View> list = new ArrayList<View>();
                for (int j = firstColumn; j < columnCount && left < width; j++) {
                    right = left + widths[j + 1];
                    final View view = makeAndSetup(i, j, left, top, right, bottom);
                    list.add(view);
                    left = right;
                }
                bodyViewTable.add(list);
                top = bottom;
            }
        }
    }
}
// Positions and attaches one of the pre-created edge-shadow images.
private void addShadow(ImageView imageView, int l, int t, int r, int b) {
    imageView.layout(l, t, r, b);
    addView(imageView);
}
// Clears all view bookkeeping and detaches every child prior to a full
// relayout in onLayout.
private void resetTable() {
    headView = null;
    rowViewList.clear();
    columnViewList.clear();
    bodyViewTable.clear();
    removeAllViews();
}
// Creates (or recycles) the cell view for (row, column) and lays it out at
// the given edges. row/column == -1 denote the sticky header row/column.
private View makeAndSetup(int row, int column, int left, int top, int right, int bottom) {
    final View view = makeView(row, column, right - left, bottom - top);
    view.layout(left, top, right, bottom);
    return view;
}
// Obtains the cell view for (row, column) from the adapter -- offering it a
// recycled view of the matching type -- measures it exactly to w x h, and
// attaches it at the correct z-order.
private View makeView(int row, int column, int w, int h) {
    final View view = adapter.getView(row, column, recycler.getRecycledView(adapter.getItemViewType(row, column)), this);
    view.measure(MeasureSpec.makeMeasureSpec(w, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(h, MeasureSpec.EXACTLY));
    addTableView(view, row, column);
    return view;
}
// Inserts a cell at the z-order that keeps the sticky parts on top:
// body cells at the bottom (index 0), row/column headers above them, and
// the corner cell highest -- the last 4 child slots hold the shadow images,
// hence the childCount-4 / childCount-5 insertion points.
private void addTableView(View view, int row, int column) {
    if (row == -1 && column == -1) {
        // Sticky corner: just under the 4 shadows.
        addView(view, getChildCount() - 4);
    } else if (row == -1 || column == -1) {
        // Row or column header: under the shadows and the corner.
        addView(view, getChildCount() - 5);
    } else {
        // Body cell: lowest layer.
        addView(view, 0);
    }
}
// Listens for adapter data changes and forces a full table rebuild on the
// next layout pass.
private class TableAdapterDataSetObserver extends DataSetObserver {
    @Override
    public void onChanged() {
        // Rebuild everything in onLayout rather than patching in place.
        needRelayout = true;
        requestLayout();
    }
    @Override
    public void onInvalidated() {
        // Do nothing
    }
}
}
| false
| true
|
/**
 * Handles drag scrolling of the table: tracks finger movement, clamps the
 * scroll offsets to the table bounds, then attaches/detaches row and
 * column views so that only the visible window stays laid out.
 */
public boolean onTouchEvent(MotionEvent event) {
    switch (event.getAction()) {
        case MotionEvent.ACTION_DOWN: {
            currentX = (int) event.getRawX();
            currentY = (int) event.getRawY();
            break;
        }
        case MotionEvent.ACTION_MOVE: {
            final int x2 = (int) event.getRawX();
            final int y2 = (int) event.getRawY();
            final int diffX = currentX - x2;
            final int diffY = currentY - y2;
            currentX = x2;
            currentY = y2;
            scrollX += diffX;
            scrollY += diffY;
            // null -> no movement on that axis; TRUE -> finger moved right/down
            // (content scrolls toward the left/top edge).
            final Boolean left = diffX == 0 ? null : diffX <= 0;
            final Boolean up = diffY == 0 ? null : diffY <= 0;
            // scroll bounds: clamp the offsets to the table extents.
            if (scrollX == 0) {
                // no op
            } else if (scrollX < 0) {
                scrollX = Math.max(scrollX, -sumArray(widths, 1, firstColumn));
            } else {
                scrollX = Math.min(scrollX, sumArray(widths, firstColumn + 1, columnCount - firstColumn) + widths[0] - width);
            }
            if (scrollY == 0) {
                // no op
            } else if (scrollY < 0) {
                scrollY = Math.max(scrollY, -sumArray(heights, 1, firstRow));
            } else {
                scrollY = Math.min(scrollY, Math.max(0, sumArray(heights, firstRow + 1, rowCount - firstRow) + heights[0] - height));
            }
            // Add or remove views. FIX: guard the "scrolling right/down"
            // branches with scrollX != 0 / scrollY != 0 so they cannot run
            // when the offset was just clamped back to the edge (previously
            // that added/removed the wrong views), and make the opposite
            // branches explicit (`else if (left)` / `else if (up)`).
            if (left == null) {
                // no op
            } else if (!left && scrollX != 0) {
                while (widths[firstColumn + 1] < scrollX) {
                    removeLeft();
                    scrollX -= widths[firstColumn + 1];
                    firstColumn++;
                }
                while (getFilledWidth() < width) {
                    addRight();
                }
            } else if (left) {
                while (getFilledWidth() - widths[firstColumn + rowViewList.size() - 1] >= width) {
                    removeRight();
                }
                while (0 > scrollX) {
                    addLeft();
                    firstColumn--;
                    scrollX += widths[firstColumn + 1];
                }
            }
            if (up == null) {
                // no op
            } else if (!up && scrollY != 0) {
                while (heights[firstRow + 1] < scrollY) {
                    removeTop();
                    scrollY -= heights[firstRow + 1];
                    firstRow++;
                }
                while (getFilledHeight() < height) {
                    addBottom();
                }
            } else if (up) {
                while (getFilledHeight() - heights[firstRow + columnViewList.size() - 1] >= height) {
                    removeBottom();
                }
                while (0 > scrollY) {
                    addTop();
                    firstRow--;
                    scrollY += heights[firstRow + 1];
                }
            }
            repositionViews();
            break;
        }
    }
    return true;
}
|
/**
 * Handles drag scrolling of the table: tracks finger movement, clamps the
 * scroll offsets to the table bounds, then attaches/detaches row and
 * column views so that only the visible window stays laid out.
 */
public boolean onTouchEvent(MotionEvent event) {
    switch (event.getAction()) {
        case MotionEvent.ACTION_DOWN: {
            // Remember the gesture start point (raw screen coordinates).
            currentX = (int) event.getRawX();
            currentY = (int) event.getRawY();
            break;
        }
        case MotionEvent.ACTION_MOVE: {
            final int x2 = (int) event.getRawX();
            final int y2 = (int) event.getRawY();
            final int diffX = currentX - x2;
            final int diffY = currentY - y2;
            currentX = x2;
            currentY = y2;
            scrollX += diffX;
            scrollY += diffY;
            // null -> no movement on that axis; TRUE -> content scrolls
            // toward the left/top edge.
            final Boolean left = diffX == 0 ? null : diffX <= 0;
            final Boolean up = diffY == 0 ? null : diffY <= 0;
            // scroll bounds: clamp the offsets to the table extents.
            if (scrollX == 0) {
                // no op
            } else if (scrollX < 0) {
                scrollX = Math.max(scrollX, -sumArray(widths, 1, firstColumn));
            } else {
                scrollX = Math.min(scrollX, sumArray(widths, firstColumn + 1, columnCount - firstColumn) + widths[0] - width);
            }
            if (scrollY == 0) {
                // no op
            } else if (scrollY < 0) {
                scrollY = Math.max(scrollY, -sumArray(heights, 1, firstRow));
            } else {
                scrollY = Math.min(scrollY, Math.max(0, sumArray(heights, firstRow + 1, rowCount - firstRow) + heights[0] - height));
            }
            // add or remove views; the scrollX != 0 / scrollY != 0 guards
            // skip the fill loops when the offset was clamped to the edge.
            if (left == null) {
                // no op
            } else if (!left && scrollX != 0) {
                // Scrolling right: drop columns that left the window...
                while (widths[firstColumn + 1] < scrollX) {
                    removeLeft();
                    scrollX -= widths[firstColumn + 1];
                    firstColumn++;
                }
                // ...and append columns until the window is filled again.
                while (getFilledWidth() < width) {
                    addRight();
                }
            } else if (left) {
                // Scrolling left: symmetric to the branch above.
                while (getFilledWidth() - widths[firstColumn + rowViewList.size() - 1] >= width) {
                    removeRight();
                }
                while (0 > scrollX) {
                    addLeft();
                    firstColumn--;
                    scrollX += widths[firstColumn + 1];
                }
            }
            if (up == null) {
                // no op
            } else if (!up && scrollY != 0) {
                // Scrolling down: drop rows above, append rows below.
                while (heights[firstRow + 1] < scrollY) {
                    removeTop();
                    scrollY -= heights[firstRow + 1];
                    firstRow++;
                }
                while (getFilledHeight() < height) {
                    addBottom();
                }
            } else if (up) {
                // Scrolling up: symmetric to the branch above.
                while (getFilledHeight() - heights[firstRow + columnViewList.size() - 1] >= height) {
                    removeBottom();
                }
                while (0 > scrollY) {
                    addTop();
                    firstRow--;
                    scrollY += heights[firstRow + 1];
                }
            }
            repositionViews();
            break;
        }
    }
    return true;
}
|
diff --git a/grails/src/web/org/codehaus/groovy/grails/web/servlet/mvc/SimpleGrailsControllerHelper.java b/grails/src/web/org/codehaus/groovy/grails/web/servlet/mvc/SimpleGrailsControllerHelper.java
index 5e6940c05..3ec0294c3 100644
--- a/grails/src/web/org/codehaus/groovy/grails/web/servlet/mvc/SimpleGrailsControllerHelper.java
+++ b/grails/src/web/org/codehaus/groovy/grails/web/servlet/mvc/SimpleGrailsControllerHelper.java
@@ -1,622 +1,622 @@
/*
* Copyright 2004-2005 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.codehaus.groovy.grails.web.servlet.mvc;
import groovy.lang.Closure;
import groovy.lang.GroovyObject;
import groovy.lang.MissingPropertyException;
import groovy.util.Proxy;
import org.apache.commons.beanutils.BeanMap;
import org.apache.commons.collections.map.CompositeMap;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.codehaus.groovy.grails.commons.ControllerArtefactHandler;
import org.codehaus.groovy.grails.commons.GrailsApplication;
import org.codehaus.groovy.grails.commons.GrailsClassUtils;
import org.codehaus.groovy.grails.commons.GrailsControllerClass;
import org.codehaus.groovy.grails.scaffolding.GrailsScaffolder;
import org.codehaus.groovy.grails.web.metaclass.ChainDynamicMethod;
import org.codehaus.groovy.grails.web.metaclass.ControllerDynamicMethods;
import org.codehaus.groovy.grails.web.servlet.DefaultGrailsApplicationAttributes;
import org.codehaus.groovy.grails.web.servlet.FlashScope;
import org.codehaus.groovy.grails.web.servlet.GrailsApplicationAttributes;
import org.codehaus.groovy.grails.web.servlet.mvc.exceptions.ControllerExecutionException;
import org.codehaus.groovy.grails.web.servlet.mvc.exceptions.NoClosurePropertyForURIException;
import org.codehaus.groovy.grails.web.servlet.mvc.exceptions.NoViewNameDefinedException;
import org.codehaus.groovy.grails.web.servlet.mvc.exceptions.UnknownControllerException;
import org.codehaus.groovy.grails.webflow.executor.support.GrailsConventionsFlowExecutorArgumentHandler;
import org.springframework.context.ApplicationContext;
import org.springframework.web.servlet.ModelAndView;
import org.springframework.web.servlet.view.RedirectView;
import org.springframework.web.util.WebUtils;
import org.springframework.web.util.UrlPathHelper;
import org.springframework.webflow.context.servlet.ServletExternalContext;
import org.springframework.webflow.execution.support.ApplicationView;
import org.springframework.webflow.execution.support.ExternalRedirect;
import org.springframework.webflow.execution.support.FlowDefinitionRedirect;
import org.springframework.webflow.execution.support.FlowExecutionRedirect;
import org.springframework.webflow.executor.FlowExecutor;
import org.springframework.webflow.executor.ResponseInstruction;
import org.springframework.webflow.executor.support.FlowExecutorArgumentHandler;
import org.springframework.webflow.executor.support.FlowRequestHandler;
import org.springframework.webflow.executor.support.ResponseInstructionHandler;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.*;
/**
* <p>This is a helper class that does the main job of dealing with Grails web requests
*
* @author Graeme Rocher
* @since 0.1
*
* Created: 12-Jan-2006
*/
public class SimpleGrailsControllerHelper implements GrailsControllerHelper {
// Suffix appended to a controller's full name to look up its scaffolder bean.
private static final String SCAFFOLDER = "Scaffolder";
private GrailsApplication application;
private ApplicationContext applicationContext;
// Model carried over from a previous action via the chain mechanism.
private Map chainModel = Collections.EMPTY_MAP;
private ServletContext servletContext;
private GrailsApplicationAttributes grailsAttributes;
private GrailsWebRequest webRequest;
private static final Log LOG = LogFactory.getLog(SimpleGrailsControllerHelper.class);
private static final char SLASH = '/';
// Request-parameter prefix used to dispatch to an alternate action.
private static final String DISPATCH_ACTION_PARAMETER = "_action_";
// Spring bean name of the WebFlow executor.
private static final String FLOW_EXECUTOR_BEAN = "flowExecutor";
// Per-request state populated by configureStateForWebRequest.
private String id;
private String controllerName;
private String actionName;
private String controllerActionURI;
/**
 * Creates a helper bound to the given Grails application, Spring context
 * and servlet context; the Grails attributes wrapper is derived from the
 * servlet context.
 */
public SimpleGrailsControllerHelper(GrailsApplication application, ApplicationContext context, ServletContext servletContext) {
    super();
    this.application = application;
    this.applicationContext = context;
    this.servletContext = servletContext;
    this.grailsAttributes = new DefaultGrailsApplicationAttributes(this.servletContext);
}
/** Returns the servlet context this helper was constructed with. */
public ServletContext getServletContext() {
    return this.servletContext;
}
/**
 * Looks up a controller artefact by class name.
 *
 * @see org.codehaus.groovy.grails.web.servlet.mvc.GrailsControllerHelper#getControllerClassByName(java.lang.String)
 */
public GrailsControllerClass getControllerClassByName(String name) {
    return (GrailsControllerClass) this.application.getArtefact(
            ControllerArtefactHandler.TYPE, name);
}
/**
 * Looks up the controller artefact that handles the given request URI.
 *
 * @see org.codehaus.groovy.grails.web.servlet.mvc.GrailsControllerHelper#getControllerClassByURI(java.lang.String)
 */
public GrailsControllerClass getControllerClassByURI(String uri) {
    return (GrailsControllerClass) this.application.getArtefactForFeature(
            ControllerArtefactHandler.TYPE, uri);
}
/**
 * Retrieves the controller bean instance for the given controller class
 * from the Spring application context (registered under its full name).
 *
 * @see org.codehaus.groovy.grails.web.servlet.mvc.GrailsControllerHelper#getControllerInstance(org.codehaus.groovy.grails.commons.GrailsControllerClass)
 */
public GroovyObject getControllerInstance(GrailsControllerClass controllerClass) {
    return (GroovyObject)this.applicationContext.getBean(controllerClass.getFullName());
}
/**
 * Replaces every Groovy {@link Proxy} value in the model with its adaptee,
 * so the model entries remain usable from non-Groovy view technologies.
 *
 * @param model The model as a map
 */
private void removeProxiesFromModelObjects(Map model) {
    // Iterate over entries so the unwrapped value can be written in place.
    for (Iterator entryIter = model.entrySet().iterator(); entryIter.hasNext();) {
        final Map.Entry entry = (Map.Entry) entryIter.next();
        final Object value = entry.getValue();
        if (value instanceof Proxy) {
            entry.setValue(((Proxy) value).getAdaptee());
        }
    }
}
/** Convenience overload of {@code handleURI} with no extra parameters. */
public ModelAndView handleURI(String uri, GrailsWebRequest webRequest) {
    return handleURI(uri, webRequest, Collections.EMPTY_MAP);
}
/* (non-Javadoc)
* @see org.codehaus.groovy.grails.web.servlet.mvc.GrailsControllerHelper#handleURI(java.lang.String, javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse, java.util.Map)
*/
public ModelAndView handleURI(String uri, GrailsWebRequest webRequest, Map params) {
if(uri == null) {
throw new IllegalArgumentException("Controller URI [" + uri + "] cannot be null!");
}
HttpServletRequest request = webRequest.getCurrentRequest();
HttpServletResponse response = webRequest.getCurrentResponse();
configureStateForWebRequest(webRequest, request);
// if the action name is blank check its included as dispatch parameter
if(StringUtils.isBlank(actionName)) {
uri = checkDispatchAction(request, uri);
}
if(uri.endsWith("/")) {
uri = uri.substring(0,uri.length() - 1);
}
// if the id is blank check if its a request parameter
// Step 2: lookup the controller in the application.
GrailsControllerClass controllerClass = getControllerClassByURI(uri);
if (controllerClass == null) {
throw new UnknownControllerException("No controller found for URI [" + uri + "]!");
}
actionName = controllerClass.getClosurePropertyName(uri);
webRequest.setActionName(actionName);
if(LOG.isDebugEnabled()) {
LOG.debug("Processing request for controller ["+controllerName+"], action ["+actionName+"], and id ["+id+"]");
}
controllerActionURI = SLASH + controllerName + SLASH + actionName + SLASH;
// Step 3: load controller from application context.
GroovyObject controller = getControllerInstance(controllerClass);
if(!controllerClass.isHttpMethodAllowedForAction(controller, request.getMethod(), actionName)) {
try {
response.sendError(HttpServletResponse.SC_FORBIDDEN);
return null;
} catch (IOException e) {
throw new ControllerExecutionException("I/O error sending 403 error",e);
}
}
request.setAttribute( GrailsApplicationAttributes.CONTROLLER, controller );
// Step 3: if scaffolding retrieve scaffolder
GrailsScaffolder scaffolder = obtainScaffolder(controllerClass);
if (StringUtils.isBlank(actionName)) {
// Step 4a: Check if scaffolding
if( controllerClass.isScaffolding() && !scaffolder.supportsAction(actionName))
throw new NoClosurePropertyForURIException("Could not find closure property for URI [" + uri + "] for controller [" + controllerClass.getFullName() + "]!");
}
// Step 4: Set grails attributes in request scope
request.setAttribute(GrailsApplicationAttributes.REQUEST_SCOPE_ID,this.grailsAttributes);
// Step 5: get the view name for this URI.
String viewName = controllerClass.getViewByURI(uri);
boolean executeAction = invokeBeforeInterceptor(controller, controllerClass);
// if the interceptor returned false don't execute the action
if(!executeAction)
return null;
ModelAndView mv;
if(controllerClass.isFlowAction(actionName)) {
mv = executeFlow(webRequest,request, response);
}
else {
mv = executeAction(controller, controllerClass, viewName, request, response, params);
}
- boolean returnModelAndView = invokeAfterInterceptor(controllerClass, controller, mv);
+ boolean returnModelAndView = invokeAfterInterceptor(controllerClass, controller, mv) && !response.isCommitted();
return returnModelAndView ? mv : null;
}
/**
 * This method is responsible for execution of a flow based on the currently executing GrailsWebRequest
 *
 * @param webRequest The GrailsWebRequest
 * @param request The HttpServletRequest instance
 * @param response The HttpServletResponse instance
 * @return A Spring ModelAndView
 */
protected ModelAndView executeFlow(GrailsWebRequest webRequest, final HttpServletRequest request, HttpServletResponse response) {
    // Run the flow under the Grails class loader, restoring the caller's
    // context class loader afterwards.
    final Thread currentThread = Thread.currentThread();
    ClassLoader cl = currentThread.getContextClassLoader();
    try {
        currentThread.setContextClassLoader(application.getClassLoader());
        FlowExecutorArgumentHandler argumentHandler = new GrailsConventionsFlowExecutorArgumentHandler(webRequest);
        FlowExecutor flowExecutor = getFlowExecutor();
        if(flowExecutor == null) {
            throw new ControllerExecutionException("No [flowExecutor] found. This is likely a configuration problem, check your version of Grails.");
        }
        // The external context resolves the dispatcher path relative to the
        // forwarded request URI rather than the raw request.
        ServletExternalContext externalContext = new ServletExternalContext(getServletContext(), request, response) {
            public String getDispatcherPath() {
                // NOTE(review): assumes the forward-request-URI attribute is
                // set (i.e. the request was forwarded) -- an NPE would occur
                // otherwise; confirm against the dispatch path.
                String forwardURI = (String)request.getAttribute(WebUtils.FORWARD_REQUEST_URI_ATTRIBUTE);
                UrlPathHelper pathHelper = new UrlPathHelper();
                String contextPath = pathHelper.getContextPath(request);
                if(forwardURI.startsWith(contextPath)) {
                    // Strip the webapp context path to get the in-app path.
                    return forwardURI.substring(contextPath.length(),forwardURI.length());
                }
                else {
                    return forwardURI;
                }
            }
        };
        ResponseInstruction responseInstruction = createRequestHandler(argumentHandler).handleFlowRequest(externalContext);
        return toModelAndView(responseInstruction, externalContext, argumentHandler);
    }
    finally {
        currentThread.setContextClassLoader(cl);
    }
}
/**
 * Deals with translating a WebFlow ResponseInstruction instance into an appropriate Spring ModelAndView
 *
 * @param responseInstruction The ResponseInstruction instance
 * @param context The ExternalContext for the webflow
 * @param argumentHandler The FlowExecutorArgumentHandler instance
 * @return A Spring ModelAndView instance
 */
protected ModelAndView toModelAndView(final ResponseInstruction responseInstruction, final ServletExternalContext context, final FlowExecutorArgumentHandler argumentHandler) {
    // Each handle* callback maps one kind of WebFlow response onto a
    // ModelAndView; handleQuietly swallows handler exceptions.
    return (ModelAndView)new ResponseInstructionHandler() {
        protected void handleApplicationView(ApplicationView view) throws Exception {
            // forward to a view as part of an active conversation
            Map model = new HashMap(view.getModel());
            argumentHandler.exposeFlowExecutionContext(responseInstruction.getFlowExecutionKey(),
                    responseInstruction.getFlowExecutionContext(), model);
            final String viewName = view.getViewName();
            // Absolute view names are used verbatim; relative ones are
            // resolved against the current controller/action URI.
            if(viewName.startsWith("/"))
                setResult(new ModelAndView(viewName, model));
            else
                setResult(new ModelAndView(controllerActionURI + viewName, model));
        }
        protected void handleFlowDefinitionRedirect(FlowDefinitionRedirect redirect) throws Exception {
            // restart the flow by redirecting to flow launch URL
            if(LOG.isDebugEnabled())
                LOG.debug("Flow definition redirect issued to flow with id " + redirect.getFlowDefinitionId());
            String flowUrl = argumentHandler.createFlowDefinitionUrl(redirect, context);
            setResult(new ModelAndView(new RedirectView(flowUrl)));
        }
        protected void handleFlowExecutionRedirect(FlowExecutionRedirect redirect) throws Exception {
            if(LOG.isDebugEnabled())
                LOG.debug("Flow execution redirect issued " + redirect);
            // redirect to active flow execution URL
            String flowExecutionUrl = argumentHandler.createFlowExecutionUrl(
                    responseInstruction.getFlowExecutionKey(),
                    responseInstruction.getFlowExecutionContext(), context);
            setResult(new ModelAndView(new RedirectView(flowExecutionUrl)));
        }
        protected void handleExternalRedirect(ExternalRedirect redirect) throws Exception {
            if(LOG.isDebugEnabled())
                LOG.debug("External redirect issued from flow with URL " + redirect.getUrl());
            // redirect to external URL
            String externalUrl = argumentHandler.createExternalUrl(redirect,
                    responseInstruction.getFlowExecutionKey(), context);
            setResult(new ModelAndView(new RedirectView(externalUrl)));
        }
        protected void handleNull() throws Exception {
            // no response to issue
            setResult(null);
        }
    }.handleQuietly(responseInstruction).getResult();
}
/**
 * Factory method that creates a new helper for processing a request into
 * this flow controller. The handler is a basic template encapsulating
 * reusable flow execution request handling workflow.
 * This implementation just creates a new {@link FlowRequestHandler}.
 * @param argumentHandler The FlowExecutorArgumentHandler to use
 *
 * @return the controller helper
 */
protected FlowRequestHandler createRequestHandler(FlowExecutorArgumentHandler argumentHandler ) {
    return new FlowRequestHandler(getFlowExecutor(), argumentHandler);
}
/**
 * Invokes the action defined by the webRequest for the given arguments
 *
 * @param controller The controller instance
 * @param controllerClass The GrailsControllerClass that defines the conventions within the controller
 * @param viewName The name of the view to delegate to if necessary
 * @param request The HttpServletRequest object
 * @param response The HttpServletResponse object
 * @param params A map of parameters
 * @return A Spring ModelAndView instance
 */
protected ModelAndView executeAction(GroovyObject controller, GrailsControllerClass controllerClass, String viewName, HttpServletRequest request, HttpServletResponse response, Map params) {
    // Step 5a: Check if there is a before interceptor if there is execute it
    // Preserve the caller's context class loader across action execution.
    ClassLoader cl = Thread.currentThread().getContextClassLoader();
    try {
        // Step 6: get closure from closure property
        Closure action;
        try {
            action = (Closure)controller.getProperty(actionName);
        }
        catch(MissingPropertyException mpe) {
            // Scaffolding promised the action but the controller lacks it:
            // internal inconsistency. Otherwise the action simply does not
            // exist -> 404.
            if(controllerClass.isScaffolding())
                throw new IllegalStateException("Scaffolder supports action ["+actionName +"] for controller ["+controllerClass.getFullName()+"] but getAction returned null!");
            else {
                try {
                    response.sendError(HttpServletResponse.SC_NOT_FOUND);
                    return null;
                } catch (IOException e) {
                    throw new ControllerExecutionException("I/O error sending 404 error",e);
                }
            }
        }
        // Step 7: process the action
        Object returnValue = handleAction( controller,action,request,response,params );
        // Step 8: determine return value type and handle accordingly
        initChainModel(controller);
        if(response.isCommitted()) {
            // The action already wrote/redirected the response itself.
            if(LOG.isDebugEnabled()) {
                LOG.debug("Response has been redirected, returning null model and view");
            }
            return null;
        }
        else {
            if(LOG.isDebugEnabled()) {
                LOG.debug("Action ["+actionName+"] executed with result ["+returnValue+"] and view name ["+viewName+"]");
            }
            ModelAndView mv = handleActionResponse(controller,returnValue,actionName,viewName);
            if(LOG.isDebugEnabled()) {
                LOG.debug("Action ["+actionName+"] handled, created Spring model and view ["+mv+"]");
            }
            return mv;
        }
    } finally {
        Thread.currentThread().setContextClassLoader(cl);
    }
}
/**
 * Runs the controller's beforeInterceptor closure (if declared for the
 * current action) and returns whether the action should still execute.
 * Only an explicit Boolean return from the interceptor can veto execution.
 */
private boolean invokeBeforeInterceptor(GroovyObject controller, GrailsControllerClass controllerClass) {
    if (!controllerClass.isInterceptedBefore(controller, actionName)) {
        return true;
    }
    final Closure beforeInterceptor = controllerClass.getBeforeInterceptor(controller);
    if (beforeInterceptor == null) {
        return true;
    }
    // Bind the closure to the controller so it resolves properties there first.
    if (beforeInterceptor.getDelegate() != controller) {
        beforeInterceptor.setDelegate(controller);
        beforeInterceptor.setResolveStrategy(Closure.DELEGATE_FIRST);
    }
    final Object interceptorResult = beforeInterceptor.call();
    if (interceptorResult instanceof Boolean) {
        return ((Boolean) interceptorResult).booleanValue();
    }
    return true;
}
/**
 * Fetches the scaffolder bean for a scaffolding controller, or null when
 * the controller is not scaffolding.
 *
 * @throws IllegalStateException if scaffolding is enabled but no scaffolder bean exists
 */
private GrailsScaffolder obtainScaffolder(GrailsControllerClass controllerClass) {
    GrailsScaffolder scaffolder = null;
    if(controllerClass.isScaffolding()) {
        // Scaffolder beans are registered under <fullName>Scaffolder.
        scaffolder = (GrailsScaffolder)applicationContext.getBean( controllerClass.getFullName() + SCAFFOLDER );
        if(scaffolder == null)
            throw new IllegalStateException("Scaffolding set to true for controller ["+controllerClass.getFullName()+"] but no scaffolder available!");
    }
    return scaffolder;
}
/**
 * Copies the per-request state (action/controller names and id) from the
 * web request into this helper's fields, falling back to the id request
 * parameter when the URI did not supply one.
 */
private void configureStateForWebRequest(GrailsWebRequest webRequest, HttpServletRequest request) {
    this.webRequest = webRequest;
    this.actionName = webRequest.getActionName();
    this.controllerName = webRequest.getControllerName();
    this.id = webRequest.getId();
    if(StringUtils.isBlank(id) && request.getParameter(GrailsWebRequest.ID_PARAMETER) != null) {
        id = request.getParameter(GrailsWebRequest.ID_PARAMETER);
    }
}
/**
 * Runs the controller's afterInterceptor closure (if declared for the
 * current action), passing it the model and optionally the ModelAndView.
 *
 * @param controllerClass the controller conventions
 * @param controller the controller instance
 * @param mv the ModelAndView produced by the action (may be null)
 * @return false only when the interceptor explicitly returned Boolean false
 * @throws ControllerExecutionException if the closure takes an unsupported
 *         number of parameters
 */
private boolean invokeAfterInterceptor(GrailsControllerClass controllerClass, GroovyObject controller, ModelAndView mv) {
    // Step 9: Check if there is after interceptor
    Object interceptorResult = null;
    if(controllerClass.isInterceptedAfter(controller,actionName)) {
        Closure afterInterceptor = controllerClass.getAfterInterceptor(controller);
        // Bind the closure to the controller so it resolves properties there first.
        if(afterInterceptor.getDelegate() != controller) {
            afterInterceptor.setDelegate(controller);
            afterInterceptor.setResolveStrategy(Closure.DELEGATE_FIRST);
        }
        Map model = new HashMap();
        if(mv != null) {
            model = mv.getModel() != null ? mv.getModel() : new HashMap();
        }
        // Dispatch on the closure's arity: (model) or (model, modelAndView).
        switch(afterInterceptor.getMaximumNumberOfParameters()){
            case 1:
                interceptorResult = afterInterceptor.call(new Object[]{ model });
                break;
            case 2:
                interceptorResult = afterInterceptor.call(new Object[]{ model, mv });
                break;
            default:
                throw new ControllerExecutionException("AfterInterceptor closure must accept one or two parameters");
        }
    }
    // instanceof already rejects null, so the explicit null check was redundant.
    return !(interceptorResult instanceof Boolean) || ((Boolean) interceptorResult).booleanValue();
}
/**
 * Scans the request parameters for a {@code _action_*} dispatch parameter;
 * when found, derives the action name from it (also updating the
 * {@code actionName} field) and rewrites the URI to
 * {@code /controller/action}. Returns the (possibly rewritten) URI.
 */
private String checkDispatchAction(HttpServletRequest request, String uri) {
    final Enumeration paramNames = request.getParameterNames();
    while (paramNames.hasMoreElements()) {
        String paramName = (String) paramNames.nextElement();
        if (!paramName.startsWith(DISPATCH_ACTION_PARAMETER)) {
            continue;
        }
        // Submit images append ".x"/".y" to the parameter name; strip it.
        if (paramName.endsWith(".x") || paramName.endsWith(".y")) {
            paramName = paramName.substring(0, paramName.length() - 2);
        }
        actionName = GrailsClassUtils.getPropertyNameRepresentation(
                paramName.substring(DISPATCH_ACTION_PARAMETER.length()));
        uri = SLASH + controllerName + SLASH + actionName;
        break;
    }
    return uri;
}
/** Returns the Grails application attributes wrapper for this helper. */
public GrailsApplicationAttributes getGrailsAttributes() {
    return this.grailsAttributes;
}
/**
 * Executes a controller action closure with no additional parameters.
 * Convenience overload that delegates to the five-argument
 * {@code handleAction} with an empty parameter map.
 */
public Object handleAction(GroovyObject controller,Closure action, HttpServletRequest request, HttpServletResponse response) {
    return handleAction(controller,action,request,response,Collections.EMPTY_MAP);
}
/**
 * Executes a controller action closure, first merging any extra parameters
 * into the controller's dynamic "params" property, then exposing the
 * controller's errors object in request scope after the call.
 *
 * @param controller the controller instance
 * @param action the action closure to invoke
 * @param request the current HTTP request
 * @param response the current HTTP response
 * @param params extra parameters to merge into the controller params;
 *        may be null or empty
 * @return whatever the action closure returned
 */
public Object handleAction(GroovyObject controller,Closure action, HttpServletRequest request, HttpServletResponse response, Map params) {
    GrailsParameterMap controllerParams = (GrailsParameterMap)controller.getProperty("params");
    // if there are additional params add them to the params dynamic property
    boolean hasExtraParams = params != null && !params.isEmpty();
    if(hasExtraParams) {
        controllerParams.putAll(params);
    }
    Object actionResult = action.call();
    // Step 8: add any errors to the request
    Object controllerErrors = controller.getProperty(ControllerDynamicMethods.ERRORS_PROPERTY);
    request.setAttribute(GrailsApplicationAttributes.ERRORS, controllerErrors);
    return actionResult;
}
/**
 * Converts a controller action's return value into a Spring ModelAndView.
 *
 * Precedence: render suppression wins, then an explicitly set ModelAndView,
 * then the return value by type (null / Map / ModelAndView / anything else).
 * Any chain model left by a previous chain() call is layered into the result.
 *
 * @param controller the controller instance that executed the action
 * @param returnValue the value the action closure returned; may be null
 * @param closurePropertyName the action's closure property name (for errors)
 * @param viewName the view resolved for the request URI; may be blank
 * @return the ModelAndView to render, or null when nothing should be rendered
 * @see org.codehaus.groovy.grails.web.servlet.mvc.GrailsControllerHelper#handleActionResponse(org.codehaus.groovy.grails.commons.GrailsControllerClass, java.lang.Object, java.lang.String, java.lang.String)
 */
public ModelAndView handleActionResponse( GroovyObject controller,Object returnValue,String closurePropertyName, String viewName) {
    boolean viewNameBlank = (viewName == null || viewName.length() == 0);
    // an explicitly set ModelAndView (via the controller's dynamic property)
    // takes precedence over the action's return value
    ModelAndView explicityModelAndView = (ModelAndView)controller.getProperty(ControllerDynamicMethods.MODEL_AND_VIEW_PROPERTY);
    if(!webRequest.isRenderView()) {
        // rendering suppressed for this request (e.g. already handled)
        return null;
    }
    else if(explicityModelAndView != null) {
        return explicityModelAndView;
    }
    else if (returnValue == null) {
        if (viewNameBlank) {
            return null;
        } else {
            // no model returned: expose the controller's bean properties as the
            // model, layered over any chain model from a previous chain() call
            Map model;
            if(!this.chainModel.isEmpty()) {
                model = new CompositeMap(this.chainModel, new BeanMap(controller));
            }
            else {
                model = new BeanMap(controller);
            }
            return new ModelAndView(viewName, model);
        }
    } else if (returnValue instanceof Map) {
        // remove any Proxy wrappers and set the adaptee as the value
        Map returnModel = (Map)returnValue;
        removeProxiesFromModelObjects(returnModel);
        if(!this.chainModel.isEmpty()) {
            returnModel.putAll(this.chainModel);
        }
        return new ModelAndView(viewName, returnModel);
    } else if (returnValue instanceof ModelAndView) {
        ModelAndView modelAndView = (ModelAndView)returnValue;
        // remove any Proxy wrappers and set the adaptee as the value
        Map modelMap = modelAndView.getModel();
        removeProxiesFromModelObjects(modelMap);
        if(!this.chainModel.isEmpty()) {
            modelAndView.addAllObjects(this.chainModel);
        }
        // returned ModelAndView carries no view: fall back to the URI-derived one
        if (modelAndView.getView() == null && modelAndView.getViewName() == null) {
            if (viewNameBlank) {
                // NOTE(review): this message reads garbled ("returned by and no
                // view name defined by nor for") — likely missing interpolated
                // values; left unchanged to preserve runtime behavior
                throw new NoViewNameDefinedException("ModelAndView instance returned by and no view name defined by nor for closure on property [" + closurePropertyName + "] in controller [" + controller.getClass() + "]!");
            } else {
                modelAndView.setViewName(viewName);
            }
        }
        return modelAndView;
    }
    else {
        // arbitrary return value: fall back to the controller's bean properties
        Map model;
        if(!this.chainModel.isEmpty()) {
            model = new CompositeMap(this.chainModel, new BeanMap(controller));
        }
        else {
            model = new BeanMap(controller);
        }
        return new ModelAndView(viewName, model);
    }
}
/**
 * Initializes this helper's chain model from flash scope, where a previous
 * chain() call stores it. When flash scope carries no chain-model entry the
 * existing chainModel field is left untouched.
 *
 * @param controller the controller whose request property locates flash scope
 */
private void initChainModel(GroovyObject controller) {
    FlashScope fs = this.grailsAttributes.getFlashScope((HttpServletRequest)controller.getProperty(ControllerDynamicMethods.REQUEST_PROPERTY));
    if(fs.containsKey(ChainDynamicMethod.PROPERTY_CHAIN_MODEL)) {
        this.chainModel = (Map)fs.get(ChainDynamicMethod.PROPERTY_CHAIN_MODEL);
        // the key may be present with a null value; normalize to an empty map
        if(this.chainModel == null)
            this.chainModel = Collections.EMPTY_MAP;
    }
}
/**
 * Retrieves the FlowExecutor instance stored in the ApplicationContext.
 *
 * @return the FlowExecutor bean, or null when no such bean is registered
 */
protected FlowExecutor getFlowExecutor() {
    boolean flowExecutorRegistered = applicationContext.containsBean(FLOW_EXECUTOR_BEAN);
    return flowExecutorRegistered
            ? (FlowExecutor)applicationContext.getBean(FLOW_EXECUTOR_BEAN)
            : null;
}
}
| true
| true
|
/**
 * Handles a Grails controller URI: resolves the controller and action,
 * enforces allowed HTTP methods, runs before/after interceptors, and executes
 * either the action closure or a web flow.
 *
 * @param uri the controller URI (e.g. "/book/show/1"); must not be null
 * @param webRequest the bound Grails web request
 * @param params additional parameters to expose to the action
 * @return the ModelAndView to render, or null when no view should be rendered
 *         (interceptor veto, disallowed HTTP method, or committed response)
 * @throws UnknownControllerException when no controller matches the URI
 */
public ModelAndView handleURI(String uri, GrailsWebRequest webRequest, Map params) {
    if(uri == null) {
        throw new IllegalArgumentException("Controller URI [" + uri + "] cannot be null!");
    }
    HttpServletRequest request = webRequest.getCurrentRequest();
    HttpServletResponse response = webRequest.getCurrentResponse();
    configureStateForWebRequest(webRequest, request);
    // if the action name is blank check whether it is supplied as a dispatch parameter
    if(StringUtils.isBlank(actionName)) {
        uri = checkDispatchAction(request, uri);
    }
    // normalize trailing slash
    if(uri.endsWith("/")) {
        uri = uri.substring(0,uri.length() - 1);
    }
    // Step 2: lookup the controller in the application.
    GrailsControllerClass controllerClass = getControllerClassByURI(uri);
    if (controllerClass == null) {
        throw new UnknownControllerException("No controller found for URI [" + uri + "]!");
    }
    actionName = controllerClass.getClosurePropertyName(uri);
    webRequest.setActionName(actionName);
    if(LOG.isDebugEnabled()) {
        LOG.debug("Processing request for controller ["+controllerName+"], action ["+actionName+"], and id ["+id+"]");
    }
    controllerActionURI = SLASH + controllerName + SLASH + actionName + SLASH;
    // Step 3: load controller from application context.
    GroovyObject controller = getControllerInstance(controllerClass);
    // reject HTTP methods the action does not allow
    if(!controllerClass.isHttpMethodAllowedForAction(controller, request.getMethod(), actionName)) {
        try {
            response.sendError(HttpServletResponse.SC_FORBIDDEN);
            return null;
        } catch (IOException e) {
            throw new ControllerExecutionException("I/O error sending 403 error",e);
        }
    }
    request.setAttribute( GrailsApplicationAttributes.CONTROLLER, controller );
    // Step 3a: if scaffolding, retrieve the scaffolder
    GrailsScaffolder scaffolder = obtainScaffolder(controllerClass);
    if (StringUtils.isBlank(actionName)) {
        // Step 4a: Check if scaffolding supports the (blank) action
        if( controllerClass.isScaffolding() && !scaffolder.supportsAction(actionName))
            throw new NoClosurePropertyForURIException("Could not find closure property for URI [" + uri + "] for controller [" + controllerClass.getFullName() + "]!");
    }
    // Step 4: Set grails attributes in request scope
    request.setAttribute(GrailsApplicationAttributes.REQUEST_SCOPE_ID,this.grailsAttributes);
    // Step 5: get the view name for this URI.
    String viewName = controllerClass.getViewByURI(uri);
    boolean executeAction = invokeBeforeInterceptor(controller, controllerClass);
    // if the interceptor returned false don't execute the action
    if(!executeAction)
        return null;
    ModelAndView mv;
    if(controllerClass.isFlowAction(actionName)) {
        mv = executeFlow(webRequest,request, response);
    }
    else {
        mv = executeAction(controller, controllerClass, viewName, request, response, params);
    }
    // FIX: do not attempt to render when the response is already committed
    // (e.g. the action wrote output or redirected directly) — previously the
    // ModelAndView was returned regardless, causing render-after-commit errors
    boolean returnModelAndView = invokeAfterInterceptor(controllerClass, controller, mv) && !response.isCommitted();
    return returnModelAndView ? mv : null;
}
|
/**
 * Handles a Grails controller URI: resolves the controller and action,
 * enforces allowed HTTP methods, runs before/after interceptors, and executes
 * either the action closure or a web flow.
 *
 * @param uri the controller URI (e.g. "/book/show/1"); must not be null
 * @param webRequest the bound Grails web request
 * @param params additional parameters to expose to the action
 * @return the ModelAndView to render, or null when no view should be rendered
 *         (interceptor veto, disallowed HTTP method, or committed response)
 * @throws UnknownControllerException when no controller matches the URI
 */
public ModelAndView handleURI(String uri, GrailsWebRequest webRequest, Map params) {
    if(uri == null) {
        throw new IllegalArgumentException("Controller URI [" + uri + "] cannot be null!");
    }
    HttpServletRequest request = webRequest.getCurrentRequest();
    HttpServletResponse response = webRequest.getCurrentResponse();
    configureStateForWebRequest(webRequest, request);
    // if the action name is blank check its included as dispatch parameter
    if(StringUtils.isBlank(actionName)) {
        uri = checkDispatchAction(request, uri);
    }
    // normalize trailing slash
    if(uri.endsWith("/")) {
        uri = uri.substring(0,uri.length() - 1);
    }
    // Step 2: lookup the controller in the application.
    GrailsControllerClass controllerClass = getControllerClassByURI(uri);
    if (controllerClass == null) {
        throw new UnknownControllerException("No controller found for URI [" + uri + "]!");
    }
    actionName = controllerClass.getClosurePropertyName(uri);
    webRequest.setActionName(actionName);
    if(LOG.isDebugEnabled()) {
        LOG.debug("Processing request for controller ["+controllerName+"], action ["+actionName+"], and id ["+id+"]");
    }
    controllerActionURI = SLASH + controllerName + SLASH + actionName + SLASH;
    // Step 3: load controller from application context.
    GroovyObject controller = getControllerInstance(controllerClass);
    // reject HTTP methods the action does not allow
    if(!controllerClass.isHttpMethodAllowedForAction(controller, request.getMethod(), actionName)) {
        try {
            response.sendError(HttpServletResponse.SC_FORBIDDEN);
            return null;
        } catch (IOException e) {
            throw new ControllerExecutionException("I/O error sending 403 error",e);
        }
    }
    request.setAttribute( GrailsApplicationAttributes.CONTROLLER, controller );
    // Step 3a: if scaffolding retrieve scaffolder
    GrailsScaffolder scaffolder = obtainScaffolder(controllerClass);
    if (StringUtils.isBlank(actionName)) {
        // Step 4a: Check if scaffolding supports the (blank) action
        if( controllerClass.isScaffolding() && !scaffolder.supportsAction(actionName))
            throw new NoClosurePropertyForURIException("Could not find closure property for URI [" + uri + "] for controller [" + controllerClass.getFullName() + "]!");
    }
    // Step 4: Set grails attributes in request scope
    request.setAttribute(GrailsApplicationAttributes.REQUEST_SCOPE_ID,this.grailsAttributes);
    // Step 5: get the view name for this URI.
    String viewName = controllerClass.getViewByURI(uri);
    boolean executeAction = invokeBeforeInterceptor(controller, controllerClass);
    // if the interceptor returned false don't execute the action
    if(!executeAction)
        return null;
    ModelAndView mv;
    if(controllerClass.isFlowAction(actionName)) {
        mv = executeFlow(webRequest,request, response);
    }
    else {
        mv = executeAction(controller, controllerClass, viewName, request, response, params);
    }
    // render only when the after-interceptor permits it and the response has
    // not already been committed (e.g. by a direct write or redirect)
    boolean returnModelAndView = invokeAfterInterceptor(controllerClass, controller, mv) && !response.isCommitted();
    return returnModelAndView ? mv : null;
}
|
diff --git a/WoT/FMSMessageManagerWoT.java b/WoT/FMSMessageManagerWoT.java
index eed54157..4f84b58b 100644
--- a/WoT/FMSMessageManagerWoT.java
+++ b/WoT/FMSMessageManagerWoT.java
@@ -1,69 +1,69 @@
/* This code is part of Freenet. It is distributed under the GNU General
* Public License, version 2 (or at your option any later version). See
* http://www.gnu.org/ for further details of the GPL. */
package plugins.FMSPlugin.WoT;
import java.util.ArrayList;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import freenet.keys.FreenetURI;
import freenet.support.UpdatableSortedLinkedList;
import freenet.support.UpdatableSortedLinkedListKilledException;
import freenet.support.UpdatableSortedLinkedListWithForeignIndex;
import plugins.FMSPlugin.FMSBoard;
import plugins.FMSPlugin.FMSIdentityManager;
import plugins.FMSPlugin.FMSMessage;
import plugins.FMSPlugin.FMSMessageManager;
import plugins.FMSPlugin.FMSOwnIdentity;
public class FMSMessageManagerWoT implements FMSMessageManager {
/**
* Contains all boards which where found in a message. References to all messages of a board are stored in
* the board. Adding a newly downloaded message therefore is done by searching its board and calling
* <code>addMessage()</code> on that board. Further, the message is also added to mMessages, see below.
*/
private UpdatableSortedLinkedListWithForeignIndex mBoards = new UpdatableSortedLinkedListWithForeignIndex();
/**
* Contains all messages, even though they are also stored in their FMSBoard. Used for checking whether
* a message was already downloaded or not.
*/
private Hashtable<FreenetURI, FMSMessageWoT> mMessages = new Hashtable<FreenetURI, FMSMessageWoT>();
private ArrayList<FMSOwnIdentityWoT> mOwnIdentites = new ArrayList<FMSOwnIdentityWoT>();
public FMSMessage get(FreenetURI uri) {
return mMessages.get(uri);
}
public synchronized FMSBoard getBoardByName(String name) {
return (FMSBoard)mBoards.get(name);
}
public synchronized Iterator<FMSBoard> boardIterator(FMSOwnIdentity identity) {
return (Iterator<FMSBoard>)mBoards.iterator();
}
private synchronized boolean shouldDownloadMessage(FreenetURI uri) {
return (mMessages.containsKey(uri));
}
private synchronized void onMessageReceived(String newMessageData) throws UpdatableSortedLinkedListKilledException {
- FMSMessageWoT newMessage = new FMSMessageWoT(null, null, null, null, null, null, null, null);
+ FMSMessageWoT newMessage = new FMSMessageWoT(null, null, null, null, null, null, null, null, null);
String boardName = "";
String boardDescription = "";
FMSBoard board = getBoardByName(boardName);
if(board == null) {
board = new FMSBoard(this, boardName, boardDescription);
mBoards.add(board);
}
mMessages.put(newMessage.getURI(), newMessage);
board.addMessage(newMessage);
}
}
| true
| true
|
/**
 * Handles a newly downloaded message: registers it in the global message
 * table and files it under its board, creating the board on demand.
 *
 * NOTE(review): parsing of newMessageData is not implemented yet — the
 * message and board fields below are placeholders.
 *
 * @param newMessageData the raw downloaded message data (currently unused)
 * @throws UpdatableSortedLinkedListKilledException if the board list was killed
 */
private synchronized void onMessageReceived(String newMessageData) throws UpdatableSortedLinkedListKilledException {
    // FIX: FMSMessageWoT's constructor takes nine arguments; the original
    // call passed only eight and did not match any constructor
    FMSMessageWoT newMessage = new FMSMessageWoT(null, null, null, null, null, null, null, null, null);
    String boardName = "";
    String boardDescription = "";
    FMSBoard board = getBoardByName(boardName);
    if(board == null) {
        // first message for this board: create and index it
        board = new FMSBoard(this, boardName, boardDescription);
        mBoards.add(board);
    }
    mMessages.put(newMessage.getURI(), newMessage);
    board.addMessage(newMessage);
}
|
/**
 * Handles a newly downloaded message: registers it in the global message
 * table and files it under its board, creating the board on demand.
 *
 * NOTE(review): parsing of newMessageData is not implemented yet — the
 * message and board fields below are placeholders.
 *
 * @param newMessageData the raw downloaded message data (currently unused)
 * @throws UpdatableSortedLinkedListKilledException if the board list was killed
 */
private synchronized void onMessageReceived(String newMessageData) throws UpdatableSortedLinkedListKilledException {
    FMSMessageWoT newMessage = new FMSMessageWoT(null, null, null, null, null, null, null, null, null);
    String boardName = "";
    String boardDescription = "";
    FMSBoard board = getBoardByName(boardName);
    if(board == null) {
        // first message for this board: create and index it
        board = new FMSBoard(this, boardName, boardDescription);
        mBoards.add(board);
    }
    mMessages.put(newMessage.getURI(), newMessage);
    board.addMessage(newMessage);
}
|
diff --git a/src/main/java/com/daveoxley/cnery/scenes/SceneActionSchedule.java b/src/main/java/com/daveoxley/cnery/scenes/SceneActionSchedule.java
index 7e72b95..55edebd 100644
--- a/src/main/java/com/daveoxley/cnery/scenes/SceneActionSchedule.java
+++ b/src/main/java/com/daveoxley/cnery/scenes/SceneActionSchedule.java
@@ -1,71 +1,73 @@
/**
* C-Nery - A home automation web application for C-Bus.
* Copyright (C) 2008,2009,2012 Dave Oxley <dave@daveoxley.co.uk>.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package com.daveoxley.cnery.scenes;
import com.daveoxley.cnery.actions.SceneActionHome;
import com.daveoxley.cnery.entities.AbstractCondition;
import com.daveoxley.cnery.entities.Scene;
import com.daveoxley.cnery.entities.SceneAction;
import com.daveoxley.cnery.entities.SceneActionCondition;
import com.workplacesystems.queuj.schedule.VariableSchedule;
import java.util.Calendar;
import java.util.GregorianCalendar;
import org.jboss.seam.Component;
/**
*
* @author Dave Oxley <dave@daveoxley.co.uk>
*/
public class SceneActionSchedule extends VariableSchedule {
private SceneAction sceneAction;
void setSceneAction(SceneAction sceneAction) {
this.sceneAction = sceneAction;
}
@Override
protected GregorianCalendar getNextRunTime(GregorianCalendar startTime) {
SceneActionHome sah = (SceneActionHome)Component.getInstance(SceneActionHome.class, true);
sah.clearInstance();
sah.setId(sceneAction.getId());
sceneAction = sah.getInstance();
boolean firstRun = sceneAction.isFirstRun();
Scene scene = sceneAction.getScene();
GregorianCalendar nextCheckTime = null;
- if (firstRun) {
- nextCheckTime = (GregorianCalendar)startTime.clone();
- nextCheckTime.add(Calendar.SECOND, sceneAction.getDelay());
- }
for (SceneActionCondition sac : sceneAction.getConditions()) {
if ((sac.getSceneState() != SceneActionCondition.SceneState.TRIGGERED && scene.getStatePersistence() != Scene.StatePersistence.TRIGGER) || firstRun) {
if (sac.getActionType() == AbstractCondition.ActionType.TIME) {
GregorianCalendar actionTime = sac.getNextActionGregorian(startTime);
if (nextCheckTime == null || actionTime.before(nextCheckTime))
nextCheckTime = actionTime;
}
}
}
+ if (firstRun) {
+ GregorianCalendar delayTime = (GregorianCalendar)startTime.clone();
+ delayTime.add(Calendar.SECOND, sceneAction.getDelay());
+ if (nextCheckTime == null || delayTime.after(nextCheckTime))
+ return delayTime;
+ }
return nextCheckTime;
}
}
| false
| true
|
/**
 * Computes the next time this schedule should run.
 *
 * Reloads the SceneAction from persistence, takes the earliest TIME-based
 * condition trigger, and on the first run enforces the action's configured
 * start delay as a lower bound on the result.
 *
 * @param startTime the time to calculate the next run time from
 * @return the next check time, or null when nothing is scheduled
 */
protected GregorianCalendar getNextRunTime(GregorianCalendar startTime) {
    // re-read the scene action so current conditions and state are seen
    SceneActionHome sah = (SceneActionHome)Component.getInstance(SceneActionHome.class, true);
    sah.clearInstance();
    sah.setId(sceneAction.getId());
    sceneAction = sah.getInstance();
    boolean firstRun = sceneAction.isFirstRun();
    Scene scene = sceneAction.getScene();
    GregorianCalendar nextCheckTime = null;
    // find the earliest TIME-based condition trigger
    for (SceneActionCondition sac : sceneAction.getConditions()) {
        if ((sac.getSceneState() != SceneActionCondition.SceneState.TRIGGERED && scene.getStatePersistence() != Scene.StatePersistence.TRIGGER) || firstRun) {
            if (sac.getActionType() == AbstractCondition.ActionType.TIME) {
                GregorianCalendar actionTime = sac.getNextActionGregorian(startTime);
                if (nextCheckTime == null || actionTime.before(nextCheckTime))
                    nextCheckTime = actionTime;
            }
        }
    }
    // FIX: the delay is a lower bound on the first run. Previously the delay
    // time seeded nextCheckTime before the loop, so any earlier condition time
    // replaced it and the schedule could fire before the delay had elapsed.
    if (firstRun) {
        GregorianCalendar delayTime = (GregorianCalendar)startTime.clone();
        delayTime.add(Calendar.SECOND, sceneAction.getDelay());
        if (nextCheckTime == null || delayTime.after(nextCheckTime))
            return delayTime;
    }
    return nextCheckTime;
}
|
/**
 * Computes the next time this schedule should run.
 *
 * Reloads the SceneAction from persistence, takes the earliest TIME-based
 * condition trigger, and on the first run enforces the action's configured
 * start delay as a lower bound on the result.
 *
 * @param startTime the time to calculate the next run time from
 * @return the next check time, or null when nothing is scheduled
 */
protected GregorianCalendar getNextRunTime(GregorianCalendar startTime) {
    // re-read the scene action so current conditions and state are seen
    SceneActionHome sah = (SceneActionHome)Component.getInstance(SceneActionHome.class, true);
    sah.clearInstance();
    sah.setId(sceneAction.getId());
    sceneAction = sah.getInstance();
    boolean firstRun = sceneAction.isFirstRun();
    Scene scene = sceneAction.getScene();
    GregorianCalendar nextCheckTime = null;
    // find the earliest TIME-based condition trigger
    for (SceneActionCondition sac : sceneAction.getConditions()) {
        if ((sac.getSceneState() != SceneActionCondition.SceneState.TRIGGERED && scene.getStatePersistence() != Scene.StatePersistence.TRIGGER) || firstRun) {
            if (sac.getActionType() == AbstractCondition.ActionType.TIME) {
                GregorianCalendar actionTime = sac.getNextActionGregorian(startTime);
                if (nextCheckTime == null || actionTime.before(nextCheckTime))
                    nextCheckTime = actionTime;
            }
        }
    }
    // on the first run the configured delay acts as a lower bound: return the
    // delay time when it is later than every condition time (or there is none)
    if (firstRun) {
        GregorianCalendar delayTime = (GregorianCalendar)startTime.clone();
        delayTime.add(Calendar.SECOND, sceneAction.getDelay());
        if (nextCheckTime == null || delayTime.after(nextCheckTime))
            return delayTime;
    }
    return nextCheckTime;
}
|
diff --git a/android/src/com/google/zxing/client/android/CaptureActivity.java b/android/src/com/google/zxing/client/android/CaptureActivity.java
index 8b9c9259..f02f740f 100755
--- a/android/src/com/google/zxing/client/android/CaptureActivity.java
+++ b/android/src/com/google/zxing/client/android/CaptureActivity.java
@@ -1,636 +1,632 @@
/*
* Copyright (C) 2008 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.zxing.client.android;
import com.google.zxing.BarcodeFormat;
import com.google.zxing.Result;
import com.google.zxing.ResultMetadataType;
import com.google.zxing.ResultPoint;
import com.google.zxing.client.android.camera.CameraManager;
import com.google.zxing.client.android.history.HistoryManager;
import com.google.zxing.client.android.result.ResultButtonListener;
import com.google.zxing.client.android.result.ResultHandler;
import com.google.zxing.client.android.result.ResultHandlerFactory;
import com.google.zxing.client.android.result.supplement.SupplementalInfoRetriever;
import com.google.zxing.client.android.share.ShareActivity;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.content.res.Configuration;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Rect;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.preference.PreferenceManager;
import android.text.ClipboardManager;
import android.util.Log;
import android.util.TypedValue;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuItem;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
import android.view.Window;
import android.view.WindowManager;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import java.io.IOException;
import java.text.DateFormat;
import java.util.Date;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.Vector;
/**
* The barcode reader activity itself. This is loosely based on the CameraPreview
* example included in the Android SDK.
*
* @author dswitkin@google.com (Daniel Switkin)
* @author Sean Owen
*/
public final class CaptureActivity extends Activity implements SurfaceHolder.Callback {
private static final String TAG = CaptureActivity.class.getSimpleName();
private static final int SHARE_ID = Menu.FIRST;
private static final int HISTORY_ID = Menu.FIRST + 1;
private static final int SETTINGS_ID = Menu.FIRST + 2;
private static final int HELP_ID = Menu.FIRST + 3;
private static final int ABOUT_ID = Menu.FIRST + 4;
private static final long INTENT_RESULT_DURATION = 1500L;
private static final long BULK_MODE_SCAN_DELAY_MS = 1000L;
private static final String PACKAGE_NAME = "com.google.zxing.client.android";
private static final String PRODUCT_SEARCH_URL_PREFIX = "http://www.google";
private static final String PRODUCT_SEARCH_URL_SUFFIX = "/m/products/scan";
private static final String ZXING_URL = "http://zxing.appspot.com/scan";
private static final String RETURN_CODE_PLACEHOLDER = "{CODE}";
private static final String RETURN_URL_PARAM = "ret";
private static final Set<ResultMetadataType> DISPLAYABLE_METADATA_TYPES;
static {
DISPLAYABLE_METADATA_TYPES = new HashSet<ResultMetadataType>(5);
DISPLAYABLE_METADATA_TYPES.add(ResultMetadataType.ISSUE_NUMBER);
DISPLAYABLE_METADATA_TYPES.add(ResultMetadataType.SUGGESTED_PRICE);
DISPLAYABLE_METADATA_TYPES.add(ResultMetadataType.ERROR_CORRECTION_LEVEL);
DISPLAYABLE_METADATA_TYPES.add(ResultMetadataType.POSSIBLE_COUNTRY);
}
private enum Source {
NATIVE_APP_INTENT,
PRODUCT_SEARCH_LINK,
ZXING_LINK,
NONE
}
private CaptureActivityHandler handler;
private ViewfinderView viewfinderView;
private TextView statusView;
private View resultView;
private Result lastResult;
private boolean hasSurface;
private boolean copyToClipboard;
private Source source;
private String sourceUrl;
private String returnUrlTemplate;
private Vector<BarcodeFormat> decodeFormats;
private String characterSet;
private String versionName;
private HistoryManager historyManager;
private InactivityTimer inactivityTimer;
private BeepManager beepManager;
private final DialogInterface.OnClickListener aboutListener = new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialogInterface, int i) {
Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(getString(R.string.zxing_url)));
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
startActivity(intent);
}
};
ViewfinderView getViewfinderView() {
return viewfinderView;
}
public Handler getHandler() {
return handler;
}
@Override
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
Window window = getWindow();
window.addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.capture);
CameraManager.init(getApplication());
viewfinderView = (ViewfinderView) findViewById(R.id.viewfinder_view);
resultView = findViewById(R.id.result_view);
statusView = (TextView) findViewById(R.id.status_view);
handler = null;
lastResult = null;
hasSurface = false;
historyManager = new HistoryManager(this);
historyManager.trimHistory();
inactivityTimer = new InactivityTimer(this);
beepManager = new BeepManager(this);
showHelpOnFirstLaunch();
}
@Override
protected void onResume() {
super.onResume();
resetStatusView();
SurfaceView surfaceView = (SurfaceView) findViewById(R.id.preview_view);
SurfaceHolder surfaceHolder = surfaceView.getHolder();
if (hasSurface) {
// The activity was paused but not stopped, so the surface still exists. Therefore
// surfaceCreated() won't be called, so init the camera here.
initCamera(surfaceHolder);
} else {
// Install the callback and wait for surfaceCreated() to init the camera.
surfaceHolder.addCallback(this);
surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
Intent intent = getIntent();
String action = intent == null ? null : intent.getAction();
String dataString = intent == null ? null : intent.getDataString();
if (intent != null && action != null) {
if (action.equals(Intents.Scan.ACTION)) {
// Scan the formats the intent requested, and return the result to the calling activity.
source = Source.NATIVE_APP_INTENT;
decodeFormats = DecodeFormatManager.parseDecodeFormats(intent);
} else if (dataString != null && dataString.contains(PRODUCT_SEARCH_URL_PREFIX) &&
dataString.contains(PRODUCT_SEARCH_URL_SUFFIX)) {
// Scan only products and send the result to mobile Product Search.
source = Source.PRODUCT_SEARCH_LINK;
sourceUrl = dataString;
decodeFormats = DecodeFormatManager.PRODUCT_FORMATS;
} else if (dataString != null && dataString.startsWith(ZXING_URL)) {
// Scan formats requested in query string (all formats if none specified).
// If a return URL is specified, send the results there. Otherwise, handle it ourselves.
source = Source.ZXING_LINK;
sourceUrl = dataString;
Uri inputUri = Uri.parse(sourceUrl);
returnUrlTemplate = inputUri.getQueryParameter(RETURN_URL_PARAM);
decodeFormats = DecodeFormatManager.parseDecodeFormats(inputUri);
} else {
// Scan all formats and handle the results ourselves (launched from Home).
source = Source.NONE;
decodeFormats = null;
}
characterSet = intent.getStringExtra(Intents.Scan.CHARACTER_SET);
} else {
source = Source.NONE;
decodeFormats = null;
characterSet = null;
}
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
copyToClipboard = prefs.getBoolean(PreferencesActivity.KEY_COPY_TO_CLIPBOARD, true);
beepManager.updatePrefs();
}
@Override
protected void onPause() {
super.onPause();
if (handler != null) {
handler.quitSynchronously();
handler = null;
}
CameraManager.get().closeDriver();
}
@Override
protected void onDestroy() {
inactivityTimer.shutdown();
super.onDestroy();
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
if (source == Source.NATIVE_APP_INTENT) {
setResult(RESULT_CANCELED);
finish();
return true;
} else if ((source == Source.NONE || source == Source.ZXING_LINK) && lastResult != null) {
resetStatusView();
if (handler != null) {
handler.sendEmptyMessage(R.id.restart_preview);
}
return true;
}
} else if (keyCode == KeyEvent.KEYCODE_FOCUS || keyCode == KeyEvent.KEYCODE_CAMERA) {
// Handle these events so they don't launch the Camera app
return true;
}
return super.onKeyDown(keyCode, event);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
super.onCreateOptionsMenu(menu);
menu.add(0, SHARE_ID, 0, R.string.menu_share)
.setIcon(android.R.drawable.ic_menu_share);
menu.add(0, HISTORY_ID, 0, R.string.menu_history)
.setIcon(android.R.drawable.ic_menu_recent_history);
menu.add(0, SETTINGS_ID, 0, R.string.menu_settings)
.setIcon(android.R.drawable.ic_menu_preferences);
menu.add(0, HELP_ID, 0, R.string.menu_help)
.setIcon(android.R.drawable.ic_menu_help);
menu.add(0, ABOUT_ID, 0, R.string.menu_about)
.setIcon(android.R.drawable.ic_menu_info_details);
return true;
}
// Don't display the share menu item if the result overlay is showing.
@Override
public boolean onPrepareOptionsMenu(Menu menu) {
super.onPrepareOptionsMenu(menu);
menu.findItem(SHARE_ID).setVisible(lastResult == null);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case SHARE_ID: {
Intent intent = new Intent(Intent.ACTION_VIEW);
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
intent.setClassName(this, ShareActivity.class.getName());
startActivity(intent);
break;
}
case HISTORY_ID: {
AlertDialog historyAlert = historyManager.buildAlert();
historyAlert.show();
break;
}
case SETTINGS_ID: {
Intent intent = new Intent(Intent.ACTION_VIEW);
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
intent.setClassName(this, PreferencesActivity.class.getName());
startActivity(intent);
break;
}
case HELP_ID: {
Intent intent = new Intent(Intent.ACTION_VIEW);
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
intent.setClassName(this, HelpActivity.class.getName());
startActivity(intent);
break;
}
case ABOUT_ID:
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle(getString(R.string.title_about) + versionName);
builder.setMessage(getString(R.string.msg_about) + "\n\n" + getString(R.string.zxing_url));
builder.setIcon(R.drawable.launcher_icon);
builder.setPositiveButton(R.string.button_open_browser, aboutListener);
builder.setNegativeButton(R.string.button_cancel, null);
builder.show();
break;
}
return super.onOptionsItemSelected(item);
}
@Override
public void onConfigurationChanged(Configuration config) {
// Do nothing, this is to prevent the activity from being restarted when the keyboard opens.
super.onConfigurationChanged(config);
}
public void surfaceCreated(SurfaceHolder holder) {
if (!hasSurface) {
hasSurface = true;
initCamera(holder);
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
hasSurface = false;
}
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
/**
* A valid barcode has been found, so give an indication of success and show the results.
*
* @param rawResult The contents of the barcode.
* @param barcode A greyscale bitmap of the camera data which was decoded.
*/
public void handleDecode(Result rawResult, Bitmap barcode) {
inactivityTimer.onActivity();
lastResult = rawResult;
historyManager.addHistoryItem(rawResult);
if (barcode == null) {
// This is from history -- no saved barcode
handleDecodeInternally(rawResult, null);
} else {
beepManager.playBeepSoundAndVibrate();
drawResultPoints(barcode, rawResult);
switch (source) {
case NATIVE_APP_INTENT:
case PRODUCT_SEARCH_LINK:
handleDecodeExternally(rawResult, barcode);
break;
case ZXING_LINK:
if (returnUrlTemplate == null){
handleDecodeInternally(rawResult, barcode);
} else {
handleDecodeExternally(rawResult, barcode);
}
break;
case NONE:
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
if (prefs.getBoolean(PreferencesActivity.KEY_BULK_MODE, false)) {
Toast.makeText(this, R.string.msg_bulk_mode_scanned, Toast.LENGTH_SHORT).show();
// Wait a moment or else it will scan the same barcode continuously about 3 times
if (handler != null) {
handler.sendEmptyMessageDelayed(R.id.restart_preview, BULK_MODE_SCAN_DELAY_MS);
}
resetStatusView();
} else {
handleDecodeInternally(rawResult, barcode);
}
break;
}
}
}
/**
 * Superimpose a line for 1D or dots for 2D to highlight the key features of the barcode.
 *
 * @param barcode A bitmap of the captured image; drawn onto in place.
 * @param rawResult The decoded result which contains the points to draw.
 */
private void drawResultPoints(Bitmap barcode, Result rawResult) {
    ResultPoint[] points = rawResult.getResultPoints();
    if (points != null && points.length > 0) {
        Canvas canvas = new Canvas(barcode);
        Paint paint = new Paint();
        // Frame the captured image with a thin border.
        paint.setColor(getResources().getColor(R.color.result_image_border));
        paint.setStrokeWidth(3.0f);
        paint.setStyle(Paint.Style.STROKE);
        Rect border = new Rect(2, 2, barcode.getWidth() - 2, barcode.getHeight() - 2);
        canvas.drawRect(border, paint);
        paint.setColor(getResources().getColor(R.color.result_points));
        if (points.length == 2) {
            // Plain 1D barcode: one line across it.
            paint.setStrokeWidth(4.0f);
            drawLine(canvas, paint, points[0], points[1]);
        } else if (points.length == 4 &&
                   (rawResult.getBarcodeFormat().equals(BarcodeFormat.UPC_A) ||
                    rawResult.getBarcodeFormat().equals(BarcodeFormat.EAN_13))) {
            // BUG FIX: the two format checks must be grouped. The original
            // condition parsed as (length == 4 && UPC_A) || EAN_13 because &&
            // binds tighter than ||, so an EAN-13 result with fewer than 4
            // points entered this branch and indexed points[2]/points[3]
            // out of bounds.
            // Hacky special case -- draw two lines, for the barcode and metadata
            drawLine(canvas, paint, points[0], points[1]);
            drawLine(canvas, paint, points[2], points[3]);
        } else {
            // 2D code: mark each result point with a dot.
            paint.setStrokeWidth(10.0f);
            for (ResultPoint point : points) {
                canvas.drawPoint(point.getX(), point.getY(), paint);
            }
        }
    }
}
// Draws a straight segment between two decoded result points.
private static void drawLine(Canvas canvas, Paint paint, ResultPoint a, ResultPoint b) {
    float startX = a.getX();
    float startY = a.getY();
    float endX = b.getX();
    float endY = b.getY();
    canvas.drawLine(startX, startY, endX, endY, paint);
}
// Put up our own UI for how to handle the decoded contents.
private void handleDecodeInternally(Result rawResult, Bitmap barcode) {
statusView.setVisibility(View.GONE);
viewfinderView.setVisibility(View.GONE);
resultView.setVisibility(View.VISIBLE);
ImageView barcodeImageView = (ImageView) findViewById(R.id.barcode_image_view);
if (barcode == null) {
barcodeImageView.setImageBitmap(BitmapFactory.decodeResource(getResources(),
R.drawable.launcher_icon));
} else {
barcodeImageView.setImageBitmap(barcode);
}
TextView formatTextView = (TextView) findViewById(R.id.format_text_view);
formatTextView.setText(rawResult.getBarcodeFormat().toString());
ResultHandler resultHandler = ResultHandlerFactory.makeResultHandler(this, rawResult);
TextView typeTextView = (TextView) findViewById(R.id.type_text_view);
typeTextView.setText(resultHandler.getType().toString());
DateFormat formatter = DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.SHORT);
String formattedTime = formatter.format(new Date(rawResult.getTimestamp()));
TextView timeTextView = (TextView) findViewById(R.id.time_text_view);
timeTextView.setText(formattedTime);
TextView metaTextView = (TextView) findViewById(R.id.meta_text_view);
View metaTextViewLabel = findViewById(R.id.meta_text_view_label);
metaTextView.setVisibility(View.GONE);
metaTextViewLabel.setVisibility(View.GONE);
Map<ResultMetadataType,Object> metadata =
(Map<ResultMetadataType,Object>) rawResult.getResultMetadata();
if (metadata != null) {
StringBuilder metadataText = new StringBuilder(20);
for (Map.Entry<ResultMetadataType,Object> entry : metadata.entrySet()) {
if (DISPLAYABLE_METADATA_TYPES.contains(entry.getKey())) {
metadataText.append(entry.getValue()).append('\n');
}
}
if (metadataText.length() > 0) {
metadataText.setLength(metadataText.length() - 1);
metaTextView.setText(metadataText);
metaTextView.setVisibility(View.VISIBLE);
metaTextViewLabel.setVisibility(View.VISIBLE);
}
}
TextView contentsTextView = (TextView) findViewById(R.id.contents_text_view);
CharSequence displayContents = resultHandler.getDisplayContents();
contentsTextView.setText(displayContents);
// Crudely scale betweeen 22 and 32 -- bigger font for shorter text
int scaledSize = Math.max(22, 32 - displayContents.length() / 4);
contentsTextView.setTextSize(TypedValue.COMPLEX_UNIT_SP, scaledSize);
TextView supplementTextView = (TextView) findViewById(R.id.contents_supplement_text_view);
- if (supplementTextView != null) {
- supplementTextView.setText("");
- supplementTextView.setOnClickListener(null);
- if (PreferenceManager.getDefaultSharedPreferences(this).getBoolean(PreferencesActivity.KEY_SUPPLEMENTAL, true)) {
- SupplementalInfoRetriever.maybeInvokeRetrieval(supplementTextView, resultHandler.getResult(), handler, this);
- }
- } else {
- Log.w(TAG, "Unable to find supplement text view?");
+ supplementTextView.setText("");
+ supplementTextView.setOnClickListener(null);
+ if (PreferenceManager.getDefaultSharedPreferences(this).getBoolean(PreferencesActivity.KEY_SUPPLEMENTAL, true)) {
+ SupplementalInfoRetriever.maybeInvokeRetrieval(supplementTextView, resultHandler.getResult(), handler, this);
}
int buttonCount = resultHandler.getButtonCount();
ViewGroup buttonView = (ViewGroup) findViewById(R.id.result_button_view);
buttonView.requestFocus();
for (int x = 0; x < ResultHandler.MAX_BUTTON_COUNT; x++) {
TextView button = (TextView) buttonView.getChildAt(x);
if (x < buttonCount) {
button.setVisibility(View.VISIBLE);
button.setText(resultHandler.getButtonText(x));
button.setOnClickListener(new ResultButtonListener(resultHandler, x));
} else {
button.setVisibility(View.GONE);
}
}
if (copyToClipboard) {
ClipboardManager clipboard = (ClipboardManager) getSystemService(CLIPBOARD_SERVICE);
clipboard.setText(displayContents);
}
}
// Briefly show the contents of the barcode, then handle the result outside Barcode Scanner.
private void handleDecodeExternally(Result rawResult, Bitmap barcode) {
viewfinderView.drawResultBitmap(barcode);
// Since this message will only be shown for a second, just tell the user what kind of
// barcode was found (e.g. contact info) rather than the full contents, which they won't
// have time to read.
ResultHandler resultHandler = ResultHandlerFactory.makeResultHandler(this, rawResult);
statusView.setText(getString(resultHandler.getDisplayTitle()));
if (copyToClipboard) {
ClipboardManager clipboard = (ClipboardManager) getSystemService(CLIPBOARD_SERVICE);
clipboard.setText(resultHandler.getDisplayContents());
}
if (source == Source.NATIVE_APP_INTENT) {
// Hand back whatever action they requested - this can be changed to Intents.Scan.ACTION when
// the deprecated intent is retired.
Intent intent = new Intent(getIntent().getAction());
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
intent.putExtra(Intents.Scan.RESULT, rawResult.toString());
intent.putExtra(Intents.Scan.RESULT_FORMAT, rawResult.getBarcodeFormat().toString());
// Delivery is delayed so the user sees the brief status message first.
Message message = Message.obtain(handler, R.id.return_scan_result);
message.obj = intent;
handler.sendMessageDelayed(message, INTENT_RESULT_DURATION);
} else if (source == Source.PRODUCT_SEARCH_LINK) {
// Reformulate the URL which triggered us into a query, so that the request goes to the same
// TLD as the scan URL.
Message message = Message.obtain(handler, R.id.launch_product_query);
// NOTE(review): assumes sourceUrl contains "/scan"; if it did not, end would be -1
// and substring() below would throw. Confirm PRODUCT_SEARCH_LINK guarantees this.
int end = sourceUrl.lastIndexOf("/scan");
message.obj = sourceUrl.substring(0, end) + "?q=" +
resultHandler.getDisplayContents().toString() + "&source=zxing";
handler.sendMessageDelayed(message, INTENT_RESULT_DURATION);
} else if (source == Source.ZXING_LINK) {
// Replace each occurrence of RETURN_CODE_PLACEHOLDER in the returnUrlTemplate
// with the scanned code. This allows both queries and REST-style URLs to work.
Message message = Message.obtain(handler, R.id.launch_product_query);
message.obj = returnUrlTemplate.replace(RETURN_CODE_PLACEHOLDER,
resultHandler.getDisplayContents().toString());
handler.sendMessageDelayed(message, INTENT_RESULT_DURATION);
}
}
/**
 * We want the help screen to be shown automatically the first time a new version of the app is
 * run. The easiest way to do this is to check android:versionCode from the manifest, and compare
 * it to a value stored as a preference.
 *
 * @return true if the help activity was launched, false otherwise
 */
private boolean showHelpOnFirstLaunch() {
    try {
        PackageInfo packageInfo = getPackageManager().getPackageInfo(PACKAGE_NAME, 0);
        int installedVersion = packageInfo.versionCode;
        // Since we're paying to talk to the PackageManager anyway, it makes sense to cache the app
        // version name here for display in the about box later.
        this.versionName = packageInfo.versionName;
        SharedPreferences preferences = PreferenceManager.getDefaultSharedPreferences(this);
        int seenVersion = preferences.getInt(PreferencesActivity.KEY_HELP_VERSION_SHOWN, 0);
        if (installedVersion <= seenVersion) {
            return false;
        }
        preferences.edit().putInt(PreferencesActivity.KEY_HELP_VERSION_SHOWN, installedVersion).commit();
        Intent helpIntent = new Intent(this, HelpActivity.class);
        helpIntent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
        // Show the default page on a clean install, and the what's new page on an upgrade.
        String page = (seenVersion == 0) ? HelpActivity.DEFAULT_PAGE : HelpActivity.WHATS_NEW_PAGE;
        helpIntent.putExtra(HelpActivity.REQUESTED_PAGE_KEY, page);
        startActivity(helpIntent);
        return true;
    } catch (PackageManager.NameNotFoundException e) {
        Log.w(TAG, e);
    }
    return false;
}
// Opens the camera driver on the given preview surface. On any failure, warns
// the user about the framework bug and finishes the activity; on success,
// lazily creates the decode handler (only if one does not already exist).
private void initCamera(SurfaceHolder surfaceHolder) {
try {
CameraManager.get().openDriver(surfaceHolder);
} catch (IOException ioe) {
Log.w(TAG, ioe);
displayFrameworkBugMessageAndExit();
return;
} catch (RuntimeException e) {
// Barcode Scanner has seen crashes in the wild of this variety:
// java.?lang.?RuntimeException: Fail to connect to camera service
Log.w(TAG, "Unexpected error initializating camera", e);
displayFrameworkBugMessageAndExit();
return;
}
// Only the first successful initialization constructs the handler.
if (handler == null) {
handler = new CaptureActivityHandler(this, decodeFormats, characterSet);
}
}
// Tells the user about the camera-framework bug; FinishListener closes the
// activity whether the dialog is confirmed or cancelled.
private void displayFrameworkBugMessageAndExit() {
    new AlertDialog.Builder(this)
        .setTitle(getString(R.string.app_name))
        .setMessage(getString(R.string.msg_camera_framework_bug))
        .setPositiveButton(R.string.button_ok, new FinishListener(this))
        .setOnCancelListener(new FinishListener(this))
        .show();
}
// Restores the scanning UI (status text + viewfinder) and clears any
// previously shown result so a new scan starts from a clean state.
private void resetStatusView() {
resultView.setVisibility(View.GONE);
statusView.setText(R.string.msg_default_status);
statusView.setVisibility(View.VISIBLE);
viewfinderView.setVisibility(View.VISIBLE);
lastResult = null;
}
// Delegates a redraw request to the viewfinder overlay.
public void drawViewfinder() {
viewfinderView.drawViewfinder();
}
}
| true
| true
|
private void handleDecodeInternally(Result rawResult, Bitmap barcode) {
statusView.setVisibility(View.GONE);
viewfinderView.setVisibility(View.GONE);
resultView.setVisibility(View.VISIBLE);
ImageView barcodeImageView = (ImageView) findViewById(R.id.barcode_image_view);
if (barcode == null) {
barcodeImageView.setImageBitmap(BitmapFactory.decodeResource(getResources(),
R.drawable.launcher_icon));
} else {
barcodeImageView.setImageBitmap(barcode);
}
TextView formatTextView = (TextView) findViewById(R.id.format_text_view);
formatTextView.setText(rawResult.getBarcodeFormat().toString());
ResultHandler resultHandler = ResultHandlerFactory.makeResultHandler(this, rawResult);
TextView typeTextView = (TextView) findViewById(R.id.type_text_view);
typeTextView.setText(resultHandler.getType().toString());
DateFormat formatter = DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.SHORT);
String formattedTime = formatter.format(new Date(rawResult.getTimestamp()));
TextView timeTextView = (TextView) findViewById(R.id.time_text_view);
timeTextView.setText(formattedTime);
TextView metaTextView = (TextView) findViewById(R.id.meta_text_view);
View metaTextViewLabel = findViewById(R.id.meta_text_view_label);
metaTextView.setVisibility(View.GONE);
metaTextViewLabel.setVisibility(View.GONE);
Map<ResultMetadataType,Object> metadata =
(Map<ResultMetadataType,Object>) rawResult.getResultMetadata();
if (metadata != null) {
StringBuilder metadataText = new StringBuilder(20);
for (Map.Entry<ResultMetadataType,Object> entry : metadata.entrySet()) {
if (DISPLAYABLE_METADATA_TYPES.contains(entry.getKey())) {
metadataText.append(entry.getValue()).append('\n');
}
}
if (metadataText.length() > 0) {
metadataText.setLength(metadataText.length() - 1);
metaTextView.setText(metadataText);
metaTextView.setVisibility(View.VISIBLE);
metaTextViewLabel.setVisibility(View.VISIBLE);
}
}
TextView contentsTextView = (TextView) findViewById(R.id.contents_text_view);
CharSequence displayContents = resultHandler.getDisplayContents();
contentsTextView.setText(displayContents);
// Crudely scale betweeen 22 and 32 -- bigger font for shorter text
int scaledSize = Math.max(22, 32 - displayContents.length() / 4);
contentsTextView.setTextSize(TypedValue.COMPLEX_UNIT_SP, scaledSize);
TextView supplementTextView = (TextView) findViewById(R.id.contents_supplement_text_view);
if (supplementTextView != null) {
supplementTextView.setText("");
supplementTextView.setOnClickListener(null);
if (PreferenceManager.getDefaultSharedPreferences(this).getBoolean(PreferencesActivity.KEY_SUPPLEMENTAL, true)) {
SupplementalInfoRetriever.maybeInvokeRetrieval(supplementTextView, resultHandler.getResult(), handler, this);
}
} else {
Log.w(TAG, "Unable to find supplement text view?");
}
int buttonCount = resultHandler.getButtonCount();
ViewGroup buttonView = (ViewGroup) findViewById(R.id.result_button_view);
buttonView.requestFocus();
for (int x = 0; x < ResultHandler.MAX_BUTTON_COUNT; x++) {
TextView button = (TextView) buttonView.getChildAt(x);
if (x < buttonCount) {
button.setVisibility(View.VISIBLE);
button.setText(resultHandler.getButtonText(x));
button.setOnClickListener(new ResultButtonListener(resultHandler, x));
} else {
button.setVisibility(View.GONE);
}
}
if (copyToClipboard) {
ClipboardManager clipboard = (ClipboardManager) getSystemService(CLIPBOARD_SERVICE);
clipboard.setText(displayContents);
}
}
|
private void handleDecodeInternally(Result rawResult, Bitmap barcode) {
statusView.setVisibility(View.GONE);
viewfinderView.setVisibility(View.GONE);
resultView.setVisibility(View.VISIBLE);
ImageView barcodeImageView = (ImageView) findViewById(R.id.barcode_image_view);
if (barcode == null) {
barcodeImageView.setImageBitmap(BitmapFactory.decodeResource(getResources(),
R.drawable.launcher_icon));
} else {
barcodeImageView.setImageBitmap(barcode);
}
TextView formatTextView = (TextView) findViewById(R.id.format_text_view);
formatTextView.setText(rawResult.getBarcodeFormat().toString());
ResultHandler resultHandler = ResultHandlerFactory.makeResultHandler(this, rawResult);
TextView typeTextView = (TextView) findViewById(R.id.type_text_view);
typeTextView.setText(resultHandler.getType().toString());
DateFormat formatter = DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.SHORT);
String formattedTime = formatter.format(new Date(rawResult.getTimestamp()));
TextView timeTextView = (TextView) findViewById(R.id.time_text_view);
timeTextView.setText(formattedTime);
TextView metaTextView = (TextView) findViewById(R.id.meta_text_view);
View metaTextViewLabel = findViewById(R.id.meta_text_view_label);
metaTextView.setVisibility(View.GONE);
metaTextViewLabel.setVisibility(View.GONE);
Map<ResultMetadataType,Object> metadata =
(Map<ResultMetadataType,Object>) rawResult.getResultMetadata();
if (metadata != null) {
StringBuilder metadataText = new StringBuilder(20);
for (Map.Entry<ResultMetadataType,Object> entry : metadata.entrySet()) {
if (DISPLAYABLE_METADATA_TYPES.contains(entry.getKey())) {
metadataText.append(entry.getValue()).append('\n');
}
}
if (metadataText.length() > 0) {
metadataText.setLength(metadataText.length() - 1);
metaTextView.setText(metadataText);
metaTextView.setVisibility(View.VISIBLE);
metaTextViewLabel.setVisibility(View.VISIBLE);
}
}
TextView contentsTextView = (TextView) findViewById(R.id.contents_text_view);
CharSequence displayContents = resultHandler.getDisplayContents();
contentsTextView.setText(displayContents);
// Crudely scale betweeen 22 and 32 -- bigger font for shorter text
int scaledSize = Math.max(22, 32 - displayContents.length() / 4);
contentsTextView.setTextSize(TypedValue.COMPLEX_UNIT_SP, scaledSize);
TextView supplementTextView = (TextView) findViewById(R.id.contents_supplement_text_view);
supplementTextView.setText("");
supplementTextView.setOnClickListener(null);
if (PreferenceManager.getDefaultSharedPreferences(this).getBoolean(PreferencesActivity.KEY_SUPPLEMENTAL, true)) {
SupplementalInfoRetriever.maybeInvokeRetrieval(supplementTextView, resultHandler.getResult(), handler, this);
}
int buttonCount = resultHandler.getButtonCount();
ViewGroup buttonView = (ViewGroup) findViewById(R.id.result_button_view);
buttonView.requestFocus();
for (int x = 0; x < ResultHandler.MAX_BUTTON_COUNT; x++) {
TextView button = (TextView) buttonView.getChildAt(x);
if (x < buttonCount) {
button.setVisibility(View.VISIBLE);
button.setText(resultHandler.getButtonText(x));
button.setOnClickListener(new ResultButtonListener(resultHandler, x));
} else {
button.setVisibility(View.GONE);
}
}
if (copyToClipboard) {
ClipboardManager clipboard = (ClipboardManager) getSystemService(CLIPBOARD_SERVICE);
clipboard.setText(displayContents);
}
}
|
diff --git a/src/org/mozilla/javascript/Interpreter.java b/src/org/mozilla/javascript/Interpreter.java
index 3f0db7a6..5ef61c42 100644
--- a/src/org/mozilla/javascript/Interpreter.java
+++ b/src/org/mozilla/javascript/Interpreter.java
@@ -1,4117 +1,4120 @@
/* -*- Mode: java; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is Rhino code, released
* May 6, 1999.
*
* The Initial Developer of the Original Code is
* Netscape Communications Corporation.
* Portions created by the Initial Developer are Copyright (C) 1997-2000
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Patrick Beard
* Norris Boyd
* Igor Bukanov
* Ethan Hugg
* Terry Lucas
* Roger Lawrence
* Milen Nankov
*
* Alternatively, the contents of this file may be used under the terms of
* the GNU General Public License Version 2 or later (the "GPL"), in which
* case the provisions of the GPL are applicable instead of those above. If
* you wish to allow use of your version of this file only under the terms of
* the GPL and not to allow others to use your version of this file under the
* MPL, indicate your decision by deleting the provisions above and replacing
* them with the notice and other provisions required by the GPL. If you do
* not delete the provisions above, a recipient may use your version of this
* file under either the MPL or the GPL.
*
* ***** END LICENSE BLOCK ***** */
package org.mozilla.javascript;
import java.io.PrintStream;
import java.io.Serializable;
import org.mozilla.javascript.continuations.Continuation;
import org.mozilla.javascript.debug.DebugFrame;
public class Interpreter
{
// Additional interpreter-specific codes
private static final int
// Stack: ... value1 -> ... value1 value1
Icode_DUP = -1,
// Stack: ... value2 value1 -> ... value2 value1 value2 value1
Icode_DUP2 = -2,
// Stack: ... value2 value1 -> ... value1 value2
Icode_SWAP = -3,
// Stack: ... value1 -> ...
Icode_POP = -4,
// Store stack top into return register and then pop it
Icode_POP_RESULT = -5,
// To jump conditionally and pop additional stack value
Icode_IFEQ_POP = -6,
// various types of ++/--
Icode_VAR_INC_DEC = -7,
Icode_NAME_INC_DEC = -8,
Icode_PROP_INC_DEC = -9,
Icode_ELEM_INC_DEC = -10,
Icode_REF_INC_DEC = -11,
// load/save scope from/to local
Icode_SCOPE_LOAD = -12,
Icode_SCOPE_SAVE = -13,
Icode_TYPEOFNAME = -14,
// helper for function calls
Icode_NAME_AND_THIS = -15,
Icode_PROP_AND_THIS = -16,
Icode_ELEM_AND_THIS = -17,
Icode_VALUE_AND_THIS = -18,
// Create closure object for nested functions
Icode_CLOSURE_EXPR = -19,
Icode_CLOSURE_STMT = -20,
// Special calls
Icode_CALLSPECIAL = -21,
// To return undefined value
Icode_RETUNDEF = -22,
// Exception handling implementation
Icode_GOSUB = -23,
Icode_STARTSUB = -24,
Icode_RETSUB = -25,
// To indicating a line number change in icodes.
Icode_LINE = -26,
// To store shorts and ints inline
Icode_SHORTNUMBER = -27,
Icode_INTNUMBER = -28,
// To create and populate array to hold values for [] and {} literals
Icode_LITERAL_NEW = -29,
Icode_LITERAL_SET = -30,
// Array literal with skipped index like [1,,2]
Icode_SPARE_ARRAYLIT = -31,
// Load index register to prepare for the following index operation
Icode_REG_IND_C0 = -32,
Icode_REG_IND_C1 = -33,
Icode_REG_IND_C2 = -34,
Icode_REG_IND_C3 = -35,
Icode_REG_IND_C4 = -36,
Icode_REG_IND_C5 = -37,
Icode_REG_IND1 = -38,
Icode_REG_IND2 = -39,
Icode_REG_IND4 = -40,
// Load string register to prepare for the following string operation
Icode_REG_STR_C0 = -41,
Icode_REG_STR_C1 = -42,
Icode_REG_STR_C2 = -43,
Icode_REG_STR_C3 = -44,
Icode_REG_STR1 = -45,
Icode_REG_STR2 = -46,
Icode_REG_STR4 = -47,
// Version of getvar/setvar that read var index directly from bytecode
Icode_GETVAR1 = -48,
Icode_SETVAR1 = -49,
// Load unefined
Icode_UNDEF = -50,
Icode_ZERO = -51,
Icode_ONE = -52,
// entrance and exit from .()
Icode_ENTERDQ = -53,
Icode_LEAVEDQ = -54,
Icode_TAIL_CALL = -55,
// Clear local to allow GC its context
Icode_LOCAL_CLEAR = -56,
// Last icode
MIN_ICODE = -56;
// data for parsing
private CompilerEnvirons compilerEnv;
private boolean itsInFunctionFlag;
private InterpreterData itsData;
private ScriptOrFnNode scriptOrFn;
private int itsICodeTop;
private int itsStackDepth;
private int itsLineNumber;
private int itsDoubleTableTop;
private ObjToIntMap itsStrings = new ObjToIntMap(20);
private int itsLocalTop;
private static final int MIN_LABEL_TABLE_SIZE = 32;
private static final int MIN_FIXUP_TABLE_SIZE = 40;
private int[] itsLabelTable;
private int itsLabelTableTop;
// itsFixupTable[i] = (label_index << 32) | fixup_site
private long[] itsFixupTable;
private int itsFixupTableTop;
private ObjArray itsLiteralIds = new ObjArray();
private int itsExceptionTableTop;
private static final int EXCEPTION_TRY_START_SLOT = 0;
private static final int EXCEPTION_TRY_END_SLOT = 1;
private static final int EXCEPTION_HANDLER_SLOT = 2;
private static final int EXCEPTION_TYPE_SLOT = 3;
private static final int EXCEPTION_LOCAL_SLOT = 4;
private static final int EXCEPTION_SCOPE_SLOT = 5;
// SLOT_SIZE: space for try start/end, handler, start, handler type,
// exception local and scope local
private static final int EXCEPTION_SLOT_SIZE = 6;
// ECF_ or Expression Context Flags constants: for now only TAIL is available
private static final int ECF_TAIL = 1 << 0;
/**
 * Class to hold data corresponding to one interpreted call stack frame.
 * Frames form a linked list via parentFrame; a frame captured by a
 * continuation is frozen (read-only) and must be cloned before reuse.
 */
private static class CallFrame implements Cloneable, Serializable
{
static final long serialVersionUID = -2843792508994958978L;
CallFrame parentFrame;
// amount of stack frames before this one on the interpretation stack
int frameIndex;
// If true indicates read-only frame that is a part of continuation
boolean frozen;
InterpretedFunction fnOrScript;
InterpreterData idata;
// Stack structure
// stack[0 <= i < localShift]: arguments and local variables
// stack[localShift <= i <= emptyStackTop]: used for local temporaries
// stack[emptyStackTop < i < stack.length]: stack data
// sDbl[i]: if stack[i] is UniqueTag.DOUBLE_MARK, sDbl[i] holds the number value
Object[] stack;
double[] sDbl;
// Frame whose stack holds this frame's variables; normally this frame
// itself, but a continuation clone points back at the original so that
// variables stay shared.
CallFrame varSource; // defaults to this unless continuation frame
int localShift;
int emptyStackTop;
DebugFrame debuggerFrame;
boolean useActivation;
Scriptable thisObj;
Scriptable[] scriptRegExps;
// The values that change during interpretation
Object result;
double resultDbl;
int pc;
int pcPrevBranch;
int pcSourceLineStart;
Scriptable scope;
int savedStackTop;
int savedCallOp;
// Returns a mutable copy of a frozen frame. The stack arrays are copied,
// but varSource still points at this frame so variables remain shared.
CallFrame cloneFrozen()
{
if (!frozen) Kit.codeBug();
CallFrame copy;
try {
copy = (CallFrame)clone();
} catch (CloneNotSupportedException ex) {
throw new IllegalStateException();
}
// clone stack but keep varSource to point to values
// from this frame to share variables.
copy.stack = (Object[])stack.clone();
copy.sDbl = (double[])sDbl.clone();
copy.frozen = false;
return copy;
}
}
// Describes a jump into a captured continuation: the frame chain to resume
// (capturedFrame), the frame where the captured and current chains meet
// (branchFrame), and the value to deliver (result/resultDbl).
private static final class ContinuationJump implements Serializable
{
static final long serialVersionUID = 7687739156004308247L;
CallFrame capturedFrame;
CallFrame branchFrame;
Object result;
double resultDbl;
// Computes branchFrame as the deepest frame shared by the continuation's
// captured chain and the currently executing chain (null if disjoint).
ContinuationJump(Continuation c, CallFrame current)
{
this.capturedFrame = (CallFrame)c.getImplementation();
if (this.capturedFrame == null || current == null) {
// Continuation and current execution does not share
// any frames if there is nothing to capture or
// if there is no currently executed frames
this.branchFrame = null;
} else {
// Search for branch frame where parent frame chains starting
// from captured and current meet.
CallFrame chain1 = this.capturedFrame;
CallFrame chain2 = current;
// First work parents of chain1 or chain2 until the same
// frame depth.
int diff = chain1.frameIndex - chain2.frameIndex;
if (diff != 0) {
if (diff < 0) {
// swap to make sure that
// chain1.frameIndex > chain2.frameIndex and diff > 0
chain1 = current;
chain2 = this.capturedFrame;
diff = -diff;
}
do {
chain1 = chain1.parentFrame;
} while (--diff != 0);
if (chain1.frameIndex != chain2.frameIndex) Kit.codeBug();
}
// Now walk parents in parallel until a shared frame is found
// or until the root is reached.
while (chain1 != chain2 && chain1 != null) {
chain1 = chain1.parentFrame;
chain2 = chain2.parentFrame;
}
this.branchFrame = chain1;
// A shared frame must be frozen: it belongs to a captured continuation.
if (this.branchFrame != null && !this.branchFrame.frozen)
Kit.codeBug();
}
}
}
static {
    // Sanity checks that token codes and icodes both fit in a signed byte,
    // since the interpreter stores opcodes in a byte[] array. A good compiler
    // can eliminate these checks when the constraints hold.
    if (Token.LAST_BYTECODE_TOKEN > 127) {
        String message = "Violation of Token.LAST_BYTECODE_TOKEN <= 127";
        System.err.println(message);
        throw new IllegalStateException(message);
    }
    if (MIN_ICODE < -128) {
        String message = "Violation of Interpreter.MIN_ICODE >= -128";
        System.err.println(message);
        throw new IllegalStateException(message);
    }
}
// Returns a human-readable name for a bytecode, used when dumping icode.
// Token codes defer to Token.name(); icodes use the table below. When
// Token.printICode is false, only the numeric value is returned (the name
// strings can then be stripped by the compiler/obfuscator).
private static String bytecodeName(int bytecode)
{
if (!validBytecode(bytecode)) {
throw new IllegalArgumentException(String.valueOf(bytecode));
}
if (!Token.printICode) {
return String.valueOf(bytecode);
}
if (validTokenCode(bytecode)) {
return Token.name(bytecode);
}
switch (bytecode) {
case Icode_DUP: return "DUP";
case Icode_DUP2: return "DUP2";
case Icode_SWAP: return "SWAP";
case Icode_POP: return "POP";
case Icode_POP_RESULT: return "POP_RESULT";
case Icode_IFEQ_POP: return "IFEQ_POP";
case Icode_VAR_INC_DEC: return "VAR_INC_DEC";
case Icode_NAME_INC_DEC: return "NAME_INC_DEC";
case Icode_PROP_INC_DEC: return "PROP_INC_DEC";
case Icode_ELEM_INC_DEC: return "ELEM_INC_DEC";
case Icode_REF_INC_DEC: return "REF_INC_DEC";
case Icode_SCOPE_LOAD: return "SCOPE_LOAD";
case Icode_SCOPE_SAVE: return "SCOPE_SAVE";
case Icode_TYPEOFNAME: return "TYPEOFNAME";
case Icode_NAME_AND_THIS: return "NAME_AND_THIS";
case Icode_PROP_AND_THIS: return "PROP_AND_THIS";
case Icode_ELEM_AND_THIS: return "ELEM_AND_THIS";
case Icode_VALUE_AND_THIS: return "VALUE_AND_THIS";
case Icode_CLOSURE_EXPR: return "CLOSURE_EXPR";
case Icode_CLOSURE_STMT: return "CLOSURE_STMT";
case Icode_CALLSPECIAL: return "CALLSPECIAL";
case Icode_RETUNDEF: return "RETUNDEF";
case Icode_GOSUB: return "GOSUB";
case Icode_STARTSUB: return "STARTSUB";
case Icode_RETSUB: return "RETSUB";
case Icode_LINE: return "LINE";
case Icode_SHORTNUMBER: return "SHORTNUMBER";
case Icode_INTNUMBER: return "INTNUMBER";
case Icode_LITERAL_NEW: return "LITERAL_NEW";
case Icode_LITERAL_SET: return "LITERAL_SET";
case Icode_SPARE_ARRAYLIT: return "SPARE_ARRAYLIT";
case Icode_REG_IND_C0: return "REG_IND_C0";
case Icode_REG_IND_C1: return "REG_IND_C1";
case Icode_REG_IND_C2: return "REG_IND_C2";
case Icode_REG_IND_C3: return "REG_IND_C3";
case Icode_REG_IND_C4: return "REG_IND_C4";
case Icode_REG_IND_C5: return "REG_IND_C5";
case Icode_REG_IND1: return "LOAD_IND1";
case Icode_REG_IND2: return "LOAD_IND2";
case Icode_REG_IND4: return "LOAD_IND4";
case Icode_REG_STR_C0: return "REG_STR_C0";
case Icode_REG_STR_C1: return "REG_STR_C1";
case Icode_REG_STR_C2: return "REG_STR_C2";
case Icode_REG_STR_C3: return "REG_STR_C3";
case Icode_REG_STR1: return "LOAD_STR1";
case Icode_REG_STR2: return "LOAD_STR2";
case Icode_REG_STR4: return "LOAD_STR4";
case Icode_GETVAR1: return "GETVAR1";
case Icode_SETVAR1: return "SETVAR1";
case Icode_UNDEF: return "UNDEF";
case Icode_ZERO: return "ZERO";
case Icode_ONE: return "ONE";
case Icode_ENTERDQ: return "ENTERDQ";
case Icode_LEAVEDQ: return "LEAVEDQ";
case Icode_TAIL_CALL: return "TAIL_CALL";
case Icode_LOCAL_CLEAR: return "LOCAL_CLEAR";
}
// icode without name
throw new IllegalStateException(String.valueOf(bytecode));
}
// True when the value is an interpreter-private opcode: any negative value
// down to (and including) MIN_ICODE.
private static boolean validIcode(int icode)
{
    return icode >= MIN_ICODE && icode < 0;
}
// True when the value lies in the range of Token opcodes the interpreter
// emits directly into icode.
private static boolean validTokenCode(int token)
{
    return token >= Token.FIRST_BYTECODE_TOKEN
           && token <= Token.LAST_BYTECODE_TOKEN;
}
// A valid bytecode is either an interpreter-private icode or a Token opcode.
private static boolean validBytecode(int bytecode)
{
    if (validIcode(bytecode)) {
        return true;
    }
    return validTokenCode(bytecode);
}
/**
 * Compiles a transformed parse tree into interpreter bytecode.
 *
 * @param compilerEnv compilation settings (language version, flags)
 * @param tree the parsed script; when {@code returnFunction} is true its
 *     first function node is compiled instead of the script itself
 * @param encodedSource encoded source text kept for decompilation
 * @param returnFunction whether to compile a single function rather than a script
 * @return the populated {@link InterpreterData} (opaque bytecode handle)
 */
public Object compile(CompilerEnvirons compilerEnv,
ScriptOrFnNode tree,
String encodedSource,
boolean returnFunction)
{
this.compilerEnv = compilerEnv;
new NodeTransformer().transform(tree);
// Optional debug dump of the transformed AST.
if (Token.printTrees) {
System.out.println(tree.toStringTree(tree));
}
if (returnFunction) {
tree = tree.getFunctionNode(0);
}
scriptOrFn = tree;
itsData = new InterpreterData(compilerEnv.getLanguageVersion(),
scriptOrFn.getSourceName(),
encodedSource);
itsData.topLevel = true;
if (returnFunction) {
generateFunctionICode();
} else {
generateICodeFromTree(scriptOrFn);
}
return itsData;
}
// Instantiates a Script from bytecode previously produced by this compiler;
// any other bytecode object is a programming error.
public Script createScriptObject(Object bytecode, Object staticSecurityDomain)
{
    if (bytecode != itsData) {
        Kit.codeBug();
    }
    return InterpretedFunction.createScript(itsData, staticSecurityDomain);
}
// Instantiates a Function from bytecode previously produced by this compiler;
// any other bytecode object is a programming error.
public Function createFunctionObject(Context cx, Scriptable scope,
Object bytecode, Object staticSecurityDomain)
{
    if (bytecode != itsData) {
        Kit.codeBug();
    }
    return InterpretedFunction.createFunction(cx, scope, itsData,
                                              staticSecurityDomain);
}
// Populates itsData from the function node held in scriptOrFn and then
// generates icode for the function body.
private void generateFunctionICode()
{
    itsInFunctionFlag = true;
    FunctionNode fn = (FunctionNode)scriptOrFn;
    itsData.itsFunctionType = fn.getFunctionType();
    itsData.itsNeedsActivation = fn.requiresActivation();
    itsData.itsName = fn.getFunctionName();
    // Dynamic scope applies only when the environment requests it and the
    // function does not opt out.
    boolean dynamicScope = !fn.getIgnoreDynamicScope()
                           && compilerEnv.isUseDynamicScope();
    if (dynamicScope) {
        itsData.useDynamicScope = true;
    }
    generateICodeFromTree(fn.getLastChild());
}
// Generates icode for the given tree and then finalizes itsData: trims all
// growable tables to their exact sizes and fills in the summary fields the
// interpreter needs (max vars/frame size, arg names, encoded-source span).
private void generateICodeFromTree(Node tree)
{
generateNestedFunctions();
generateRegExpLiterals();
visitStatement(tree);
fixLabelGotos();
// add RETURN_RESULT only to scripts as function always ends with RETURN
if (itsData.itsFunctionType == 0) {
addToken(Token.RETURN_RESULT);
}
if (itsData.itsICode.length != itsICodeTop) {
// Make itsData.itsICode length exactly itsICodeTop to save memory
// and catch bugs with jumps beyound icode as early as possible
byte[] tmp = new byte[itsICodeTop];
System.arraycopy(itsData.itsICode, 0, tmp, 0, itsICodeTop);
itsData.itsICode = tmp;
}
// Convert the string-to-index map into the dense string table the
// interpreter indexes at runtime.
if (itsStrings.size() == 0) {
itsData.itsStringTable = null;
} else {
itsData.itsStringTable = new String[itsStrings.size()];
ObjToIntMap.Iterator iter = itsStrings.newIterator();
for (iter.start(); !iter.done(); iter.next()) {
String str = (String)iter.getKey();
int index = iter.getValue();
if (itsData.itsStringTable[index] != null) Kit.codeBug();
itsData.itsStringTable[index] = str;
}
}
// Trim (or drop) the double and exception tables as well.
if (itsDoubleTableTop == 0) {
itsData.itsDoubleTable = null;
} else if (itsData.itsDoubleTable.length != itsDoubleTableTop) {
double[] tmp = new double[itsDoubleTableTop];
System.arraycopy(itsData.itsDoubleTable, 0, tmp, 0,
itsDoubleTableTop);
itsData.itsDoubleTable = tmp;
}
if (itsExceptionTableTop != 0
&& itsData.itsExceptionTable.length != itsExceptionTableTop)
{
int[] tmp = new int[itsExceptionTableTop];
System.arraycopy(itsData.itsExceptionTable, 0, tmp, 0,
itsExceptionTableTop);
itsData.itsExceptionTable = tmp;
}
itsData.itsMaxVars = scriptOrFn.getParamAndVarCount();
// itsMaxFrameArray: interpret method needs this amount for its
// stack and sDbl arrays
itsData.itsMaxFrameArray = itsData.itsMaxVars
+ itsData.itsMaxLocals
+ itsData.itsMaxStack;
itsData.argNames = scriptOrFn.getParamAndVarNames();
itsData.argCount = scriptOrFn.getParamCount();
itsData.encodedSourceStart = scriptOrFn.getEncodedSourceStart();
itsData.encodedSourceEnd = scriptOrFn.getEncodedSourceEnd();
if (itsLiteralIds.size() != 0) {
itsData.literalIds = itsLiteralIds.toArray();
}
if (Token.printICode) dumpICode(itsData);
}
/**
 * Recursively compiles every function nested in the current script or
 * function and stores the resulting InterpreterData array in itsData.
 */
private void generateNestedFunctions()
{
    int count = scriptOrFn.getFunctionCount();
    if (count == 0) return;

    InterpreterData[] nested = new InterpreterData[count];
    for (int i = 0; i < count; ++i) {
        FunctionNode fn = scriptOrFn.getFunctionNode(i);
        // Each nested function gets its own Interpreter instance sharing
        // the compiler environment, with its data parented to ours.
        Interpreter compiler = new Interpreter();
        compiler.compilerEnv = compilerEnv;
        compiler.scriptOrFn = fn;
        compiler.itsData = new InterpreterData(itsData);
        compiler.generateFunctionICode();
        nested[i] = compiler.itsData;
    }
    itsData.itsNestedFunctions = nested;
}
/**
 * Pre-compiles every regular-expression literal of the current script or
 * function via the context's RegExpProxy and stores the compiled objects
 * in itsData.itsRegExpLiterals.
 */
private void generateRegExpLiterals()
{
    int count = scriptOrFn.getRegexpCount();
    if (count == 0) return;

    Context cx = Context.getContext();
    RegExpProxy proxy = ScriptRuntime.checkRegExpProxy(cx);
    Object[] compiled = new Object[count];
    for (int i = 0; i < count; ++i) {
        String source = scriptOrFn.getRegexpString(i);
        String flags = scriptOrFn.getRegexpFlags(i);
        compiled[i] = proxy.compileRegExp(cx, source, flags);
    }
    itsData.itsRegExpLiterals = compiled;
}
/**
 * Emits an Icode_LINE instruction when the node's source line differs
 * from the last recorded one, and remembers the first line PC seen.
 */
private void updateLineNumber(Node node)
{
    int line = node.getLineno();
    // Nothing to do for unknown lines or when the line has not changed.
    if (line < 0 || line == itsLineNumber) return;

    if (itsData.firstLinePC < 0) {
        itsData.firstLinePC = line;
    }
    itsLineNumber = line;
    addIcode(Icode_LINE);
    addUint16(line & 0xFFFF);
}
/**
 * Reports an unexpected parse-tree node. Declared to return
 * RuntimeException so callers can write {@code throw badTree(node)},
 * but it always throws and never actually returns.
 */
private RuntimeException badTree(Node node)
{
    throw new RuntimeException(node.toString());
}
/**
 * Emits icode for a statement node and, recursively, its children.
 * Invariant: a statement leaves the operand stack empty, which is
 * asserted at the end of this method.
 */
private void visitStatement(Node node)
{
    int type = node.getType();
    Node child = node.getFirstChild();
    switch (type) {

      case Token.FUNCTION:
        {
            int fnIndex = node.getExistingIntProp(Node.FUNCTION_PROP);
            int fnType = scriptOrFn.getFunctionNode(fnIndex).
                             getFunctionType();
            // Only function expressions or function expression
            // statements need closure code creating new function
            // object on stack as function statements are initialized
            // at script/function start
            // In addition function expressions can not be present here
            // at statement level, they must only be present as expressions.
            if (fnType == FunctionNode.FUNCTION_EXPRESSION_STATEMENT) {
                addIndexOp(Icode_CLOSURE_STMT, fnIndex);
            } else {
                if (fnType != FunctionNode.FUNCTION_STATEMENT) {
                    throw Kit.codeBug();
                }
            }
        }
        break;

      // Pure containers: just visit children in order.
      case Token.SCRIPT:
      case Token.LABEL:
      case Token.LOOP:
      case Token.BLOCK:
      case Token.EMPTY:
      case Token.WITH:
        updateLineNumber(node);
        while (child != null) {
            visitStatement(child);
            child = child.getNext();
        }
        break;

      case Token.ENTERWITH:
        visitExpression(child, 0);
        addToken(Token.ENTERWITH);
        stackChange(-1);
        break;

      case Token.LEAVEWITH:
        addToken(Token.LEAVEWITH);
        break;

      case Token.LOCAL_BLOCK:
        {
            // Allocate a local slot for the block's lifetime and record it
            // on the node so nested statements can find it via
            // getLocalBlockRef.
            int local = allocLocal();
            node.putIntProp(Node.LOCAL_PROP, local);
            updateLineNumber(node);
            while (child != null) {
                visitStatement(child);
                child = child.getNext();
            }
            addIndexOp(Icode_LOCAL_CLEAR, local);
            releaseLocal(local);
        }
        break;

      case Token.SWITCH:
        updateLineNumber(node);
        // See comments in IRFactory.createSwitch() for description
        // of SWITCH node
        {
            visitExpression(child, 0);
            for (Node.Jump caseNode = (Node.Jump)child.getNext();
                 caseNode != null;
                 caseNode = (Node.Jump)caseNode.getNext())
            {
                if (caseNode.getType() != Token.CASE)
                    throw badTree(caseNode);
                Node test = caseNode.getFirstChild();
                // Duplicate the switch value so each case can compare it.
                addIcode(Icode_DUP);
                stackChange(1);
                visitExpression(test, 0);
                addToken(Token.SHEQ);
                stackChange(-1);
                // If true, Icode_IFEQ_POP will jump and remove case
                // value from stack
                addGoto(caseNode.target, Icode_IFEQ_POP);
                stackChange(-1);
            }
            // No case matched: discard the switch value.
            addIcode(Icode_POP);
            stackChange(-1);
        }
        break;

      case Token.TARGET:
        markTargetLabel(node);
        break;

      case Token.IFEQ :
      case Token.IFNE :
        {
            Node target = ((Node.Jump)node).target;
            visitExpression(child, 0);
            addGoto(target, type);
            stackChange(-1);
        }
        break;

      case Token.GOTO:
        {
            Node target = ((Node.Jump)node).target;
            addGoto(target, type);
        }
        break;

      case Token.JSR:
        {
            Node target = ((Node.Jump)node).target;
            addGoto(target, Icode_GOSUB);
        }
        break;

      case Token.FINALLY:
        {
            // Account for incoming GOTOSUB address
            stackChange(1);
            int finallyRegister = getLocalBlockRef(node);
            addIndexOp(Icode_STARTSUB, finallyRegister);
            stackChange(-1);
            while (child != null) {
                visitStatement(child);
                child = child.getNext();
            }
            addIndexOp(Icode_RETSUB, finallyRegister);
        }
        break;

      case Token.EXPR_VOID:
      case Token.EXPR_RESULT:
        updateLineNumber(node);
        visitExpression(child, 0);
        // EXPR_RESULT keeps the value as the script result; EXPR_VOID
        // just discards it.
        addIcode((type == Token.EXPR_VOID) ? Icode_POP : Icode_POP_RESULT);
        stackChange(-1);
        break;

      case Token.TRY:
        {
            Node.Jump tryNode = (Node.Jump)node;
            int exceptionObjectLocal = getLocalBlockRef(tryNode);
            // Save the current scope so handlers can restore it.
            int scopeLocal = allocLocal();
            addIndexOp(Icode_SCOPE_SAVE, scopeLocal);

            int tryStart = itsICodeTop;
            while (child != null) {
                visitStatement(child);
                child = child.getNext();
            }

            Node catchTarget = tryNode.target;
            if (catchTarget != null) {
                int catchStartPC
                    = itsLabelTable[getTargetLabel(catchTarget)];
                addExceptionHandler(
                    tryStart, catchStartPC, catchStartPC,
                    false, exceptionObjectLocal, scopeLocal);
            }
            Node finallyTarget = tryNode.getFinally();
            if (finallyTarget != null) {
                int finallyStartPC
                    = itsLabelTable[getTargetLabel(finallyTarget)];
                addExceptionHandler(
                    tryStart, finallyStartPC, finallyStartPC,
                    true, exceptionObjectLocal, scopeLocal);
            }

            addIndexOp(Icode_LOCAL_CLEAR, scopeLocal);
            releaseLocal(scopeLocal);
        }
        break;

      case Token.CATCH_SCOPE:
        {
            int localIndex = getLocalBlockRef(node);
            int scopeIndex = node.getExistingIntProp(Node.CATCH_SCOPE_PROP);
            String name = child.getString();
            child = child.getNext();
            visitExpression(child, 0); // load expression object
            addStringPrefix(name);
            addIndexPrefix(localIndex);
            addToken(Token.CATCH_SCOPE);
            addUint8(scopeIndex != 0 ? 1 : 0);
            stackChange(-1);
        }
        break;

      case Token.THROW:
        updateLineNumber(node);
        visitExpression(child, 0);
        addToken(Token.THROW);
        // Line number is embedded for the error message.
        addUint16(itsLineNumber & 0xFFFF);
        stackChange(-1);
        break;

      case Token.RETHROW:
        updateLineNumber(node);
        addIndexOp(Token.RETHROW, getLocalBlockRef(node));
        break;

      case Token.RETURN:
        updateLineNumber(node);
        if (child != null) {
            // ECF_TAIL: the returned expression is in tail position.
            visitExpression(child, ECF_TAIL);
            addToken(Token.RETURN);
            stackChange(-1);
        } else {
            addIcode(Icode_RETUNDEF);
        }
        break;

      case Token.RETURN_RESULT:
        updateLineNumber(node);
        addToken(Token.RETURN_RESULT);
        break;

      case Token.ENUM_INIT_KEYS:
      case Token.ENUM_INIT_VALUES :
        visitExpression(child, 0);
        addIndexOp(type, getLocalBlockRef(node));
        stackChange(-1);
        break;

      default:
        throw badTree(node);
    }

    // Statements must leave the operand stack empty.
    if (itsStackDepth != 0) {
        throw Kit.codeBug();
    }
}
/**
 * Emits icode for an expression node. Invariant: an expression pushes
 * exactly one value onto the operand stack, which is asserted at the
 * end of this method.
 *
 * @param contextFlags currently only ECF_TAIL, marking tail position so
 *        calls can be turned into Icode_TAIL_CALL
 */
private void visitExpression(Node node, int contextFlags)
{
    int type = node.getType();
    Node child = node.getFirstChild();
    int savedStackDepth = itsStackDepth;
    switch (type) {

      case Token.FUNCTION:
        {
            int fnIndex = node.getExistingIntProp(Node.FUNCTION_PROP);
            FunctionNode fn = scriptOrFn.getFunctionNode(fnIndex);
            // See comments in visitStatement for Token.FUNCTION case
            if (fn.getFunctionType() != FunctionNode.FUNCTION_EXPRESSION) {
                throw Kit.codeBug();
            }
            addIndexOp(Icode_CLOSURE_EXPR, fnIndex);
            stackChange(1);
        }
        break;

      case Token.LOCAL_LOAD:
        {
            int localIndex = getLocalBlockRef(node);
            addIndexOp(Token.LOCAL_LOAD, localIndex);
            stackChange(1);
        }
        break;

      case Token.COMMA:
        {
            // Evaluate and discard all but the last operand.
            Node lastChild = node.getLastChild();
            while (child != lastChild) {
                visitExpression(child, 0);
                addIcode(Icode_POP);
                stackChange(-1);
                child = child.getNext();
            }
            // Preserve tail context flag if any
            visitExpression(child, contextFlags & ECF_TAIL);
        }
        break;

      case Token.USE_STACK:
        // Indicates that stack was modified externally,
        // like placed catch object
        stackChange(1);
        break;

      case Token.REF_CALL:
      case Token.CALL:
      case Token.NEW:
        {
            if (type == Token.NEW) {
                visitExpression(child, 0);
            } else {
                // Pushes both the function and its thisObj.
                generateCallFunAndThis(child);
            }
            int argCount = 0;
            while ((child = child.getNext()) != null) {
                visitExpression(child, 0);
                ++argCount;
            }
            int callType = node.getIntProp(Node.SPECIALCALL_PROP,
                                           Node.NON_SPECIALCALL);
            if (callType != Node.NON_SPECIALCALL) {
                // embed line number and source filename
                addIndexOp(Icode_CALLSPECIAL, argCount);
                addUint8(callType);
                addUint8(type == Token.NEW ? 1 : 0);
                addUint16(itsLineNumber & 0xFFFF);
            } else {
                if (type == Token.CALL) {
                    if ((contextFlags & ECF_TAIL) != 0) {
                        type = Icode_TAIL_CALL;
                    }
                }
                addIndexOp(type, argCount);
            }
            // adjust stack
            if (type == Token.NEW) {
                // new: f, args -> result
                stackChange(-argCount);
            } else {
                // call: f, thisObj, args -> result
                // ref_call: f, thisObj, args -> ref
                stackChange(-1 - argCount);
            }
            if (argCount > itsData.itsMaxCalleeArgs) {
                itsData.itsMaxCalleeArgs = argCount;
            }
        }
        break;

      case Token.AND:
      case Token.OR:
        {
            // Short-circuit: duplicate the first value, jump past the
            // second operand when it decides the result.
            visitExpression(child, 0);
            addIcode(Icode_DUP);
            stackChange(1);
            int afterSecondJumpStart = itsICodeTop;
            int jump = (type == Token.AND) ? Token.IFNE : Token.IFEQ;
            addGotoOp(jump);
            stackChange(-1);
            addIcode(Icode_POP);
            stackChange(-1);
            child = child.getNext();
            // Preserve tail context flag if any
            visitExpression(child, contextFlags & ECF_TAIL);
            resolveForwardGoto(afterSecondJumpStart);
        }
        break;

      case Token.HOOK:
        {
            Node ifThen = child.getNext();
            Node ifElse = ifThen.getNext();
            visitExpression(child, 0);
            int elseJumpStart = itsICodeTop;
            addGotoOp(Token.IFNE);
            stackChange(-1);
            // Preserve tail context flag if any
            visitExpression(ifThen, contextFlags & ECF_TAIL);
            int afterElseJumpStart = itsICodeTop;
            addGotoOp(Token.GOTO);
            resolveForwardGoto(elseJumpStart);
            // Only one branch executes; reset the depth before the else.
            itsStackDepth = savedStackDepth;
            // Preserve tail context flag if any
            visitExpression(ifElse, contextFlags & ECF_TAIL);
            resolveForwardGoto(afterElseJumpStart);
        }
        break;

      case Token.GETPROP:
        visitExpression(child, 0);
        child = child.getNext();
        addStringOp(Token.GETPROP, child.getString());
        break;

      // Binary operators: evaluate both operands, emit the token,
      // two values collapse into one.
      case Token.GETELEM:
      case Token.DELPROP:
      case Token.BITAND:
      case Token.BITOR:
      case Token.BITXOR:
      case Token.LSH:
      case Token.RSH:
      case Token.URSH:
      case Token.ADD:
      case Token.SUB:
      case Token.MOD:
      case Token.DIV:
      case Token.MUL:
      case Token.EQ:
      case Token.NE:
      case Token.SHEQ:
      case Token.SHNE:
      case Token.IN:
      case Token.INSTANCEOF:
      case Token.LE:
      case Token.LT:
      case Token.GE:
      case Token.GT:
        visitExpression(child, 0);
        child = child.getNext();
        visitExpression(child, 0);
        addToken(type);
        stackChange(-1);
        break;

      // Unary operators: stack depth is unchanged.
      case Token.POS:
      case Token.NEG:
      case Token.NOT:
      case Token.BITNOT:
      case Token.TYPEOF:
      case Token.VOID:
        visitExpression(child, 0);
        if (type == Token.VOID) {
            addIcode(Icode_POP);
            addIcode(Icode_UNDEF);
        } else {
            addToken(type);
        }
        break;

      case Token.GET_REF:
      case Token.DEL_REF:
        visitExpression(child, 0);
        addToken(type);
        break;

      case Token.SETPROP:
      case Token.SETPROP_OP:
        {
            visitExpression(child, 0);
            child = child.getNext();
            String property = child.getString();
            child = child.getNext();
            if (type == Token.SETPROP_OP) {
                // Compound assignment: read the current value first.
                addIcode(Icode_DUP);
                stackChange(1);
                addStringOp(Token.GETPROP, property);
                // Compensate for the following USE_STACK
                stackChange(-1);
            }
            visitExpression(child, 0);
            addStringOp(Token.SETPROP, property);
            stackChange(-1);
        }
        break;

      case Token.SETELEM:
      case Token.SETELEM_OP:
        visitExpression(child, 0);
        child = child.getNext();
        visitExpression(child, 0);
        child = child.getNext();
        if (type == Token.SETELEM_OP) {
            // Compound assignment: read the current element first.
            addIcode(Icode_DUP2);
            stackChange(2);
            addToken(Token.GETELEM);
            stackChange(-1);
            // Compensate for the following USE_STACK
            stackChange(-1);
        }
        visitExpression(child, 0);
        addToken(Token.SETELEM);
        stackChange(-2);
        break;

      case Token.SET_REF:
      case Token.SET_REF_OP:
        visitExpression(child, 0);
        child = child.getNext();
        if (type == Token.SET_REF_OP) {
            addIcode(Icode_DUP);
            stackChange(1);
            addToken(Token.GET_REF);
            // Compensate for the following USE_STACK
            stackChange(-1);
        }
        visitExpression(child, 0);
        addToken(Token.SET_REF);
        stackChange(-1);
        break;

      case Token.SETNAME:
        {
            String name = child.getString();
            visitExpression(child, 0);
            child = child.getNext();
            visitExpression(child, 0);
            addStringOp(Token.SETNAME, name);
            stackChange(-1);
        }
        break;

      case Token.TYPEOFNAME:
        {
            String name = node.getString();
            int index = -1;
            // use typeofname if an activation frame exists
            // since the vars all exist there instead of in jregs
            if (itsInFunctionFlag && !itsData.itsNeedsActivation)
                index = scriptOrFn.getParamOrVarIndex(name);
            if (index == -1) {
                addStringOp(Icode_TYPEOFNAME, name);
                stackChange(1);
            } else {
                addVarOp(Token.GETVAR, index);
                stackChange(1);
                addToken(Token.TYPEOF);
            }
        }
        break;

      case Token.BINDNAME:
      case Token.NAME:
      case Token.STRING:
        addStringOp(type, node.getString());
        stackChange(1);
        break;

      case Token.INC:
      case Token.DEC:
        visitIncDec(node, child);
        break;

      case Token.NUMBER:
        {
            // Pick the most compact encoding for the numeric literal.
            double num = node.getDouble();
            int inum = (int)num;
            if (inum == num) {
                if (inum == 0) {
                    addIcode(Icode_ZERO);
                    // Check for negative zero
                    if (1.0 / num < 0.0) {
                        addToken(Token.NEG);
                    }
                } else if (inum == 1) {
                    addIcode(Icode_ONE);
                } else if ((short)inum == inum) {
                    addIcode(Icode_SHORTNUMBER);
                    // write short as uint16 bit pattern
                    addUint16(inum & 0xFFFF);
                } else {
                    addIcode(Icode_INTNUMBER);
                    addInt(inum);
                }
            } else {
                // Non-integral: store in the double table.
                int index = getDoubleIndex(num);
                addIndexOp(Token.NUMBER, index);
            }
            stackChange(1);
        }
        break;

      case Token.GETVAR:
        {
            if (itsData.itsNeedsActivation) Kit.codeBug();
            String name = node.getString();
            int index = scriptOrFn.getParamOrVarIndex(name);
            addVarOp(Token.GETVAR, index);
            stackChange(1);
        }
        break;

      case Token.SETVAR:
        {
            if (itsData.itsNeedsActivation) Kit.codeBug();
            String name = child.getString();
            child = child.getNext();
            visitExpression(child, 0);
            int index = scriptOrFn.getParamOrVarIndex(name);
            addVarOp(Token.SETVAR, index);
        }
        break;

      case Token.NULL:
      case Token.THIS:
      case Token.THISFN:
      case Token.FALSE:
      case Token.TRUE:
        addToken(type);
        stackChange(1);
        break;

      case Token.ENUM_NEXT:
      case Token.ENUM_ID:
        addIndexOp(type, getLocalBlockRef(node));
        stackChange(1);
        break;

      case Token.REGEXP:
        {
            int index = node.getExistingIntProp(Node.REGEXP_PROP);
            addIndexOp(Token.REGEXP, index);
            stackChange(1);
        }
        break;

      case Token.ARRAYLIT:
      case Token.OBJECTLIT:
        visitLiteral(node, child);
        break;

      case Token.REF_SPECIAL:
        visitExpression(child, 0);
        addStringOp(type, (String)node.getProp(Node.NAME_PROP));
        break;

      case Token.REF_MEMBER:
      case Token.REF_NS_MEMBER:
      case Token.REF_NAME:
      case Token.REF_NS_NAME:
        {
            int memberTypeFlags = node.getIntProp(Node.MEMBER_TYPE_PROP, 0);
            // generate possible target, possible namespace and member
            int childCount = 0;
            do {
                visitExpression(child, 0);
                ++childCount;
                child = child.getNext();
            } while (child != null);
            addIndexOp(type, memberTypeFlags);
            stackChange(1 - childCount);
        }
        break;

      case Token.DOTQUERY:
        {
            int queryPC;
            updateLineNumber(node);
            visitExpression(child, 0);
            addIcode(Icode_ENTERDQ);
            stackChange(-1);
            queryPC = itsICodeTop;
            visitExpression(child.getNext(), 0);
            // Loop back while the query has more objects to filter.
            addBackwardGoto(Icode_LEAVEDQ, queryPC);
        }
        break;

      case Token.DEFAULTNAMESPACE :
      case Token.ESCXMLATTR :
      case Token.ESCXMLTEXT :
        visitExpression(child, 0);
        addToken(type);
        break;

      default:
        throw badTree(node);
    }

    // An expression must grow the stack by exactly one value.
    if (savedStackDepth + 1 != itsStackDepth) {
        Kit.codeBug();
    }
}
/**
 * Emits icode that leaves the callee function and its thisObj on the
 * stack (net stack change: +2) for a subsequent CALL-style instruction.
 * The combined *_AND_THIS icodes let the interpreter compute both in
 * one step.
 */
private void generateCallFunAndThis(Node left)
{
    // Generate code to place on stack function and thisObj
    int type = left.getType();
    switch (type) {
      case Token.NAME: {
        String name = left.getString();
        // stack: ... -> ... function thisObj
        addStringOp(Icode_NAME_AND_THIS, name);
        stackChange(2);
        break;
      }
      case Token.GETPROP:
      case Token.GETELEM: {
        Node target = left.getFirstChild();
        visitExpression(target, 0);
        Node id = target.getNext();
        if (type == Token.GETPROP) {
            String property = id.getString();
            // stack: ... target -> ... function thisObj
            addStringOp(Icode_PROP_AND_THIS, property);
            stackChange(1);
        } else {
            visitExpression(id, 0);
            // stack: ... target id -> ... function thisObj
            addIcode(Icode_ELEM_AND_THIS);
        }
        break;
      }
      default:
        // Including Token.GETVAR
        visitExpression(left, 0);
        // stack: ... value -> ... function thisObj
        addIcode(Icode_VALUE_AND_THIS);
        stackChange(1);
        break;
    }
}
/**
 * Emits icode for ++/-- applied to a variable, name, property, element
 * or reference. The pre/post and inc/dec details are packed into the
 * INCRDECR_PROP mask appended as a uint8 operand. Net stack change of
 * the whole construct is +1 (the expression result).
 */
private void visitIncDec(Node node, Node child)
{
    int incrDecrMask = node.getExistingIntProp(Node.INCRDECR_PROP);
    int childType = child.getType();
    switch (childType) {
      case Token.GETVAR : {
        // Direct var slot access is only valid without an activation.
        if (itsData.itsNeedsActivation) Kit.codeBug();
        String name = child.getString();
        int i = scriptOrFn.getParamOrVarIndex(name);
        addVarOp(Icode_VAR_INC_DEC, i);
        addUint8(incrDecrMask);
        stackChange(1);
        break;
      }
      case Token.NAME : {
        String name = child.getString();
        addStringOp(Icode_NAME_INC_DEC, name);
        addUint8(incrDecrMask);
        stackChange(1);
        break;
      }
      case Token.GETPROP : {
        Node object = child.getFirstChild();
        visitExpression(object, 0);
        String property = object.getNext().getString();
        // object on stack is replaced by the result: no net change here.
        addStringOp(Icode_PROP_INC_DEC, property);
        addUint8(incrDecrMask);
        break;
      }
      case Token.GETELEM : {
        Node object = child.getFirstChild();
        visitExpression(object, 0);
        Node index = object.getNext();
        visitExpression(index, 0);
        // object and index collapse into the result value.
        addIcode(Icode_ELEM_INC_DEC);
        addUint8(incrDecrMask);
        stackChange(-1);
        break;
      }
      case Token.GET_REF : {
        Node ref = child.getFirstChild();
        visitExpression(ref, 0);
        addIcode(Icode_REF_INC_DEC);
        addUint8(incrDecrMask);
        break;
      }
      default : {
        throw badTree(node);
      }
    }
}
/**
 * Emits icode for an array or object literal: create the literal buffer
 * sized to the element count, set each element in order, then emit the
 * finishing instruction. Object literals and sparse array literals
 * store their id arrays in itsLiteralIds, referenced by index.
 */
private void visitLiteral(Node node, Node child)
{
    int type = node.getType();
    int count;
    Object[] propertyIds = null;
    if (type == Token.ARRAYLIT) {
        count = 0;
        for (Node n = child; n != null; n = n.getNext()) {
            ++count;
        }
    } else if (type == Token.OBJECTLIT) {
        propertyIds = (Object[])node.getProp(Node.OBJECT_IDS_PROP);
        count = propertyIds.length;
    } else {
        throw badTree(node);
    }
    addIndexOp(Icode_LITERAL_NEW, count);
    stackChange(1);
    while (child != null) {
        visitExpression(child, 0);
        addIcode(Icode_LITERAL_SET);
        stackChange(-1);
        child = child.getNext();
    }
    if (type == Token.ARRAYLIT) {
        int[] skipIndexes = (int[])node.getProp(Node.SKIP_INDEXES_PROP);
        if (skipIndexes == null) {
            addToken(Token.ARRAYLIT);
        } else {
            // Sparse array: record which indexes were skipped.
            int index = itsLiteralIds.size();
            itsLiteralIds.add(skipIndexes);
            addIndexOp(Icode_SPARE_ARRAYLIT, index);
        }
    } else {
        int index = itsLiteralIds.size();
        itsLiteralIds.add(propertyIds);
        addIndexOp(Token.OBJECTLIT, index);
    }
}
/**
 * Returns the local slot index of the LOCAL_BLOCK enclosing the node,
 * as stored there by visitStatement's LOCAL_BLOCK case.
 */
private int getLocalBlockRef(Node node)
{
    Node block = (Node)node.getProp(Node.LOCAL_BLOCK_PROP);
    return block.getExistingIntProp(Node.LOCAL_PROP);
}
/**
 * Returns the label id for a jump target, allocating a new slot in
 * itsLabelTable on first use. A slot value of -1 means the label's PC
 * has not been marked yet (see markTargetLabel).
 */
private int getTargetLabel(Node target)
{
    int label = target.labelId();
    if (label != -1) {
        return label;
    }
    label = itsLabelTableTop;
    if (itsLabelTable == null || label == itsLabelTable.length) {
        // Lazily create or double the label table.
        if (itsLabelTable == null) {
            itsLabelTable = new int[MIN_LABEL_TABLE_SIZE];
        } else {
            int[] tmp = new int[itsLabelTable.length * 2];
            System.arraycopy(itsLabelTable, 0, tmp, 0, label);
            itsLabelTable = tmp;
        }
    }
    itsLabelTableTop = label + 1;
    itsLabelTable[label] = -1; // PC not located yet
    target.labelId(label);
    return label;
}
/**
 * Records the current icode position as the PC of the target's label.
 * Each label may be marked exactly once.
 */
private void markTargetLabel(Node target)
{
    int label = getTargetLabel(target);
    if (itsLabelTable[label] != -1) {
        // Can mark label only once
        Kit.codeBug();
    }
    itsLabelTable[label] = itsICodeTop;
}
/**
 * Emits a goto-style instruction to the given target. If the target's
 * PC is already known the jump is resolved immediately; otherwise the
 * (label, gotoPC) pair is queued in itsFixupTable for fixLabelGotos to
 * patch once all labels are marked.
 */
private void addGoto(Node target, int gotoOp)
{
    int label = getTargetLabel(target);
    if (!(label < itsLabelTableTop)) Kit.codeBug();
    int targetPC = itsLabelTable[label];

    if (targetPC != -1) {
        addBackwardGoto(gotoOp, targetPC);
    } else {
        int gotoPC = itsICodeTop;
        addGotoOp(gotoOp);
        int top = itsFixupTableTop;
        if (itsFixupTable == null || top == itsFixupTable.length) {
            // Lazily create or double the fixup table.
            if (itsFixupTable == null) {
                itsFixupTable = new long[MIN_FIXUP_TABLE_SIZE];
            } else {
                long[] tmp = new long[itsFixupTable.length * 2];
                System.arraycopy(itsFixupTable, 0, tmp, 0, top);
                itsFixupTable = tmp;
            }
        }
        itsFixupTableTop = top + 1;
        // Pack label id in the high 32 bits, goto PC in the low 32 bits.
        itsFixupTable[top] = ((long)label << 32) | gotoPC;
    }
}
/**
 * Patches every queued forward goto with its label's now-known PC and
 * empties the fixup table. Called once after the whole tree is visited.
 */
private void fixLabelGotos()
{
    int count = itsFixupTableTop;
    for (int i = 0; i != count; ++i) {
        long entry = itsFixupTable[i];
        // Unpack: label id in the high 32 bits, goto PC in the low 32.
        int labelId = (int)(entry >> 32);
        int gotoPC = (int)entry;
        int targetPC = itsLabelTable[labelId];
        if (targetPC == -1) {
            // Label was referenced but never marked.
            throw Kit.codeBug();
        }
        resolveGoto(gotoPC, targetPC);
    }
    itsFixupTableTop = 0;
}
/**
 * Emits a goto whose target PC is already known; the target must lie
 * strictly before the current icode position.
 */
private void addBackwardGoto(int gotoOp, int jumpPC)
{
    int fromPC = itsICodeTop;
    if (jumpPC >= fromPC) throw Kit.codeBug(); // must jump backward
    addGotoOp(gotoOp);
    resolveGoto(fromPC, jumpPC);
}
/**
 * Patches the forward goto emitted at fromPC to jump to the current
 * icode position.
 */
private void resolveForwardGoto(int fromPC)
{
    // A goto instruction is 3 bytes; a forward jump must at least skip it.
    if (fromPC + 3 > itsICodeTop) throw Kit.codeBug();
    resolveGoto(fromPC, itsICodeTop);
}
/**
 * Writes the 2-byte big-endian jump offset of the goto at fromPC so it
 * jumps to jumpPC. Offsets that do not fit in a signed 16-bit value are
 * recorded in itsData.longJumps keyed by the offset site, with 0 stored
 * inline as the sentinel.
 */
private void resolveGoto(int fromPC, int jumpPC)
{
    int offset = jumpPC - fromPC;
    // Ensure that jumps do not overlap
    if (0 <= offset && offset <= 2) throw Kit.codeBug();
    int offsetSite = fromPC + 1;
    if (offset != (short)offset) {
        // Too far for the inline short offset: use the long-jump map.
        if (itsData.longJumps == null) {
            itsData.longJumps = new UintMap();
        }
        itsData.longJumps.put(offsetSite, jumpPC);
        offset = 0;
    }
    byte[] array = itsData.itsICode;
    array[offsetSite] = (byte)(offset >> 8);
    array[offsetSite + 1] = (byte)offset;
}
/**
 * Appends a genuine Token code to the icode stream; interpreter-only
 * icodes must go through addIcode instead.
 */
private void addToken(int token)
{
    if (validTokenCode(token)) {
        addUint8(token);
    } else {
        throw Kit.codeBug();
    }
}
/**
 * Appends an interpreter-specific icode to the stream. Icodes are
 * negative numbers; only the low 8 bits are stored.
 */
private void addIcode(int icode)
{
    if (validIcode(icode)) {
        // Write negative icode as uint8 bits
        addUint8(icode & 0xFF);
    } else {
        throw Kit.codeBug();
    }
}
/**
 * Appends a single unsigned byte to the icode stream, growing the
 * backing array when full.
 */
private void addUint8(int value)
{
    if ((value & ~0xFF) != 0) throw Kit.codeBug(); // must fit one byte
    int top = itsICodeTop;
    byte[] code = itsData.itsICode;
    if (code.length == top) {
        code = increaseICodeCapasity(1);
    }
    code[top] = (byte)value;
    itsICodeTop = top + 1;
}
/**
 * Appends an unsigned 16-bit value to the icode stream in big-endian
 * order, growing the backing array if needed.
 */
private void addUint16(int value)
{
    if ((value & ~0xFFFF) != 0) throw Kit.codeBug(); // must fit 16 bits
    int top = itsICodeTop;
    byte[] code = itsData.itsICode;
    if (code.length < top + 2) {
        code = increaseICodeCapasity(2);
    }
    code[top] = (byte)(value >>> 8);
    code[top + 1] = (byte)value;
    itsICodeTop = top + 2;
}
/**
 * Appends a 32-bit int to the icode stream in big-endian order, growing
 * the backing array if needed.
 */
private void addInt(int i)
{
    int top = itsICodeTop;
    byte[] code = itsData.itsICode;
    if (code.length < top + 4) {
        code = increaseICodeCapasity(4);
    }
    code[top] = (byte)(i >>> 24);
    code[top + 1] = (byte)(i >>> 16);
    code[top + 2] = (byte)(i >>> 8);
    code[top + 3] = (byte)i;
    itsICodeTop = top + 4;
}
/**
 * Appends a double to the literal double table, lazily allocating it
 * and doubling its capacity when full, and returns the slot index.
 */
private int getDoubleIndex(double num)
{
    int index = itsDoubleTableTop;
    double[] table = itsData.itsDoubleTable;
    if (index == 0) {
        table = new double[64];
        itsData.itsDoubleTable = table;
    } else if (index == table.length) {
        double[] grown = new double[index * 2];
        System.arraycopy(table, 0, grown, 0, index);
        table = grown;
        itsData.itsDoubleTable = table;
    }
    table[index] = num;
    itsDoubleTableTop = index + 1;
    return index;
}
/**
 * Appends a goto-style opcode and reserves two bytes for its jump
 * offset; resolveGoto patches the offset later.
 */
private void addGotoOp(int gotoOp)
{
    int top = itsICodeTop;
    byte[] code = itsData.itsICode;
    if (code.length < top + 3) {
        code = increaseICodeCapasity(3);
    }
    code[top] = (byte)gotoOp;
    // Skip two bytes: the 16-bit offset is written by resolveGoto.
    itsICodeTop = top + 3;
}
/**
 * Emits a variable access operation. GETVAR/SETVAR with a small index
 * use the compact one-byte-operand icodes; everything else goes through
 * the index-register encoding.
 */
private void addVarOp(int op, int varIndex)
{
    if (op == Token.GETVAR || op == Token.SETVAR) {
        if (varIndex < 128) {
            // Compact form: opcode plus single unsigned byte index.
            addIcode(op == Token.GETVAR ? Icode_GETVAR1 : Icode_SETVAR1);
            addUint8(varIndex);
        } else {
            addIndexOp(op, varIndex);
        }
    } else if (op == Icode_VAR_INC_DEC) {
        addIndexOp(op, varIndex);
    } else {
        throw Kit.codeBug();
    }
}
/**
 * Loads str into the interpreter's string register, then emits the
 * operation (icode or token) that consumes it.
 */
private void addStringOp(int op, String str)
{
    addStringPrefix(str);
    if (!validIcode(op)) {
        addToken(op);
    } else {
        addIcode(op);
    }
}
/**
 * Loads index into the interpreter's index register, then emits the
 * operation (icode or token) that consumes it.
 */
private void addIndexOp(int op, int index)
{
    addIndexPrefix(index);
    if (!validIcode(op)) {
        addToken(op);
    } else {
        addIcode(op);
    }
}
/**
 * Interns str in the string table (first use assigns the next index)
 * and emits the shortest register-load instruction for its index.
 */
private void addStringPrefix(String str)
{
    int idx = itsStrings.get(str, -1);
    if (idx < 0) {
        // First occurrence: assign the next free table index.
        idx = itsStrings.size();
        itsStrings.put(str, idx);
    }
    if (idx < 4) {
        // Indices 0..3 have dedicated single-byte icodes.
        addIcode(Icode_REG_STR_C0 - idx);
    } else if (idx <= 0xFF) {
        addIcode(Icode_REG_STR1);
        addUint8(idx);
    } else if (idx <= 0xFFFF) {
        addIcode(Icode_REG_STR2);
        addUint16(idx);
    } else {
        addIcode(Icode_REG_STR4);
        addInt(idx);
    }
}
/**
 * Emits the shortest register-load instruction that places the given
 * non-negative index into the interpreter's index register.
 */
private void addIndexPrefix(int index)
{
    if (index < 0) Kit.codeBug();
    if (index <= 5) {
        // Indices 0..5 have dedicated single-byte icodes.
        addIcode(Icode_REG_IND_C0 - index);
    } else if (index <= 0xFF) {
        addIcode(Icode_REG_IND1);
        addUint8(index);
    } else if (index <= 0xFFFF) {
        addIcode(Icode_REG_IND2);
        addUint16(index);
    } else {
        addIcode(Icode_REG_IND4);
        addInt(index);
    }
}
/**
 * Appends one entry to the exception table. Entries occupy
 * EXCEPTION_SLOT_SIZE consecutive ints laid out via the EXCEPTION_*_SLOT
 * offsets: protected icode range, handler PC, catch/finally flag, local
 * slot for the exception object and local slot of the saved scope.
 */
private void addExceptionHandler(int icodeStart, int icodeEnd,
                                 int handlerStart, boolean isFinally,
                                 int exceptionObjectLocal, int scopeLocal)
{
    int top = itsExceptionTableTop;
    int[] table = itsData.itsExceptionTable;
    if (table == null) {
        if (top != 0) Kit.codeBug();
        table = new int[EXCEPTION_SLOT_SIZE * 2];
        itsData.itsExceptionTable = table;
    } else if (table.length == top) {
        // Double the table when full.
        table = new int[table.length * 2];
        System.arraycopy(itsData.itsExceptionTable, 0, table, 0, top);
        itsData.itsExceptionTable = table;
    }
    table[top + EXCEPTION_TRY_START_SLOT]  = icodeStart;
    table[top + EXCEPTION_TRY_END_SLOT]    = icodeEnd;
    table[top + EXCEPTION_HANDLER_SLOT]    = handlerStart;
    table[top + EXCEPTION_TYPE_SLOT]       = isFinally ? 1 : 0;
    table[top + EXCEPTION_LOCAL_SLOT]      = exceptionObjectLocal;
    table[top + EXCEPTION_SCOPE_SLOT]      = scopeLocal;
    itsExceptionTableTop = top + EXCEPTION_SLOT_SIZE;
}
/**
 * Grows the icode array so that at least extraSize more bytes fit,
 * at minimum doubling the capacity, and returns the new array.
 */
private byte[] increaseICodeCapasity(int extraSize)
{
    int top = itsICodeTop;
    int capacity = itsData.itsICode.length;
    // Callers must only ask to grow when the current array is too small.
    if (capacity >= top + extraSize) throw Kit.codeBug();
    capacity *= 2;
    if (capacity < top + extraSize) {
        capacity = top + extraSize;
    }
    byte[] grown = new byte[capacity];
    System.arraycopy(itsData.itsICode, 0, grown, 0, top);
    itsData.itsICode = grown;
    return grown;
}
/**
 * Adjusts the model of the runtime operand stack depth by change and
 * tracks the high-water mark in itsData.itsMaxStack.
 */
private void stackChange(int change)
{
    if (change > 0) {
        int depth = itsStackDepth + change;
        if (depth > itsData.itsMaxStack) {
            itsData.itsMaxStack = depth;
        }
        itsStackDepth = depth;
    } else {
        itsStackDepth += change;
    }
}
/**
 * Allocates the next local slot and tracks the high-water mark in
 * itsData.itsMaxLocals. Slots are released in LIFO order via
 * releaseLocal.
 */
private int allocLocal()
{
    int slot = itsLocalTop;
    itsLocalTop = slot + 1;
    if (itsData.itsMaxLocals < itsLocalTop) {
        itsData.itsMaxLocals = itsLocalTop;
    }
    return slot;
}
/**
 * Releases a local slot previously returned by allocLocal. Slots must
 * be released strictly in LIFO order.
 */
private void releaseLocal(int localSlot)
{
    itsLocalTop -= 1;
    if (itsLocalTop != localSlot) Kit.codeBug();
}
private static int getShort(byte[] iCode, int pc) {
    // Big-endian 16-bit read; the high byte keeps its sign, so the
    // result may be negative.
    int high = iCode[pc];
    int low = iCode[pc + 1] & 0xFF;
    return (high << 8) | low;
}
private static int getIndex(byte[] iCode, int pc) {
    // Big-endian 16-bit read as an unsigned value in 0..0xFFFF.
    int high = iCode[pc] & 0xFF;
    int low = iCode[pc + 1] & 0xFF;
    return (high << 8) | low;
}
private static int getInt(byte[] iCode, int pc) {
    // Big-endian 32-bit read.
    int b0 = iCode[pc];
    int b1 = iCode[pc + 1] & 0xFF;
    int b2 = iCode[pc + 2] & 0xFF;
    int b3 = iCode[pc + 3] & 0xFF;
    return (b0 << 24) | (b1 << 16) | (b2 << 8) | b3;
}
/**
 * Finds the innermost exception-table entry covering the frame's
 * current PC, or -1 if none applies. When onlyFinally is true, catch
 * entries are skipped. Relies on the invariant that handler ranges
 * nest and never share an end PC.
 */
private static int getExceptionHandler(CallFrame frame,
                                       boolean onlyFinally)
{
    int[] exceptionTable = frame.idata.itsExceptionTable;
    if (exceptionTable == null) {
        // No exception handlers
        return -1;
    }

    // Icode switch in the interpreter increments PC immediately
    // and it is necessary to subtract 1 from the saved PC
    // to point it before the start of the next instruction.
    int pc = frame.pc - 1;

    // OPT: use binary search
    int best = -1, bestStart = 0, bestEnd = 0;
    for (int i = 0; i != exceptionTable.length; i += EXCEPTION_SLOT_SIZE) {
        int start = exceptionTable[i + EXCEPTION_TRY_START_SLOT];
        int end = exceptionTable[i + EXCEPTION_TRY_END_SLOT];
        if (!(start <= pc && pc < end)) {
            continue;
        }
        if (onlyFinally && exceptionTable[i + EXCEPTION_TYPE_SLOT] != 1) {
            continue;
        }
        if (best >= 0) {
            // Since handlers always nest and they never have shared end
            // although they can share start it is sufficient to compare
            // handlers ends
            if (bestEnd < end) {
                continue;
            }
            // Check the above assumption
            if (bestStart > start) Kit.codeBug(); // should be nested
            if (bestEnd == end) Kit.codeBug();    // no end sharing
        }
        best = i;
        bestStart = start;
        bestEnd = end;
    }
    return best;
}
/**
 * Debug helper: prints a human-readable disassembly of the icode in
 * idata to System.out, followed by the exception table. Active only
 * when Token.printICode is set.
 *
 * Fixes relative to the previous version:
 * - The Token.NUMBER case no longer advances pc by 2: NUMBER takes its
 *   operand from the index register (emitted via addIndexOp), so its
 *   span per bytecodeSpan() is 1; the stale "pc += 2" (left over from
 *   the old inline-operand encoding) made the
 *   "old_pc + icodeLength != pc" consistency check throw Kit.codeBug()
 *   whenever a double-table number was dumped.
 * - The exception-table dump now prints the scopeLocal slot, which was
 *   read from the table but never shown.
 */
private static void dumpICode(InterpreterData idata)
{
    if (!Token.printICode) {
        return;
    }

    byte iCode[] = idata.itsICode;
    int iCodeLength = iCode.length;
    String[] strings = idata.itsStringTable;
    PrintStream out = System.out;
    out.println("ICode dump, for " + idata.itsName
                + ", length = " + iCodeLength);
    out.println("MaxStack = " + idata.itsMaxStack);

    // Tracks the last value loaded into the index register so operands
    // of register-based instructions can be shown.
    int indexReg = 0;
    for (int pc = 0; pc < iCodeLength; ) {
        out.flush();
        out.print(" [" + pc + "] ");
        int token = iCode[pc];
        int icodeLength = bytecodeSpan(token);
        String tname = bytecodeName(token);
        int old_pc = pc;
        ++pc;
        switch (token) {
          default:
            if (icodeLength != 1) Kit.codeBug();
            out.println(tname);
            break;

          case Icode_GOSUB :
          case Token.GOTO :
          case Token.IFEQ :
          case Token.IFNE :
          case Icode_IFEQ_POP :
          case Icode_LEAVEDQ : {
            int newPC = pc + getShort(iCode, pc) - 1;
            out.println(tname + " " + newPC);
            pc += 2;
            break;
          }
          case Icode_VAR_INC_DEC :
          case Icode_NAME_INC_DEC :
          case Icode_PROP_INC_DEC :
          case Icode_ELEM_INC_DEC :
          case Icode_REF_INC_DEC: {
            int incrDecrType = iCode[pc];
            out.println(tname + " " + incrDecrType);
            ++pc;
            break;
          }
          case Icode_CALLSPECIAL : {
            int callType = iCode[pc] & 0xFF;
            boolean isNew = (iCode[pc + 1] != 0);
            int line = getIndex(iCode, pc+2);
            out.println(tname+" "+callType+" "+isNew+" "+indexReg+" "+line);
            pc += 4;
            break;
          }
          case Token.CATCH_SCOPE:
            {
                boolean afterFirstFlag = (iCode[pc] != 0);
                out.println(tname+" "+afterFirstFlag);
                ++pc;
            }
            break;
          case Token.REGEXP :
            out.println(tname+" "+idata.itsRegExpLiterals[indexReg]);
            break;
          case Token.OBJECTLIT :
          case Icode_SPARE_ARRAYLIT :
            out.println(tname+" "+idata.literalIds[indexReg]);
            break;
          case Icode_CLOSURE_EXPR :
          case Icode_CLOSURE_STMT :
            out.println(tname+" "+idata.itsNestedFunctions[indexReg]);
            break;
          case Token.CALL :
          case Icode_TAIL_CALL :
          case Token.REF_CALL :
          case Token.NEW :
            out.println(tname+' '+indexReg);
            break;
          case Token.THROW : {
            int line = getIndex(iCode, pc);
            out.println(tname + " : " + line);
            pc += 2;
            break;
          }
          case Icode_SHORTNUMBER : {
            int value = getShort(iCode, pc);
            out.println(tname + " " + value);
            pc += 2;
            break;
          }
          case Icode_INTNUMBER : {
            int value = getInt(iCode, pc);
            out.println(tname + " " + value);
            pc += 4;
            break;
          }
          case Token.NUMBER : {
            // Operand comes from the index register; no inline bytes.
            double value = idata.itsDoubleTable[indexReg];
            out.println(tname + " " + value);
            break;
          }
          case Icode_LINE : {
            int line = getIndex(iCode, pc);
            out.println(tname + " : " + line);
            pc += 2;
            break;
          }
          case Icode_REG_STR1: {
            String str = strings[0xFF & iCode[pc]];
            out.println(tname + " \"" + str + '"');
            ++pc;
            break;
          }
          case Icode_REG_STR2: {
            String str = strings[getIndex(iCode, pc)];
            out.println(tname + " \"" + str + '"');
            pc += 2;
            break;
          }
          case Icode_REG_STR4: {
            String str = strings[getInt(iCode, pc)];
            out.println(tname + " \"" + str + '"');
            pc += 4;
            break;
          }
          case Icode_REG_IND1: {
            indexReg = 0xFF & iCode[pc];
            out.println(tname+" "+indexReg);
            ++pc;
            break;
          }
          case Icode_REG_IND2: {
            indexReg = getIndex(iCode, pc);
            out.println(tname+" "+indexReg);
            pc += 2;
            break;
          }
          case Icode_REG_IND4: {
            indexReg = getInt(iCode, pc);
            out.println(tname+" "+indexReg);
            pc += 4;
            break;
          }
          case Icode_GETVAR1:
          case Icode_SETVAR1:
            indexReg = iCode[pc];
            out.println(tname+" "+indexReg);
            ++pc;
            break;
        }
        // pc must have advanced by exactly the declared instruction span.
        if (old_pc + icodeLength != pc) Kit.codeBug();
    }

    int[] table = idata.itsExceptionTable;
    if (table != null) {
        out.println("Exception handlers: "
                     +table.length / EXCEPTION_SLOT_SIZE);
        for (int i = 0; i != table.length;
             i += EXCEPTION_SLOT_SIZE)
        {
            int tryStart       = table[i + EXCEPTION_TRY_START_SLOT];
            int tryEnd         = table[i + EXCEPTION_TRY_END_SLOT];
            int handlerStart   = table[i + EXCEPTION_HANDLER_SLOT];
            int type           = table[i + EXCEPTION_TYPE_SLOT];
            int exceptionLocal = table[i + EXCEPTION_LOCAL_SLOT];
            int scopeLocal     = table[i + EXCEPTION_SCOPE_SLOT];

            out.println(" tryStart="+tryStart+" tryEnd="+tryEnd
                        +" handlerStart="+handlerStart
                        +" type="+(type == 0 ? "catch" : "finally")
                        +" exceptionLocal="+exceptionLocal
                        +" scopeLocal="+scopeLocal);
        }
    }
    out.flush();
}
/**
 * Returns the total byte length (opcode plus inline operands) of the
 * given bytecode. Instructions not listed here have no inline operands
 * and span a single byte; register-based operands (string/index
 * prefixes) are separate instructions and are not counted here.
 */
private static int bytecodeSpan(int bytecode)
{
    switch (bytecode) {
        case Token.THROW :
            // source line
            return 1 + 2;

        case Icode_GOSUB :
        case Token.GOTO :
        case Token.IFEQ :
        case Token.IFNE :
        case Icode_IFEQ_POP :
        case Icode_LEAVEDQ :
            // target pc offset
            return 1 + 2;

        case Icode_CALLSPECIAL :
            // call type
            // is new
            // line number
            return 1 + 1 + 1 + 2;

        case Token.CATCH_SCOPE:
            // scope flag
            return 1 + 1;

        case Icode_VAR_INC_DEC:
        case Icode_NAME_INC_DEC:
        case Icode_PROP_INC_DEC:
        case Icode_ELEM_INC_DEC:
        case Icode_REF_INC_DEC:
            // type of ++/--
            return 1 + 1;

        case Icode_SHORTNUMBER :
            // short number
            return 1 + 2;

        case Icode_INTNUMBER :
            // int number
            return 1 + 4;

        case Icode_REG_IND1:
            // ubyte index
            return 1 + 1;

        case Icode_REG_IND2:
            // ushort index
            return 1 + 2;

        case Icode_REG_IND4:
            // int index
            return 1 + 4;

        case Icode_REG_STR1:
            // ubyte string index
            return 1 + 1;

        case Icode_REG_STR2:
            // ushort string index
            return 1 + 2;

        case Icode_REG_STR4:
            // int string index
            return 1 + 4;

        case Icode_GETVAR1:
        case Icode_SETVAR1:
            // byte var index
            return 1 + 1;

        case Icode_LINE :
            // line number
            return 1 + 2;
    }
    if (!validBytecode(bytecode)) throw Kit.codeBug();
    return 1;
}
static int[] getLineNumbers(InterpreterData data)
{
UintMap presentLines = new UintMap();
byte[] iCode = data.itsICode;
int iCodeLength = iCode.length;
for (int pc = 0; pc != iCodeLength;) {
int bytecode = iCode[pc];
int span = bytecodeSpan(bytecode);
if (bytecode == Icode_LINE) {
if (span != 3) Kit.codeBug();
int line = getIndex(iCode, pc + 1);
presentLines.put(line, 0);
}
pc += span;
}
return presentLines.getKeys();
}
    /**
     * Records on the exception the interpreter call frames active on the
     * current context, together with the pc position of every frame, so
     * that the JavaScript portion of the stack can be reconstructed later
     * (see getPatchedStack). Stores null in both exception fields when no
     * interpreter frames are on the stack.
     */
    static void captureInterpreterStackInfo(RhinoException ex)
    {
        Context cx = Context.getCurrentContext();
        if (cx == null || cx.lastInterpreterFrame == null) {
            // No interpreter invocations
            ex.interpreterStackInfo = null;
            ex.interpreterLineData = null;
            return;
        }
        // has interpreter frame on the stack
        // array holds the top frame of each nested interpretLoop
        // invocation, oldest first, with the current frame last.
        CallFrame[] array;
        if (cx.previousInterpreterInvocations == null
            || cx.previousInterpreterInvocations.size() == 0)
        {
            array = new CallFrame[1];
        } else {
            int previousCount = cx.previousInterpreterInvocations.size();
            if (cx.previousInterpreterInvocations.peek()
                == cx.lastInterpreterFrame)
            {
                // It can happen if exception was generated after
                // frame was pushed to cx.previousInterpreterInvocations
                // but before assignment to cx.lastInterpreterFrame.
                // In this case frames has to be ignored.
                --previousCount;
            }
            array = new CallFrame[previousCount + 1];
            cx.previousInterpreterInvocations.toArray(array);
        }
        array[array.length - 1] = (CallFrame)cx.lastInterpreterFrame;
        // Each element of array heads a parentFrame chain contributing
        // frameIndex + 1 frames in total.
        int interpreterFrameCount = 0;
        for (int i = 0; i != array.length; ++i) {
            interpreterFrameCount += 1 + array[i].frameIndex;
        }
        int[] linePC = new int[interpreterFrameCount];
        // Fill linePC with pc positions from all interpreter frames.
        // Start from the most nested frame
        int linePCIndex = interpreterFrameCount;
        for (int i = array.length; i != 0;) {
            --i;
            CallFrame frame = array[i];
            while (frame != null) {
                --linePCIndex;
                linePC[linePCIndex] = frame.pcSourceLineStart;
                frame = frame.parentFrame;
            }
        }
        // Every slot must have been filled exactly once.
        if (linePCIndex != 0) Kit.codeBug();
        ex.interpreterStackInfo = array;
        ex.interpreterLineData = linePC;
    }
static String getSourcePositionFromStack(Context cx, int[] linep)
{
CallFrame frame = (CallFrame)cx.lastInterpreterFrame;
InterpreterData idata = frame.idata;
if (frame.pcSourceLineStart >= 0) {
linep[0] = getIndex(idata.itsICode, frame.pcSourceLineStart);
} else {
linep[0] = 0;
}
return idata.itsSourceFile;
}
static String getPatchedStack(RhinoException ex,
String nativeStackTrace)
{
String tag = "org.mozilla.javascript.Interpreter.interpretLoop";
StringBuffer sb = new StringBuffer(nativeStackTrace.length() + 1000);
String lineSeparator = SecurityUtilities.getSystemProperty("line.separator");
CallFrame[] array = (CallFrame[])ex.interpreterStackInfo;
int[] linePC = ex.interpreterLineData;
int arrayIndex = array.length;
int linePCIndex = linePC.length;
int offset = 0;
while (arrayIndex != 0) {
--arrayIndex;
int pos = nativeStackTrace.indexOf(tag, offset);
if (pos < 0) {
break;
}
// Skip tag length
pos += tag.length();
// Skip until the end of line
for (; pos != nativeStackTrace.length(); ++pos) {
char c = nativeStackTrace.charAt(pos);
if (c == '\n' || c == '\r') {
break;
}
}
sb.append(nativeStackTrace.substring(offset, pos));
offset = pos;
CallFrame frame = array[arrayIndex];
while (frame != null) {
if (linePCIndex == 0) Kit.codeBug();
--linePCIndex;
InterpreterData idata = frame.idata;
sb.append(lineSeparator);
sb.append("\tat script");
if (idata.itsName != null && idata.itsName.length() != 0) {
sb.append('.');
sb.append(idata.itsName);
}
sb.append('(');
sb.append(idata.itsSourceFile);
int pc = linePC[linePCIndex];
if (pc >= 0) {
// Include line info only if available
sb.append(':');
sb.append(getIndex(idata.itsICode, pc));
}
sb.append(')');
frame = frame.parentFrame;
}
}
sb.append(nativeStackTrace.substring(offset));
return sb.toString();
}
static String getEncodedSource(InterpreterData idata)
{
if (idata.encodedSource == null) {
return null;
}
return idata.encodedSource.substring(idata.encodedSourceStart,
idata.encodedSourceEnd);
}
private static void initFunction(Context cx, Scriptable scope,
InterpretedFunction parent, int index)
{
InterpretedFunction fn;
fn = InterpretedFunction.createFunction(cx, scope, parent, index);
ScriptRuntime.initFunction(cx, scope, fn, fn.idata.itsFunctionType,
parent.idata.evalScriptFlag);
}
static Object interpret(InterpretedFunction ifun,
Context cx, Scriptable scope,
Scriptable thisObj, Object[] args)
{
if (!ScriptRuntime.hasTopCall(cx)) Kit.codeBug();
if (cx.interpreterSecurityDomain != ifun.securityDomain) {
Object savedDomain = cx.interpreterSecurityDomain;
cx.interpreterSecurityDomain = ifun.securityDomain;
try {
return ifun.securityController.callWithDomain(
ifun.securityDomain, cx, ifun, scope, thisObj, args);
} finally {
cx.interpreterSecurityDomain = savedDomain;
}
}
CallFrame frame = new CallFrame();
initFrame(cx, scope, thisObj, args, null, 0, args.length,
ifun, null, frame);
return interpretLoop(cx, frame, null);
}
public static Object restartContinuation(Continuation c, Context cx,
Scriptable scope, Object[] args)
{
if (!ScriptRuntime.hasTopCall(cx)) {
return ScriptRuntime.doTopCall(c, cx, scope, null, args);
}
Object arg;
if (args.length == 0) {
arg = Undefined.instance;
} else {
arg = args[0];
}
CallFrame capturedFrame = (CallFrame)c.getImplementation();
if (capturedFrame == null) {
// No frames to restart
return arg;
}
ContinuationJump cjump = new ContinuationJump(c, null);
cjump.result = arg;
return interpretLoop(cx, null, cjump);
}
private static Object interpretLoop(Context cx, CallFrame frame,
Object throwable)
{
// throwable holds exception object to rethrow or catch
// It is also used for continuation restart in which case
// it holds ContinuationJump
final Object DBL_MRK = UniqueTag.DOUBLE_MARK;
final Object undefined = Undefined.instance;
final boolean instructionCounting = (cx.instructionThreshold != 0);
// arbitrary number to add to instructionCount when calling
// other functions
final int INVOCATION_COST = 100;
// arbitrary exception cost for instruction counting
final int EXCEPTION_COST = 100;
String stringReg = null;
int indexReg = -1;
if (cx.lastInterpreterFrame != null) {
// save the top frame from the previous interpreterLoop
// invocation on the stack
if (cx.previousInterpreterInvocations == null) {
cx.previousInterpreterInvocations = new ObjArray();
}
cx.previousInterpreterInvocations.push(cx.lastInterpreterFrame);
}
// When restarting continuation throwable is not null and to jump
// to the code that rewind continuation state indexReg should be set
// to -1.
// With the normal call throable == null and indexReg == -1 allows to
// catch bugs with using indeReg to access array eleemnts before
// initializing indexReg.
if (throwable != null) {
// Assert assumptions
if (!(throwable instanceof ContinuationJump)) {
// It should be continuation
Kit.codeBug();
}
}
Object interpreterResult = null;
double interpreterResultDbl = 0.0;
StateLoop: for (;;) {
withoutExceptions: try {
if (throwable != null) {
// Recovering from exception, indexReg contains
// the index of handler
if (indexReg >= 0) {
// Normal excepton handler, transfer
// control appropriately
if (frame.frozen) {
// XXX Deal with exceptios!!!
frame = frame.cloneFrozen();
}
int[] table = frame.idata.itsExceptionTable;
frame.pc = table[indexReg + EXCEPTION_HANDLER_SLOT];
if (instructionCounting) {
frame.pcPrevBranch = frame.pc;
}
frame.savedStackTop = frame.emptyStackTop;
int scopeLocal = frame.localShift
+ table[indexReg
+ EXCEPTION_SCOPE_SLOT];
int exLocal = frame.localShift
+ table[indexReg
+ EXCEPTION_LOCAL_SLOT];
frame.scope = (Scriptable)frame.stack[scopeLocal];
frame.stack[exLocal] = throwable;
throwable = null;
} else {
// Continuation restoration
ContinuationJump cjump = (ContinuationJump)throwable;
// Clear throwable to indicate that execptions are OK
throwable = null;
if (cjump.branchFrame != frame) Kit.codeBug();
// Check that we have at least one frozen frame
// in the case of detached continuation restoration:
// unwind code ensure that
if (cjump.capturedFrame == null) Kit.codeBug();
// Need to rewind branchFrame, capturedFrame
// and all frames in between
int rewindCount = cjump.capturedFrame.frameIndex + 1;
if (cjump.branchFrame != null) {
rewindCount -= cjump.branchFrame.frameIndex;
}
int enterCount = 0;
CallFrame[] enterFrames = null;
CallFrame x = cjump.capturedFrame;
for (int i = 0; i != rewindCount; ++i) {
if (!x.frozen) Kit.codeBug();
if (isFrameEnterExitRequired(x)) {
if (enterFrames == null) {
// Allocate enough space to store the rest
// of rewind frames in case all of them
// would require to enter
enterFrames = new CallFrame[rewindCount
- i];
}
enterFrames[enterCount] = x;
++enterCount;
}
x = x.parentFrame;
}
while (enterCount != 0) {
// execute enter: walk enterFrames in the reverse
// order since they were stored starting from
// the capturedFrame, not branchFrame
--enterCount;
x = enterFrames[enterCount];
enterFrame(cx, x, ScriptRuntime.emptyArgs, true);
}
// Continuation jump is almost done: capturedFrame
// points to the call to the function that captured
// continuation, so clone capturedFrame and
// emulate return that function with the suplied result
frame = cjump.capturedFrame.cloneFrozen();
setCallResult(frame, cjump.result, cjump.resultDbl);
// restart the execution
}
} else {
if (frame.frozen) Kit.codeBug();
}
// Use local variables for constant values in frame
// for faster access
Object[] stack = frame.stack;
double[] sDbl = frame.sDbl;
Object[] vars = frame.varSource.stack;
double[] varDbls = frame.varSource.sDbl;
byte[] iCode = frame.idata.itsICode;
String[] strings = frame.idata.itsStringTable;
// Use local for stackTop as well. Since execption handlers
// can only exist at statement level where stack is empty,
// it is necessary to save/restore stackTop only accross
// function calls and normal returns.
int stackTop = frame.savedStackTop;
// Store new frame in cx which is used for error reporting etc.
cx.lastInterpreterFrame = frame;
Loop: for (;;) {
// Exception handler assumes that PC is already incremented
// pass the instruction start when it searches the
// exception handler
int op = iCode[frame.pc++];
jumplessRun: {
// Back indent to ease imlementation reading
switch (op) {
case Token.THROW: {
Object value = stack[stackTop];
if (value == DBL_MRK) value = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
int sourceLine = getIndex(iCode, frame.pc);
throwable = new JavaScriptException(value,
frame.idata.itsSourceFile,
sourceLine);
break withoutExceptions;
}
case Token.RETHROW: {
indexReg += frame.localShift;
throwable = stack[indexReg];
break withoutExceptions;
}
case Token.GE :
case Token.LE :
case Token.GT :
case Token.LT : {
--stackTop;
Object rhs = stack[stackTop + 1];
Object lhs = stack[stackTop];
boolean valBln;
object_compare:
{
number_compare:
{
double rDbl, lDbl;
if (rhs == DBL_MRK) {
rDbl = sDbl[stackTop + 1];
lDbl = stack_double(frame, stackTop);
} else if (lhs == DBL_MRK) {
rDbl = ScriptRuntime.toNumber(rhs);
lDbl = sDbl[stackTop];
} else {
break number_compare;
}
switch (op) {
case Token.GE:
valBln = (lDbl >= rDbl);
break object_compare;
case Token.LE:
valBln = (lDbl <= rDbl);
break object_compare;
case Token.GT:
valBln = (lDbl > rDbl);
break object_compare;
case Token.LT:
valBln = (lDbl < rDbl);
break object_compare;
default:
throw Kit.codeBug();
}
}
switch (op) {
case Token.GE:
valBln = ScriptRuntime.cmp_LE(rhs, lhs);
break;
case Token.LE:
valBln = ScriptRuntime.cmp_LE(lhs, rhs);
break;
case Token.GT:
valBln = ScriptRuntime.cmp_LT(rhs, lhs);
break;
case Token.LT:
valBln = ScriptRuntime.cmp_LT(lhs, rhs);
break;
default:
throw Kit.codeBug();
}
}
stack[stackTop] = ScriptRuntime.wrapBoolean(valBln);
continue Loop;
}
case Token.IN :
case Token.INSTANCEOF : {
Object rhs = stack[stackTop];
if (rhs == DBL_MRK) rhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
Object lhs = stack[stackTop];
if (lhs == DBL_MRK) lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
boolean valBln;
if (op == Token.IN) {
valBln = ScriptRuntime.in(lhs, rhs, cx);
} else {
valBln = ScriptRuntime.instanceOf(lhs, rhs, cx);
}
stack[stackTop] = ScriptRuntime.wrapBoolean(valBln);
continue Loop;
}
case Token.EQ :
case Token.NE : {
--stackTop;
boolean valBln;
Object rhs = stack[stackTop + 1];
Object lhs = stack[stackTop];
if (rhs == DBL_MRK) {
if (lhs == DBL_MRK) {
valBln = (sDbl[stackTop] == sDbl[stackTop + 1]);
} else {
valBln = ScriptRuntime.eqNumber(sDbl[stackTop + 1], lhs);
}
} else {
if (lhs == DBL_MRK) {
valBln = ScriptRuntime.eqNumber(sDbl[stackTop], rhs);
} else {
valBln = ScriptRuntime.eq(lhs, rhs);
}
}
valBln ^= (op == Token.NE);
stack[stackTop] = ScriptRuntime.wrapBoolean(valBln);
continue Loop;
}
case Token.SHEQ :
case Token.SHNE : {
--stackTop;
Object rhs = stack[stackTop + 1];
Object lhs = stack[stackTop];
boolean valBln;
shallow_compare: {
double rdbl, ldbl;
if (rhs == DBL_MRK) {
rdbl = sDbl[stackTop + 1];
if (lhs == DBL_MRK) {
ldbl = sDbl[stackTop];
} else if (lhs instanceof Number) {
ldbl = ((Number)lhs).doubleValue();
} else {
valBln = false;
break shallow_compare;
}
} else if (lhs == DBL_MRK) {
ldbl = sDbl[stackTop];
if (rhs == DBL_MRK) {
rdbl = sDbl[stackTop + 1];
} else if (rhs instanceof Number) {
rdbl = ((Number)rhs).doubleValue();
} else {
valBln = false;
break shallow_compare;
}
} else {
valBln = ScriptRuntime.shallowEq(lhs, rhs);
break shallow_compare;
}
valBln = (ldbl == rdbl);
}
valBln ^= (op == Token.SHNE);
stack[stackTop] = ScriptRuntime.wrapBoolean(valBln);
continue Loop;
}
case Token.IFNE :
if (stack_boolean(frame, stackTop--)) {
frame.pc += 2;
continue Loop;
}
break jumplessRun;
case Token.IFEQ :
if (!stack_boolean(frame, stackTop--)) {
frame.pc += 2;
continue Loop;
}
break jumplessRun;
case Icode_IFEQ_POP :
if (!stack_boolean(frame, stackTop--)) {
frame.pc += 2;
continue Loop;
}
stack[stackTop--] = null;
break jumplessRun;
case Token.GOTO :
break jumplessRun;
case Icode_GOSUB :
++stackTop;
stack[stackTop] = DBL_MRK;
sDbl[stackTop] = frame.pc + 2;
break jumplessRun;
case Icode_STARTSUB :
if (stackTop == frame.emptyStackTop + 1) {
// Call from Icode_GOSUB: store return PC address in the local
indexReg += frame.localShift;
stack[indexReg] = stack[stackTop];
sDbl[indexReg] = sDbl[stackTop];
--stackTop;
} else {
// Call from exception handler: exception object is already stored
// in the local
if (stackTop != frame.emptyStackTop) Kit.codeBug();
}
continue Loop;
case Icode_RETSUB : {
// indexReg: local to store return address
if (instructionCounting) {
addInstructionCount(cx, frame, 0);
}
indexReg += frame.localShift;
Object value = stack[indexReg];
if (value != DBL_MRK) {
// Invocation from exception handler, restore object to rethrow
throwable = value;
break withoutExceptions;
}
// Normal return from GOSUB
frame.pc = (int)sDbl[indexReg];
if (instructionCounting) {
frame.pcPrevBranch = frame.pc;
}
continue Loop;
}
case Icode_POP :
stack[stackTop] = null;
stackTop--;
continue Loop;
case Icode_POP_RESULT :
frame.result = stack[stackTop];
frame.resultDbl = sDbl[stackTop];
stack[stackTop] = null;
--stackTop;
continue Loop;
case Icode_DUP :
stack[stackTop + 1] = stack[stackTop];
sDbl[stackTop + 1] = sDbl[stackTop];
stackTop++;
continue Loop;
case Icode_DUP2 :
stack[stackTop + 1] = stack[stackTop - 1];
sDbl[stackTop + 1] = sDbl[stackTop - 1];
stack[stackTop + 2] = stack[stackTop];
sDbl[stackTop + 2] = sDbl[stackTop];
stackTop += 2;
continue Loop;
case Icode_SWAP : {
Object o = stack[stackTop];
stack[stackTop] = stack[stackTop - 1];
stack[stackTop - 1] = o;
double d = sDbl[stackTop];
sDbl[stackTop] = sDbl[stackTop - 1];
sDbl[stackTop - 1] = d;
continue Loop;
}
case Token.RETURN :
frame.result = stack[stackTop];
frame.resultDbl = sDbl[stackTop];
--stackTop;
break Loop;
case Token.RETURN_RESULT :
break Loop;
case Icode_RETUNDEF :
frame.result = undefined;
break Loop;
case Token.BITNOT : {
int rIntValue = stack_int32(frame, stackTop);
stack[stackTop] = DBL_MRK;
sDbl[stackTop] = ~rIntValue;
continue Loop;
}
case Token.BITAND :
case Token.BITOR :
case Token.BITXOR :
case Token.LSH :
case Token.RSH : {
int rIntValue = stack_int32(frame, stackTop);
--stackTop;
int lIntValue = stack_int32(frame, stackTop);
stack[stackTop] = DBL_MRK;
switch (op) {
case Token.BITAND:
lIntValue &= rIntValue;
break;
case Token.BITOR:
lIntValue |= rIntValue;
break;
case Token.BITXOR:
lIntValue ^= rIntValue;
break;
case Token.LSH:
lIntValue <<= rIntValue;
break;
case Token.RSH:
lIntValue >>= rIntValue;
break;
}
sDbl[stackTop] = lIntValue;
continue Loop;
}
case Token.URSH : {
int rIntValue = stack_int32(frame, stackTop) & 0x1F;
--stackTop;
double lDbl = stack_double(frame, stackTop);
stack[stackTop] = DBL_MRK;
sDbl[stackTop] = ScriptRuntime.toUint32(lDbl) >>> rIntValue;
continue Loop;
}
case Token.NEG :
case Token.POS : {
double rDbl = stack_double(frame, stackTop);
stack[stackTop] = DBL_MRK;
if (op == Token.NEG) {
rDbl = -rDbl;
}
sDbl[stackTop] = rDbl;
continue Loop;
}
case Token.ADD :
--stackTop;
do_add(stack, sDbl, stackTop, cx);
continue Loop;
case Token.SUB :
case Token.MUL :
case Token.DIV :
case Token.MOD : {
double rDbl = stack_double(frame, stackTop);
--stackTop;
double lDbl = stack_double(frame, stackTop);
stack[stackTop] = DBL_MRK;
switch (op) {
case Token.SUB:
lDbl -= rDbl;
break;
case Token.MUL:
lDbl *= rDbl;
break;
case Token.DIV:
lDbl /= rDbl;
break;
case Token.MOD:
lDbl %= rDbl;
break;
}
sDbl[stackTop] = lDbl;
continue Loop;
}
case Token.NOT :
stack[stackTop] = ScriptRuntime.wrapBoolean(
!stack_boolean(frame, stackTop));
continue Loop;
case Token.BINDNAME :
stack[++stackTop] = ScriptRuntime.bind(cx, frame.scope, stringReg);
continue Loop;
case Token.SETNAME : {
Object rhs = stack[stackTop];
if (rhs == DBL_MRK) rhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
Scriptable lhs = (Scriptable)stack[stackTop];
stack[stackTop] = ScriptRuntime.setName(lhs, rhs, cx,
frame.scope, stringReg);
continue Loop;
}
case Token.DELPROP : {
Object rhs = stack[stackTop];
if (rhs == DBL_MRK) rhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
Object lhs = stack[stackTop];
if (lhs == DBL_MRK) lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.delete(lhs, rhs, cx);
continue Loop;
}
case Token.GETPROP : {
Object lhs = stack[stackTop];
if (lhs == DBL_MRK) lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.getObjectProp(lhs, stringReg, cx);
continue Loop;
}
case Token.SETPROP : {
Object rhs = stack[stackTop];
if (rhs == DBL_MRK) rhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
Object lhs = stack[stackTop];
if (lhs == DBL_MRK) lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.setObjectProp(lhs, stringReg, rhs,
cx);
continue Loop;
}
case Icode_PROP_INC_DEC : {
Object lhs = stack[stackTop];
if (lhs == DBL_MRK) lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.propIncrDecr(lhs, stringReg,
cx, iCode[frame.pc]);
++frame.pc;
continue Loop;
}
case Token.GETELEM : {
--stackTop;
Object lhs = stack[stackTop];
if (lhs == DBL_MRK) {
lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
}
Object value;
Object id = stack[stackTop + 1];
if (id != DBL_MRK) {
value = ScriptRuntime.getObjectElem(lhs, id, cx);
} else {
double d = sDbl[stackTop + 1];
value = ScriptRuntime.getObjectIndex(lhs, d, cx);
}
stack[stackTop] = value;
continue Loop;
}
case Token.SETELEM : {
stackTop -= 2;
Object rhs = stack[stackTop + 2];
if (rhs == DBL_MRK) {
rhs = ScriptRuntime.wrapNumber(sDbl[stackTop + 2]);
}
Object lhs = stack[stackTop];
if (lhs == DBL_MRK) {
lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
}
Object value;
Object id = stack[stackTop + 1];
if (id != DBL_MRK) {
value = ScriptRuntime.setObjectElem(lhs, id, rhs, cx);
} else {
double d = sDbl[stackTop + 1];
value = ScriptRuntime.setObjectIndex(lhs, d, rhs, cx);
}
stack[stackTop] = value;
continue Loop;
}
case Icode_ELEM_INC_DEC: {
Object rhs = stack[stackTop];
if (rhs == DBL_MRK) rhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
Object lhs = stack[stackTop];
if (lhs == DBL_MRK) lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.elemIncrDecr(lhs, rhs, cx,
iCode[frame.pc]);
++frame.pc;
continue Loop;
}
case Token.GET_REF : {
Ref ref = (Ref)stack[stackTop];
stack[stackTop] = ScriptRuntime.refGet(ref, cx);
continue Loop;
}
case Token.SET_REF : {
Object value = stack[stackTop];
if (value == DBL_MRK) value = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
Ref ref = (Ref)stack[stackTop];
stack[stackTop] = ScriptRuntime.refSet(ref, value, cx);
continue Loop;
}
case Token.DEL_REF : {
Ref ref = (Ref)stack[stackTop];
stack[stackTop] = ScriptRuntime.refDel(ref, cx);
continue Loop;
}
case Icode_REF_INC_DEC : {
Ref ref = (Ref)stack[stackTop];
stack[stackTop] = ScriptRuntime.refIncrDecr(ref, cx, iCode[frame.pc]);
++frame.pc;
continue Loop;
}
case Token.LOCAL_LOAD :
++stackTop;
indexReg += frame.localShift;
stack[stackTop] = stack[indexReg];
sDbl[stackTop] = sDbl[indexReg];
continue Loop;
case Icode_LOCAL_CLEAR :
indexReg += frame.localShift;
stack[indexReg] = null;
continue Loop;
case Icode_NAME_AND_THIS :
// stringReg: name
++stackTop;
stack[stackTop] = ScriptRuntime.getNameFunctionAndThis(stringReg,
cx, frame.scope);
++stackTop;
stack[stackTop] = ScriptRuntime.lastStoredScriptable(cx);
continue Loop;
case Icode_PROP_AND_THIS: {
Object obj = stack[stackTop];
if (obj == DBL_MRK) obj = ScriptRuntime.wrapNumber(sDbl[stackTop]);
// stringReg: property
stack[stackTop] = ScriptRuntime.getPropFunctionAndThis(obj, stringReg,
cx);
++stackTop;
stack[stackTop] = ScriptRuntime.lastStoredScriptable(cx);
continue Loop;
}
case Icode_ELEM_AND_THIS: {
Object obj = stack[stackTop - 1];
if (obj == DBL_MRK) obj = ScriptRuntime.wrapNumber(sDbl[stackTop - 1]);
Object id = stack[stackTop];
if (id == DBL_MRK) id = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop - 1] = ScriptRuntime.getElemFunctionAndThis(obj, id, cx);
stack[stackTop] = ScriptRuntime.lastStoredScriptable(cx);
continue Loop;
}
case Icode_VALUE_AND_THIS : {
Object value = stack[stackTop];
if (value == DBL_MRK) value = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.getValueFunctionAndThis(value, cx);
++stackTop;
stack[stackTop] = ScriptRuntime.lastStoredScriptable(cx);
continue Loop;
}
case Icode_CALLSPECIAL : {
if (instructionCounting) {
cx.instructionCount += INVOCATION_COST;
}
int callType = iCode[frame.pc] & 0xFF;
boolean isNew = (iCode[frame.pc + 1] != 0);
int sourceLine = getIndex(iCode, frame.pc + 2);
// indexReg: number of arguments
if (isNew) {
// stack change: function arg0 .. argN -> newResult
stackTop -= indexReg;
Object function = stack[stackTop];
if (function == DBL_MRK)
function = ScriptRuntime.wrapNumber(sDbl[stackTop]);
Object[] outArgs = getArgsArray(
stack, sDbl, stackTop + 1, indexReg);
stack[stackTop] = ScriptRuntime.newSpecial(
cx, function, outArgs, frame.scope, callType);
} else {
// stack change: function thisObj arg0 .. argN -> result
stackTop -= 1 + indexReg;
// Call code generation ensure that stack here
// is ... Callable Scriptable
Scriptable functionThis = (Scriptable)stack[stackTop + 1];
Callable function = (Callable)stack[stackTop];
Object[] outArgs = getArgsArray(
stack, sDbl, stackTop + 2, indexReg);
stack[stackTop] = ScriptRuntime.callSpecial(
cx, function, functionThis, outArgs,
frame.scope, frame.thisObj, callType,
frame.idata.itsSourceFile, sourceLine);
}
frame.pc += 4;
continue Loop;
}
case Token.CALL :
case Icode_TAIL_CALL :
case Token.REF_CALL : {
if (instructionCounting) {
cx.instructionCount += INVOCATION_COST;
}
// stack change: function thisObj arg0 .. argN -> result
// indexReg: number of arguments
stackTop -= 1 + indexReg;
// CALL generation ensures that fun and funThisObj
// are already Scriptable and Callable objects respectively
Callable fun = (Callable)stack[stackTop];
Scriptable funThisObj = (Scriptable)stack[stackTop + 1];
if (op == Token.REF_CALL) {
Object[] outArgs = getArgsArray(stack, sDbl, stackTop + 2,
indexReg);
stack[stackTop] = ScriptRuntime.callRef(fun, funThisObj,
outArgs, cx);
continue Loop;
}
Scriptable calleeScope = frame.scope;
if (frame.useActivation) {
calleeScope = ScriptableObject.getTopLevelScope(frame.scope);
}
if (fun instanceof InterpretedFunction) {
InterpretedFunction ifun = (InterpretedFunction)fun;
if (frame.fnOrScript.securityDomain == ifun.securityDomain) {
CallFrame callParentFrame = frame;
CallFrame calleeFrame = new CallFrame();
if (op == Icode_TAIL_CALL) {
// In principle tail call can re-use the current
// frame and its stack arrays but it is hard to
// do properly. Any exceptions that can legally
// happen during frame re-initialization including
// StackOverflowException during innocent looking
// System.arraycopy may leave the current frame
// data corrupted leading to undefined behaviour
// in the catch code bellow that unwinds JS stack
// on exceptions. Then there is issue about frame release
// end exceptions there.
// To avoid frame allocation a released frame
// can be cached for re-use which would also benefit
// non-tail calls but it is not clear that this caching
// would gain in performance due to potentially
// bad iteraction with GC.
callParentFrame = frame.parentFrame;
- }
+ // Release the current frame. See Bug #344501 to see why
+ // it is being done here.
+ // TODO: If using the graphical debugger, tail call
+ // optimization will create a "hole" in the context stack.
+ // The correct thing to do may be to disable tail call
+ // optimization if the code is being debugged.
+ exitFrame(cx, frame, null); }
initFrame(cx, calleeScope, funThisObj, stack, sDbl,
stackTop + 2, indexReg, ifun, callParentFrame,
calleeFrame);
- if (op == Icode_TAIL_CALL) {
- // Release the parent
- exitFrame(cx, frame, null);
- } else {
+ if (op != Icode_TAIL_CALL) {
frame.savedStackTop = stackTop;
frame.savedCallOp = op;
}
frame = calleeFrame;
continue StateLoop;
}
}
if (fun instanceof Continuation) {
// Jump to the captured continuation
ContinuationJump cjump;
cjump = new ContinuationJump((Continuation)fun, frame);
// continuation result is the first argument if any
// of contination call
if (indexReg == 0) {
cjump.result = undefined;
} else {
cjump.result = stack[stackTop + 2];
cjump.resultDbl = sDbl[stackTop + 2];
}
// Start the real unwind job
throwable = cjump;
break withoutExceptions;
}
if (fun instanceof IdFunctionObject) {
IdFunctionObject ifun = (IdFunctionObject)fun;
if (Continuation.isContinuationConstructor(ifun)) {
captureContinuation(cx, frame, stackTop);
continue Loop;
}
}
Object[] outArgs = getArgsArray(stack, sDbl, stackTop + 2,
indexReg);
stack[stackTop] = fun.call(cx, calleeScope, funThisObj, outArgs);
continue Loop;
}
case Token.NEW : {
if (instructionCounting) {
cx.instructionCount += INVOCATION_COST;
}
// stack change: function arg0 .. argN -> newResult
// indexReg: number of arguments
stackTop -= indexReg;
Object lhs = stack[stackTop];
if (lhs instanceof InterpretedFunction) {
InterpretedFunction f = (InterpretedFunction)lhs;
if (frame.fnOrScript.securityDomain == f.securityDomain) {
Scriptable newInstance = f.createObject(cx, frame.scope);
CallFrame calleeFrame = new CallFrame();
initFrame(cx, frame.scope, newInstance, stack, sDbl,
stackTop + 1, indexReg, f, frame,
calleeFrame);
stack[stackTop] = newInstance;
frame.savedStackTop = stackTop;
frame.savedCallOp = op;
frame = calleeFrame;
continue StateLoop;
}
}
if (!(lhs instanceof Function)) {
if (lhs == DBL_MRK) lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
throw ScriptRuntime.notFunctionError(lhs);
}
Function fun = (Function)lhs;
if (fun instanceof IdFunctionObject) {
IdFunctionObject ifun = (IdFunctionObject)fun;
if (Continuation.isContinuationConstructor(ifun)) {
captureContinuation(cx, frame, stackTop);
continue Loop;
}
}
Object[] outArgs = getArgsArray(stack, sDbl, stackTop + 1, indexReg);
stack[stackTop] = fun.construct(cx, frame.scope, outArgs);
continue Loop;
}
case Token.TYPEOF : {
Object lhs = stack[stackTop];
if (lhs == DBL_MRK) lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.typeof(lhs);
continue Loop;
}
case Icode_TYPEOFNAME :
stack[++stackTop] = ScriptRuntime.typeofName(frame.scope, stringReg);
continue Loop;
case Token.STRING :
stack[++stackTop] = stringReg;
continue Loop;
case Icode_SHORTNUMBER :
++stackTop;
stack[stackTop] = DBL_MRK;
sDbl[stackTop] = getShort(iCode, frame.pc);
frame.pc += 2;
continue Loop;
case Icode_INTNUMBER :
++stackTop;
stack[stackTop] = DBL_MRK;
sDbl[stackTop] = getInt(iCode, frame.pc);
frame.pc += 4;
continue Loop;
case Token.NUMBER :
++stackTop;
stack[stackTop] = DBL_MRK;
sDbl[stackTop] = frame.idata.itsDoubleTable[indexReg];
continue Loop;
case Token.NAME :
stack[++stackTop] = ScriptRuntime.name(cx, frame.scope, stringReg);
continue Loop;
case Icode_NAME_INC_DEC :
stack[++stackTop] = ScriptRuntime.nameIncrDecr(frame.scope, stringReg,
iCode[frame.pc]);
++frame.pc;
continue Loop;
case Icode_SETVAR1:
indexReg = iCode[frame.pc++];
// fallthrough
case Token.SETVAR :
if (!frame.useActivation) {
vars[indexReg] = stack[stackTop];
varDbls[indexReg] = sDbl[stackTop];
} else {
Object val = stack[stackTop];
if (val == DBL_MRK) val = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stringReg = frame.idata.argNames[indexReg];
frame.scope.put(stringReg, frame.scope, val);
}
continue Loop;
case Icode_GETVAR1:
indexReg = iCode[frame.pc++];
// fallthrough
case Token.GETVAR :
++stackTop;
if (!frame.useActivation) {
stack[stackTop] = vars[indexReg];
sDbl[stackTop] = varDbls[indexReg];
} else {
stringReg = frame.idata.argNames[indexReg];
stack[stackTop] = frame.scope.get(stringReg, frame.scope);
}
continue Loop;
case Icode_VAR_INC_DEC : {
// indexReg : varindex
++stackTop;
int incrDecrMask = iCode[frame.pc];
if (!frame.useActivation) {
stack[stackTop] = DBL_MRK;
Object varValue = vars[indexReg];
double d;
if (varValue == DBL_MRK) {
d = varDbls[indexReg];
} else {
d = ScriptRuntime.toNumber(varValue);
vars[indexReg] = DBL_MRK;
}
double d2 = ((incrDecrMask & Node.DECR_FLAG) == 0)
? d + 1.0 : d - 1.0;
varDbls[indexReg] = d2;
sDbl[stackTop] = ((incrDecrMask & Node.POST_FLAG) == 0) ? d2 : d;
} else {
String varName = frame.idata.argNames[indexReg];
stack[stackTop] = ScriptRuntime.nameIncrDecr(frame.scope, varName,
incrDecrMask);
}
++frame.pc;
continue Loop;
}
case Icode_ZERO :
++stackTop;
stack[stackTop] = DBL_MRK;
sDbl[stackTop] = 0;
continue Loop;
case Icode_ONE :
++stackTop;
stack[stackTop] = DBL_MRK;
sDbl[stackTop] = 1;
continue Loop;
case Token.NULL :
stack[++stackTop] = null;
continue Loop;
case Token.THIS :
stack[++stackTop] = frame.thisObj;
continue Loop;
case Token.THISFN :
stack[++stackTop] = frame.fnOrScript;
continue Loop;
case Token.FALSE :
stack[++stackTop] = Boolean.FALSE;
continue Loop;
case Token.TRUE :
stack[++stackTop] = Boolean.TRUE;
continue Loop;
case Icode_UNDEF :
stack[++stackTop] = undefined;
continue Loop;
case Token.ENTERWITH : {
Object lhs = stack[stackTop];
if (lhs == DBL_MRK) lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
frame.scope = ScriptRuntime.enterWith(lhs, cx, frame.scope);
continue Loop;
}
case Token.LEAVEWITH :
frame.scope = ScriptRuntime.leaveWith(frame.scope);
continue Loop;
case Token.CATCH_SCOPE : {
// stack top: exception object
// stringReg: name of exception variable
// indexReg: local for exception scope
--stackTop;
indexReg += frame.localShift;
boolean afterFirstScope = (frame.idata.itsICode[frame.pc] != 0);
Throwable caughtException = (Throwable)stack[stackTop + 1];
Scriptable lastCatchScope;
if (!afterFirstScope) {
lastCatchScope = null;
} else {
lastCatchScope = (Scriptable)stack[indexReg];
}
stack[indexReg] = ScriptRuntime.newCatchScope(caughtException,
lastCatchScope, stringReg,
cx, frame.scope);
++frame.pc;
continue Loop;
}
case Token.ENUM_INIT_KEYS :
case Token.ENUM_INIT_VALUES : {
Object lhs = stack[stackTop];
if (lhs == DBL_MRK) lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
indexReg += frame.localShift;
stack[indexReg] = ScriptRuntime.enumInit(
lhs, cx, (op == Token.ENUM_INIT_VALUES));
continue Loop;
}
case Token.ENUM_NEXT :
case Token.ENUM_ID : {
indexReg += frame.localShift;
Object val = stack[indexReg];
++stackTop;
stack[stackTop] = (op == Token.ENUM_NEXT)
? (Object)ScriptRuntime.enumNext(val)
: (Object)ScriptRuntime.enumId(val, cx);
continue Loop;
}
case Token.REF_SPECIAL : {
//stringReg: name of special property
Object obj = stack[stackTop];
if (obj == DBL_MRK) obj = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.specialRef(obj, stringReg, cx);
continue Loop;
}
case Token.REF_MEMBER: {
//indexReg: flags
Object elem = stack[stackTop];
if (elem == DBL_MRK) elem = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
Object obj = stack[stackTop];
if (obj == DBL_MRK) obj = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.memberRef(obj, elem, cx, indexReg);
continue Loop;
}
case Token.REF_NS_MEMBER: {
//indexReg: flags
Object elem = stack[stackTop];
if (elem == DBL_MRK) elem = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
Object ns = stack[stackTop];
if (ns == DBL_MRK) ns = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
Object obj = stack[stackTop];
if (obj == DBL_MRK) obj = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.memberRef(obj, ns, elem, cx, indexReg);
continue Loop;
}
case Token.REF_NAME: {
//indexReg: flags
Object name = stack[stackTop];
if (name == DBL_MRK) name = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.nameRef(name, cx, frame.scope,
indexReg);
continue Loop;
}
case Token.REF_NS_NAME: {
//indexReg: flags
Object name = stack[stackTop];
if (name == DBL_MRK) name = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
Object ns = stack[stackTop];
if (ns == DBL_MRK) ns = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.nameRef(ns, name, cx, frame.scope,
indexReg);
continue Loop;
}
case Icode_SCOPE_LOAD :
indexReg += frame.localShift;
frame.scope = (Scriptable)stack[indexReg];
continue Loop;
case Icode_SCOPE_SAVE :
indexReg += frame.localShift;
stack[indexReg] = frame.scope;
continue Loop;
case Icode_CLOSURE_EXPR :
stack[++stackTop] = InterpretedFunction.createFunction(cx, frame.scope,
frame.fnOrScript,
indexReg);
continue Loop;
case Icode_CLOSURE_STMT :
initFunction(cx, frame.scope, frame.fnOrScript, indexReg);
continue Loop;
case Token.REGEXP :
stack[++stackTop] = frame.scriptRegExps[indexReg];
continue Loop;
case Icode_LITERAL_NEW :
// indexReg: number of values in the literal
++stackTop;
stack[stackTop] = new Object[indexReg];
sDbl[stackTop] = 0;
continue Loop;
case Icode_LITERAL_SET : {
Object value = stack[stackTop];
if (value == DBL_MRK) value = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
int i = (int)sDbl[stackTop];
((Object[])stack[stackTop])[i] = value;
sDbl[stackTop] = i + 1;
continue Loop;
}
case Token.ARRAYLIT :
case Icode_SPARE_ARRAYLIT :
case Token.OBJECTLIT : {
Object[] data = (Object[])stack[stackTop];
Object val;
if (op == Token.OBJECTLIT) {
Object[] ids = (Object[])frame.idata.literalIds[indexReg];
val = ScriptRuntime.newObjectLiteral(ids, data, cx, frame.scope);
} else {
int[] skipIndexces = null;
if (op == Icode_SPARE_ARRAYLIT) {
skipIndexces = (int[])frame.idata.literalIds[indexReg];
}
val = ScriptRuntime.newArrayLiteral(data, skipIndexces, cx,
frame.scope);
}
stack[stackTop] = val;
continue Loop;
}
case Icode_ENTERDQ : {
Object lhs = stack[stackTop];
if (lhs == DBL_MRK) lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
frame.scope = ScriptRuntime.enterDotQuery(lhs, frame.scope);
continue Loop;
}
case Icode_LEAVEDQ : {
boolean valBln = stack_boolean(frame, stackTop);
Object x = ScriptRuntime.updateDotQuery(valBln, frame.scope);
if (x != null) {
stack[stackTop] = x;
frame.scope = ScriptRuntime.leaveDotQuery(frame.scope);
frame.pc += 2;
continue Loop;
}
// reset stack and PC to code after ENTERDQ
--stackTop;
break jumplessRun;
}
case Token.DEFAULTNAMESPACE : {
Object value = stack[stackTop];
if (value == DBL_MRK) value = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.setDefaultNamespace(value, cx);
continue Loop;
}
case Token.ESCXMLATTR : {
Object value = stack[stackTop];
if (value != DBL_MRK) {
stack[stackTop] = ScriptRuntime.escapeAttributeValue(value, cx);
}
continue Loop;
}
case Token.ESCXMLTEXT : {
Object value = stack[stackTop];
if (value != DBL_MRK) {
stack[stackTop] = ScriptRuntime.escapeTextValue(value, cx);
}
continue Loop;
}
case Icode_LINE :
frame.pcSourceLineStart = frame.pc;
if (frame.debuggerFrame != null) {
int line = getIndex(iCode, frame.pc);
frame.debuggerFrame.onLineChange(cx, line);
}
frame.pc += 2;
continue Loop;
case Icode_REG_IND_C0:
indexReg = 0;
continue Loop;
case Icode_REG_IND_C1:
indexReg = 1;
continue Loop;
case Icode_REG_IND_C2:
indexReg = 2;
continue Loop;
case Icode_REG_IND_C3:
indexReg = 3;
continue Loop;
case Icode_REG_IND_C4:
indexReg = 4;
continue Loop;
case Icode_REG_IND_C5:
indexReg = 5;
continue Loop;
case Icode_REG_IND1:
indexReg = 0xFF & iCode[frame.pc];
++frame.pc;
continue Loop;
case Icode_REG_IND2:
indexReg = getIndex(iCode, frame.pc);
frame.pc += 2;
continue Loop;
case Icode_REG_IND4:
indexReg = getInt(iCode, frame.pc);
frame.pc += 4;
continue Loop;
case Icode_REG_STR_C0:
stringReg = strings[0];
continue Loop;
case Icode_REG_STR_C1:
stringReg = strings[1];
continue Loop;
case Icode_REG_STR_C2:
stringReg = strings[2];
continue Loop;
case Icode_REG_STR_C3:
stringReg = strings[3];
continue Loop;
case Icode_REG_STR1:
stringReg = strings[0xFF & iCode[frame.pc]];
++frame.pc;
continue Loop;
case Icode_REG_STR2:
stringReg = strings[getIndex(iCode, frame.pc)];
frame.pc += 2;
continue Loop;
case Icode_REG_STR4:
stringReg = strings[getInt(iCode, frame.pc)];
frame.pc += 4;
continue Loop;
default :
dumpICode(frame.idata);
throw new RuntimeException(
"Unknown icode : "+op+" @ pc : "+(frame.pc-1));
} // end of interpreter switch
} // end of jumplessRun label block
// This should be reachable only for jump implementation
// when pc points to encoded target offset
if (instructionCounting) {
addInstructionCount(cx, frame, 2);
}
int offset = getShort(iCode, frame.pc);
if (offset != 0) {
// -1 accounts for pc pointing to jump opcode + 1
frame.pc += offset - 1;
} else {
frame.pc = frame.idata.longJumps.
getExistingInt(frame.pc);
}
if (instructionCounting) {
frame.pcPrevBranch = frame.pc;
}
continue Loop;
} // end of Loop: for
exitFrame(cx, frame, null);
interpreterResult = frame.result;
interpreterResultDbl = frame.resultDbl;
if (frame.parentFrame != null) {
frame = frame.parentFrame;
if (frame.frozen) {
frame = frame.cloneFrozen();
}
setCallResult(
frame, interpreterResult, interpreterResultDbl);
interpreterResult = null; // Help GC
continue StateLoop;
}
break StateLoop;
} // end of interpreter withoutExceptions: try
catch (Throwable ex) {
if (throwable != null) {
// This is serious bug and it is better to track it ASAP
ex.printStackTrace(System.err);
throw new IllegalStateException();
}
throwable = ex;
}
// This should be reachable only after above catch or from
// finally when it needs to propagate exception or from
// explicit throw
if (throwable == null) Kit.codeBug();
// Exception type
final int EX_CATCH_STATE = 2; // Can execute JS catch
final int EX_FINALLY_STATE = 1; // Can execute JS finally
final int EX_NO_JS_STATE = 0; // Terminate JS execution
int exState;
ContinuationJump cjump = null;
if (throwable instanceof JavaScriptException) {
exState = EX_CATCH_STATE;
} else if (throwable instanceof EcmaError) {
// an offical ECMA error object,
exState = EX_CATCH_STATE;
} else if (throwable instanceof EvaluatorException) {
exState = EX_CATCH_STATE;
} else if (throwable instanceof RuntimeException) {
exState = EX_FINALLY_STATE;
} else if (throwable instanceof Error) {
exState = EX_NO_JS_STATE;
} else {
// It must be ContinuationJump
exState = EX_FINALLY_STATE;
cjump = (ContinuationJump)throwable;
}
if (instructionCounting) {
try {
addInstructionCount(cx, frame, EXCEPTION_COST);
} catch (RuntimeException ex) {
throwable = ex;
exState = EX_FINALLY_STATE;
} catch (Error ex) {
// Error from instruction counting
// => unconditionally terminate JS
throwable = ex;
cjump = null;
exState = EX_NO_JS_STATE;
}
}
if (frame.debuggerFrame != null
&& throwable instanceof RuntimeException)
{
// Call debugger only for RuntimeException
RuntimeException rex = (RuntimeException)throwable;
try {
frame.debuggerFrame.onExceptionThrown(cx, rex);
} catch (Throwable ex) {
// Any exception from debugger
// => unconditionally terminate JS
throwable = ex;
cjump = null;
exState = EX_NO_JS_STATE;
}
}
for (;;) {
if (exState != EX_NO_JS_STATE) {
boolean onlyFinally = (exState != EX_CATCH_STATE);
indexReg = getExceptionHandler(frame, onlyFinally);
if (indexReg >= 0) {
// We caught an exception, restart the loop
// with exception pending the processing at the loop
// start
continue StateLoop;
}
}
// No allowed execption handlers in this frame, unwind
// to parent and try to look there
exitFrame(cx, frame, throwable);
frame = frame.parentFrame;
if (frame == null) { break; }
if (cjump != null && cjump.branchFrame == frame) {
// Continuation branch point was hit,
// restart the state loop to reenter continuation
indexReg = -1;
continue StateLoop;
}
}
// No more frames, rethrow the exception or deal with continuation
if (cjump != null) {
if (cjump.branchFrame != null) {
// The above loop should locate the top frame
Kit.codeBug();
}
if (cjump.capturedFrame != null) {
// Restarting detached continuation
indexReg = -1;
continue StateLoop;
}
// Return continuation result to the caller
interpreterResult = cjump.result;
interpreterResultDbl = cjump.resultDbl;
throwable = null;
}
break StateLoop;
} // end of StateLoop: for(;;)
// Do cleanups/restorations before the final return or throw
if (cx.previousInterpreterInvocations != null
&& cx.previousInterpreterInvocations.size() != 0)
{
cx.lastInterpreterFrame
= cx.previousInterpreterInvocations.pop();
} else {
// It was the last interpreter frame on the stack
cx.lastInterpreterFrame = null;
// Force GC of the value cx.previousInterpreterInvocations
cx.previousInterpreterInvocations = null;
}
if (throwable != null) {
if (throwable instanceof RuntimeException) {
throw (RuntimeException)throwable;
} else {
// Must be instance of Error or code bug
throw (Error)throwable;
}
}
return (interpreterResult != DBL_MRK)
? interpreterResult
: ScriptRuntime.wrapNumber(interpreterResultDbl);
}
/**
 * Fills {@code frame} with everything needed to start interpreting
 * {@code fnOrScript}: scope, activation, nested function statements,
 * regexp literals, and the combined args/vars/locals/stack arrays.
 * The frame may reuse the stack arrays of a previous (dead) frame.
 *
 * NOTE(review): callers appear to pass either a plain {@code args}
 * array or the split {@code args}/{@code argsDbl} double-register
 * pair used by the interpreter stack — confirm against call sites.
 */
private static void initFrame(Context cx, Scriptable callerScope,
                              Scriptable thisObj,
                              Object[] args, double[] argsDbl,
                              int argShift, int argCount,
                              InterpretedFunction fnOrScript,
                              CallFrame parentFrame, CallFrame frame)
{
    InterpreterData idata = fnOrScript.idata;

    // A debugger observing this frame forces an activation object even
    // if the compiled code itself does not need one.
    boolean useActivation = idata.itsNeedsActivation;
    DebugFrame debuggerFrame = null;
    if (cx.debugger != null) {
        debuggerFrame = cx.debugger.getFrame(cx, idata);
        if (debuggerFrame != null) {
            useActivation = true;
        }
    }

    if (useActivation) {
        // Copy args to new array to pass to enterActivationFunction
        // or debuggerFrame.onEnter; the double-register representation
        // is collapsed into plain wrapped objects.
        if (argsDbl != null) {
            args = getArgsArray(args, argsDbl, argShift, argCount);
        }
        argShift = 0;
        argsDbl = null;
    }

    Scriptable scope;
    if (idata.itsFunctionType != 0) {
        // Function: scope is normally the lexical parent, unless the
        // (non-standard) dynamic-scope feature is on.
        if (!idata.useDynamicScope) {
            scope = fnOrScript.getParentScope();
        } else {
            scope = callerScope;
        }
        if (useActivation) {
            scope = ScriptRuntime.createFunctionActivation(
                fnOrScript, scope, args);
        }
    } else {
        // Top-level script: runs directly in the caller's scope.
        scope = callerScope;
        ScriptRuntime.initScript(fnOrScript, thisObj, cx, scope,
                                 fnOrScript.idata.evalScriptFlag);
    }

    if (idata.itsNestedFunctions != null) {
        // Function statements need activation; sanity-check that.
        if (idata.itsFunctionType != 0 && !idata.itsNeedsActivation)
            Kit.codeBug();
        // Hoist nested function *statements* into the scope now;
        // function expressions are created lazily by Icode_CLOSURE_EXPR.
        for (int i = 0; i < idata.itsNestedFunctions.length; i++) {
            InterpreterData fdata = idata.itsNestedFunctions[i];
            if (fdata.itsFunctionType == FunctionNode.FUNCTION_STATEMENT) {
                initFunction(cx, scope, fnOrScript, i);
            }
        }
    }

    Scriptable[] scriptRegExps = null;
    if (idata.itsRegExpLiterals != null) {
        // Wrapped regexps for functions are stored in
        // InterpretedFunction
        // but for script which should not contain references to scope
        // the regexps re-wrapped during each script execution
        if (idata.itsFunctionType != 0) {
            scriptRegExps = fnOrScript.functionRegExps;
        } else {
            scriptRegExps = fnOrScript.createRegExpWraps(cx, scope);
        }
    }

    // Initialize args, vars, locals and stack.  Layout of the frame
    // arrays: [0, itsMaxVars) vars, then locals, then operand stack.
    int emptyStackTop = idata.itsMaxVars + idata.itsMaxLocals - 1;
    int maxFrameArray = idata.itsMaxFrameArray;
    if (maxFrameArray != emptyStackTop + idata.itsMaxStack + 1)
        Kit.codeBug();

    Object[] stack;
    double[] sDbl;
    boolean stackReuse;
    if (frame.stack != null && maxFrameArray <= frame.stack.length) {
        // Reuse stacks from old frame
        stackReuse = true;
        stack = frame.stack;
        sDbl = frame.sDbl;
    } else {
        stackReuse = false;
        stack = new Object[maxFrameArray];
        sDbl = new double[maxFrameArray];
    }

    // Extra actual arguments beyond the declared count are ignored here
    // (they are available via the activation/arguments object).
    int definedArgs = idata.argCount;
    if (definedArgs > argCount) { definedArgs = argCount; }

    // Fill the frame structure
    frame.parentFrame = parentFrame;
    frame.frameIndex = (parentFrame == null)
        ? 0 : parentFrame.frameIndex + 1;
    if(frame.frameIndex > cx.getMaximumInterpreterStackDepth())
    {
        throw Context.reportRuntimeError("Exceeded maximum stack depth");
    }
    frame.frozen = false;
    frame.fnOrScript = fnOrScript;
    frame.idata = idata;
    frame.stack = stack;
    frame.sDbl = sDbl;
    frame.varSource = frame;
    frame.localShift = idata.itsMaxVars;
    frame.emptyStackTop = emptyStackTop;
    frame.debuggerFrame = debuggerFrame;
    frame.useActivation = useActivation;
    frame.thisObj = thisObj;
    frame.scriptRegExps = scriptRegExps;

    // Initialize initial values of variables that change during
    // interpretation.
    frame.result = Undefined.instance;
    frame.pc = 0;
    frame.pcPrevBranch = 0;
    frame.pcSourceLineStart = idata.firstLinePC;
    frame.scope = scope;

    frame.savedStackTop = emptyStackTop;
    frame.savedCallOp = 0;

    // Copy declared arguments into the var slots; remaining declared
    // vars are initialized to undefined.
    System.arraycopy(args, argShift, stack, 0, definedArgs);
    if (argsDbl != null) {
        System.arraycopy(argsDbl, argShift, sDbl, 0, definedArgs);
    }
    for (int i = definedArgs; i != idata.itsMaxVars; ++i) {
        stack[i] = Undefined.instance;
    }
    if (stackReuse) {
        // Clean the stack part and space beyond stack if any
        // of the old array to allow to GC objects there
        for (int i = emptyStackTop + 1; i != stack.length; ++i) {
            stack[i] = null;
        }
    }

    enterFrame(cx, frame, args, false);
}
/**
 * Tells whether enterFrame/exitFrame bookkeeping must run for this
 * frame: true when a debugger observes it or when the compiled code
 * needs an activation object.
 */
private static boolean isFrameEnterExitRequired(CallFrame frame)
{
    boolean debugged = (frame.debuggerFrame != null);
    boolean needsActivation = frame.idata.itsNeedsActivation;
    return debugged || needsActivation;
}
/**
 * Performs per-frame entry work: notifies an attached debugger and/or
 * enters the activation function.  Does nothing when neither a
 * debugger nor an activation is involved.
 *
 * @param continuationRestart true when re-entering a frozen frame as
 *        part of restarting a captured continuation; in that case
 *        frame.scope may be a NativeWith and the NativeCall must be
 *        located by walking parent scopes (see comment below).
 */
private static void enterFrame(Context cx, CallFrame frame, Object[] args, boolean continuationRestart)
{
    boolean usesActivation = frame.idata.itsNeedsActivation;
    boolean isDebugged = frame.debuggerFrame != null;
    if(usesActivation || isDebugged) {
        Scriptable scope = frame.scope;
        if(scope == null) {
            Kit.codeBug();
        } else if(continuationRestart) {
            // Walk the parent chain of frame.scope until a NativeCall is
            // found. Normally, frame.scope is a NativeCall when called
            // from initFrame() for a debugged or activatable function.
            // However, when called from interpreterLoop() as part of
            // restarting a continuation, it can also be a NativeWith if
            // the continuation was captured within a "with" or "catch"
            // block ("catch" implicitly uses NativeWith to create a scope
            // to expose the exception variable).
            for(;;) {
                if(scope instanceof NativeCall) {
                    break;
                } else {
                    scope = scope.getParentScope();
                    if(scope == null || (frame.parentFrame != null && frame.parentFrame.scope == scope)) {
                        // If we get here, we didn't find a NativeCall in
                        // the call chain before reaching parent frame's
                        // scope. This should not be possible.
                        Kit.codeBug();
                        break; // Never reached, but keeps the static analyzer happy about "scope" not being null 5 lines above.
                    }
                }
            }
        }
        if (isDebugged) {
            frame.debuggerFrame.onEnter(cx, scope, frame.thisObj, args);
        }
        // Enter activation only when itsNeedsActivation true,
        // since debugger should not interfere with activation
        // chaining
        if (usesActivation) {
            ScriptRuntime.enterActivationFunction(cx, scope);
        }
    }
}
/**
 * Performs per-frame exit work: leaves the activation function and
 * notifies an attached debugger of either an abrupt (exception) or a
 * normal (result) completion.
 *
 * @param throwable a Throwable when the frame terminates abruptly, a
 *        ContinuationJump when unwinding to a continuation branch
 *        point, or null for a normal return (see the casts below —
 *        ContinuationJump is distinguished by NOT being a Throwable).
 */
private static void exitFrame(Context cx, CallFrame frame,
                              Object throwable)
{
    if (frame.idata.itsNeedsActivation) {
        ScriptRuntime.exitActivationFunction(cx);
    }

    if (frame.debuggerFrame != null) {
        try {
            if (throwable instanceof Throwable) {
                // Abrupt completion: report the exception itself.
                frame.debuggerFrame.onExit(cx, true, throwable);
            } else {
                // Normal return or continuation unwind: report the
                // frame's (or jump's) result, unboxing a pending
                // double-register value if necessary.
                Object result;
                ContinuationJump cjump = (ContinuationJump)throwable;
                if (cjump == null) {
                    result = frame.result;
                } else {
                    result = cjump.result;
                }
                if (result == UniqueTag.DOUBLE_MARK) {
                    double resultDbl;
                    if (cjump == null) {
                        resultDbl = frame.resultDbl;
                    } else {
                        resultDbl = cjump.resultDbl;
                    }
                    result = ScriptRuntime.wrapNumber(resultDbl);
                }

                frame.debuggerFrame.onExit(cx, false, result);
            }
        } catch (Throwable ex) {
            // A broken debugger must not take down script execution.
            System.err.println(
                "RHINO USAGE WARNING: onExit terminated with exception");
            ex.printStackTrace(System.err);
        }
    }
}
/**
 * Delivers the result of a finished call back into the caller frame's
 * operand stack, at the slot recorded in savedStackTop.  For NEW, the
 * pre-created instance on the stack is only replaced when the
 * constructor returned a Scriptable, per ECMA [[Construct]] semantics.
 */
private static void setCallResult(CallFrame frame,
                                  Object callResult,
                                  double callResultDbl)
{
    int top = frame.savedStackTop;
    int callOp = frame.savedCallOp;
    if (callOp == Token.CALL) {
        frame.stack[top] = callResult;
        frame.sDbl[top] = callResultDbl;
    } else if (callOp == Token.NEW) {
        // If construct returns scriptable,
        // then it replaces on stack top saved original instance
        // of the object.
        if (callResult instanceof Scriptable) {
            frame.stack[top] = callResult;
        }
    } else {
        Kit.codeBug();
    }
    frame.savedCallOp = 0;
}
/**
 * Captures the current call chain into a Continuation object and
 * stores it at stack[stackTop].  All frames above the capturing one
 * are frozen so they can be cloned later when the continuation is
 * restarted; their unused stack slots are nulled to allow GC.
 */
private static void captureContinuation(Context cx, CallFrame frame,
                                        int stackTop)
{
    Continuation c = new Continuation();
    ScriptRuntime.setObjectProtoAndParent(
        c, ScriptRuntime.getTopCallScope(cx));

    // Make sure that all upstack frames are frozen; stop at the first
    // already-frozen frame since everything above it is frozen too.
    CallFrame x = frame.parentFrame;
    while (x != null && !x.frozen) {
        x.frozen = true;
        // Allow to GC unused stack space
        for (int i = x.savedStackTop + 1; i != x.stack.length; ++i) {
            x.stack[i] = null;
        }
        if (x.savedCallOp == Token.CALL) {
            // the call will always overwrite the stack top with the result
            x.stack[x.savedStackTop] = null;
        } else {
            if (x.savedCallOp != Token.NEW) Kit.codeBug();
            // the new operator uses stack top to store the constructed
            // object so it shall not be cleared: see comments in
            // setCallResult
        }
        x = x.parentFrame;
    }

    c.initImplementation(frame.parentFrame);
    frame.stack[stackTop] = c;
}
/**
 * Reads stack slot i as a number — from the double register when the
 * slot holds the double mark, otherwise via ToNumber on the object —
 * and converts it with the ECMA ToInt32 rules.
 */
private static int stack_int32(CallFrame frame, int i)
{
    Object boxed = frame.stack[i];
    double num = (boxed == UniqueTag.DOUBLE_MARK)
                 ? frame.sDbl[i]
                 : ScriptRuntime.toNumber(boxed);
    return ScriptRuntime.toInt32(num);
}
/**
 * Reads stack slot i as a double: the raw double register when the
 * slot holds the double mark, otherwise ToNumber of the object value.
 */
private static double stack_double(CallFrame frame, int i)
{
    Object boxed = frame.stack[i];
    return (boxed == UniqueTag.DOUBLE_MARK)
           ? frame.sDbl[i]
           : ScriptRuntime.toNumber(boxed);
}
/**
 * Reads stack slot i as an ECMA boolean, handling the common cases
 * inline (canonical Boolean instances, the double register, null and
 * undefined, Number, Boolean) before falling back to the generic
 * ToBoolean conversion.
 */
private static boolean stack_boolean(CallFrame frame, int i)
{
    Object x = frame.stack[i];
    if (x == Boolean.TRUE) {
        return true;
    }
    if (x == Boolean.FALSE) {
        return false;
    }
    if (x == UniqueTag.DOUBLE_MARK) {
        double d = frame.sDbl[i];
        // NaN (d != d) and zero are the only falsy doubles.
        return d == d && d != 0.0;
    }
    if (x == null || x == Undefined.instance) {
        return false;
    }
    if (x instanceof Number) {
        double d = ((Number)x).doubleValue();
        return d == d && d != 0.0;
    }
    if (x instanceof Boolean) {
        return ((Boolean)x).booleanValue();
    }
    return ScriptRuntime.toBoolean(x);
}
/**
 * Implements the interpreter's ADD opcode: adds stack[stackTop] (lhs)
 * and stack[stackTop + 1] (rhs), writing the result to
 * stack[stackTop]/sDbl[stackTop].  Fast paths keep numbers in the
 * unboxed double register; mixed object+number cases fall through to
 * shared code below with d holding the numeric operand and
 * leftRightOrder recording which side it came from.
 */
private static void do_add(Object[] stack, double[] sDbl, int stackTop,
                           Context cx)
{
    Object rhs = stack[stackTop + 1];
    Object lhs = stack[stackTop];
    double d;
    boolean leftRightOrder;
    if (rhs == UniqueTag.DOUBLE_MARK) {
        d = sDbl[stackTop + 1];
        if (lhs == UniqueTag.DOUBLE_MARK) {
            // Both operands are unboxed doubles: add in place.
            sDbl[stackTop] += d;
            return;
        }
        leftRightOrder = true;
        // fallthrough to object + number code
    } else if (lhs == UniqueTag.DOUBLE_MARK) {
        d = sDbl[stackTop];
        lhs = rhs;
        leftRightOrder = false;
        // fallthrough to object + number code
    } else {
        // Both operands are objects.
        if (lhs instanceof Scriptable || rhs instanceof Scriptable) {
            // Scriptable operands may need valueOf/toString: use the
            // generic runtime add.
            stack[stackTop] = ScriptRuntime.add(lhs, rhs, cx);
        } else if (lhs instanceof String) {
            String lstr = (String)lhs;
            String rstr = ScriptRuntime.toString(rhs);
            stack[stackTop] = lstr.concat(rstr);
        } else if (rhs instanceof String) {
            String lstr = ScriptRuntime.toString(lhs);
            String rstr = (String)rhs;
            stack[stackTop] = lstr.concat(rstr);
        } else {
            // Neither String nor Scriptable: numeric addition, result
            // stored as an unboxed double.
            double lDbl = (lhs instanceof Number)
                ? ((Number)lhs).doubleValue() : ScriptRuntime.toNumber(lhs);
            double rDbl = (rhs instanceof Number)
                ? ((Number)rhs).doubleValue() : ScriptRuntime.toNumber(rhs);
            stack[stackTop] = UniqueTag.DOUBLE_MARK;
            sDbl[stackTop] = lDbl + rDbl;
        }
        return;
    }

    // handle object(lhs) + number(d) code
    if (lhs instanceof Scriptable) {
        rhs = ScriptRuntime.wrapNumber(d);
        if (!leftRightOrder) {
            // Restore original operand order before the generic add.
            Object tmp = lhs;
            lhs = rhs;
            rhs = tmp;
        }
        stack[stackTop] = ScriptRuntime.add(lhs, rhs, cx);
    } else if (lhs instanceof String) {
        String lstr = (String)lhs;
        String rstr = ScriptRuntime.toString(d);
        if (leftRightOrder) {
            stack[stackTop] = lstr.concat(rstr);
        } else {
            stack[stackTop] = rstr.concat(lstr);
        }
    } else {
        double lDbl = (lhs instanceof Number)
            ? ((Number)lhs).doubleValue() : ScriptRuntime.toNumber(lhs);
        stack[stackTop] = UniqueTag.DOUBLE_MARK;
        sDbl[stackTop] = lDbl + d;
    }
}
/**
 * Collapses count interpreter stack slots starting at shift into a
 * plain argument array, boxing any unboxed doubles.  Returns the
 * shared empty-args array when count is zero.
 */
private static Object[] getArgsArray(Object[] stack, double[] sDbl,
                                     int shift, int count)
{
    if (count == 0) {
        return ScriptRuntime.emptyArgs;
    }
    Object[] args = new Object[count];
    for (int i = 0; i < count; i++) {
        Object val = stack[shift + i];
        if (val == UniqueTag.DOUBLE_MARK) {
            val = ScriptRuntime.wrapNumber(sDbl[shift + i]);
        }
        args[i] = val;
    }
    return args;
}
/**
 * Charges the instructions executed since the last branch (plus an
 * extra cost) against the context's instruction budget, and notifies
 * the context when the threshold is exceeded.
 */
private static void addInstructionCount(Context cx, CallFrame frame,
                                        int extra)
{
    int sinceLastBranch = frame.pc - frame.pcPrevBranch;
    cx.instructionCount += sinceLastBranch + extra;
    if (cx.instructionCount > cx.instructionThreshold) {
        cx.observeInstructionCount(cx.instructionCount);
        cx.instructionCount = 0;
    }
}
}
| false
| true
|
private static Object interpretLoop(Context cx, CallFrame frame,
Object throwable)
{
// throwable holds exception object to rethrow or catch
// It is also used for continuation restart in which case
// it holds ContinuationJump
final Object DBL_MRK = UniqueTag.DOUBLE_MARK;
final Object undefined = Undefined.instance;
final boolean instructionCounting = (cx.instructionThreshold != 0);
// arbitrary number to add to instructionCount when calling
// other functions
final int INVOCATION_COST = 100;
// arbitrary exception cost for instruction counting
final int EXCEPTION_COST = 100;
String stringReg = null;
int indexReg = -1;
if (cx.lastInterpreterFrame != null) {
// save the top frame from the previous interpreterLoop
// invocation on the stack
if (cx.previousInterpreterInvocations == null) {
cx.previousInterpreterInvocations = new ObjArray();
}
cx.previousInterpreterInvocations.push(cx.lastInterpreterFrame);
}
// When restarting continuation throwable is not null and to jump
// to the code that rewind continuation state indexReg should be set
// to -1.
// With the normal call throable == null and indexReg == -1 allows to
// catch bugs with using indeReg to access array eleemnts before
// initializing indexReg.
if (throwable != null) {
// Assert assumptions
if (!(throwable instanceof ContinuationJump)) {
// It should be continuation
Kit.codeBug();
}
}
Object interpreterResult = null;
double interpreterResultDbl = 0.0;
StateLoop: for (;;) {
withoutExceptions: try {
if (throwable != null) {
// Recovering from exception, indexReg contains
// the index of handler
if (indexReg >= 0) {
// Normal excepton handler, transfer
// control appropriately
if (frame.frozen) {
// XXX Deal with exceptios!!!
frame = frame.cloneFrozen();
}
int[] table = frame.idata.itsExceptionTable;
frame.pc = table[indexReg + EXCEPTION_HANDLER_SLOT];
if (instructionCounting) {
frame.pcPrevBranch = frame.pc;
}
frame.savedStackTop = frame.emptyStackTop;
int scopeLocal = frame.localShift
+ table[indexReg
+ EXCEPTION_SCOPE_SLOT];
int exLocal = frame.localShift
+ table[indexReg
+ EXCEPTION_LOCAL_SLOT];
frame.scope = (Scriptable)frame.stack[scopeLocal];
frame.stack[exLocal] = throwable;
throwable = null;
} else {
// Continuation restoration
ContinuationJump cjump = (ContinuationJump)throwable;
// Clear throwable to indicate that execptions are OK
throwable = null;
if (cjump.branchFrame != frame) Kit.codeBug();
// Check that we have at least one frozen frame
// in the case of detached continuation restoration:
// unwind code ensure that
if (cjump.capturedFrame == null) Kit.codeBug();
// Need to rewind branchFrame, capturedFrame
// and all frames in between
int rewindCount = cjump.capturedFrame.frameIndex + 1;
if (cjump.branchFrame != null) {
rewindCount -= cjump.branchFrame.frameIndex;
}
int enterCount = 0;
CallFrame[] enterFrames = null;
CallFrame x = cjump.capturedFrame;
for (int i = 0; i != rewindCount; ++i) {
if (!x.frozen) Kit.codeBug();
if (isFrameEnterExitRequired(x)) {
if (enterFrames == null) {
// Allocate enough space to store the rest
// of rewind frames in case all of them
// would require to enter
enterFrames = new CallFrame[rewindCount
- i];
}
enterFrames[enterCount] = x;
++enterCount;
}
x = x.parentFrame;
}
while (enterCount != 0) {
// execute enter: walk enterFrames in the reverse
// order since they were stored starting from
// the capturedFrame, not branchFrame
--enterCount;
x = enterFrames[enterCount];
enterFrame(cx, x, ScriptRuntime.emptyArgs, true);
}
// Continuation jump is almost done: capturedFrame
// points to the call to the function that captured
// continuation, so clone capturedFrame and
// emulate return that function with the suplied result
frame = cjump.capturedFrame.cloneFrozen();
setCallResult(frame, cjump.result, cjump.resultDbl);
// restart the execution
}
} else {
if (frame.frozen) Kit.codeBug();
}
// Use local variables for constant values in frame
// for faster access
Object[] stack = frame.stack;
double[] sDbl = frame.sDbl;
Object[] vars = frame.varSource.stack;
double[] varDbls = frame.varSource.sDbl;
byte[] iCode = frame.idata.itsICode;
String[] strings = frame.idata.itsStringTable;
// Use local for stackTop as well. Since execption handlers
// can only exist at statement level where stack is empty,
// it is necessary to save/restore stackTop only accross
// function calls and normal returns.
int stackTop = frame.savedStackTop;
// Store new frame in cx which is used for error reporting etc.
cx.lastInterpreterFrame = frame;
Loop: for (;;) {
// Exception handler assumes that PC is already incremented
// pass the instruction start when it searches the
// exception handler
int op = iCode[frame.pc++];
jumplessRun: {
// Back indent to ease imlementation reading
switch (op) {
case Token.THROW: {
Object value = stack[stackTop];
if (value == DBL_MRK) value = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
int sourceLine = getIndex(iCode, frame.pc);
throwable = new JavaScriptException(value,
frame.idata.itsSourceFile,
sourceLine);
break withoutExceptions;
}
case Token.RETHROW: {
indexReg += frame.localShift;
throwable = stack[indexReg];
break withoutExceptions;
}
case Token.GE :
case Token.LE :
case Token.GT :
case Token.LT : {
--stackTop;
Object rhs = stack[stackTop + 1];
Object lhs = stack[stackTop];
boolean valBln;
object_compare:
{
number_compare:
{
double rDbl, lDbl;
if (rhs == DBL_MRK) {
rDbl = sDbl[stackTop + 1];
lDbl = stack_double(frame, stackTop);
} else if (lhs == DBL_MRK) {
rDbl = ScriptRuntime.toNumber(rhs);
lDbl = sDbl[stackTop];
} else {
break number_compare;
}
switch (op) {
case Token.GE:
valBln = (lDbl >= rDbl);
break object_compare;
case Token.LE:
valBln = (lDbl <= rDbl);
break object_compare;
case Token.GT:
valBln = (lDbl > rDbl);
break object_compare;
case Token.LT:
valBln = (lDbl < rDbl);
break object_compare;
default:
throw Kit.codeBug();
}
}
switch (op) {
case Token.GE:
valBln = ScriptRuntime.cmp_LE(rhs, lhs);
break;
case Token.LE:
valBln = ScriptRuntime.cmp_LE(lhs, rhs);
break;
case Token.GT:
valBln = ScriptRuntime.cmp_LT(rhs, lhs);
break;
case Token.LT:
valBln = ScriptRuntime.cmp_LT(lhs, rhs);
break;
default:
throw Kit.codeBug();
}
}
stack[stackTop] = ScriptRuntime.wrapBoolean(valBln);
continue Loop;
}
case Token.IN :
case Token.INSTANCEOF : {
Object rhs = stack[stackTop];
if (rhs == DBL_MRK) rhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
Object lhs = stack[stackTop];
if (lhs == DBL_MRK) lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
boolean valBln;
if (op == Token.IN) {
valBln = ScriptRuntime.in(lhs, rhs, cx);
} else {
valBln = ScriptRuntime.instanceOf(lhs, rhs, cx);
}
stack[stackTop] = ScriptRuntime.wrapBoolean(valBln);
continue Loop;
}
case Token.EQ :
case Token.NE : {
--stackTop;
boolean valBln;
Object rhs = stack[stackTop + 1];
Object lhs = stack[stackTop];
if (rhs == DBL_MRK) {
if (lhs == DBL_MRK) {
valBln = (sDbl[stackTop] == sDbl[stackTop + 1]);
} else {
valBln = ScriptRuntime.eqNumber(sDbl[stackTop + 1], lhs);
}
} else {
if (lhs == DBL_MRK) {
valBln = ScriptRuntime.eqNumber(sDbl[stackTop], rhs);
} else {
valBln = ScriptRuntime.eq(lhs, rhs);
}
}
valBln ^= (op == Token.NE);
stack[stackTop] = ScriptRuntime.wrapBoolean(valBln);
continue Loop;
}
case Token.SHEQ :
case Token.SHNE : {
--stackTop;
Object rhs = stack[stackTop + 1];
Object lhs = stack[stackTop];
boolean valBln;
shallow_compare: {
double rdbl, ldbl;
if (rhs == DBL_MRK) {
rdbl = sDbl[stackTop + 1];
if (lhs == DBL_MRK) {
ldbl = sDbl[stackTop];
} else if (lhs instanceof Number) {
ldbl = ((Number)lhs).doubleValue();
} else {
valBln = false;
break shallow_compare;
}
} else if (lhs == DBL_MRK) {
ldbl = sDbl[stackTop];
if (rhs == DBL_MRK) {
rdbl = sDbl[stackTop + 1];
} else if (rhs instanceof Number) {
rdbl = ((Number)rhs).doubleValue();
} else {
valBln = false;
break shallow_compare;
}
} else {
valBln = ScriptRuntime.shallowEq(lhs, rhs);
break shallow_compare;
}
valBln = (ldbl == rdbl);
}
valBln ^= (op == Token.SHNE);
stack[stackTop] = ScriptRuntime.wrapBoolean(valBln);
continue Loop;
}
case Token.IFNE :
if (stack_boolean(frame, stackTop--)) {
frame.pc += 2;
continue Loop;
}
break jumplessRun;
case Token.IFEQ :
if (!stack_boolean(frame, stackTop--)) {
frame.pc += 2;
continue Loop;
}
break jumplessRun;
case Icode_IFEQ_POP :
if (!stack_boolean(frame, stackTop--)) {
frame.pc += 2;
continue Loop;
}
stack[stackTop--] = null;
break jumplessRun;
case Token.GOTO :
break jumplessRun;
case Icode_GOSUB :
++stackTop;
stack[stackTop] = DBL_MRK;
sDbl[stackTop] = frame.pc + 2;
break jumplessRun;
case Icode_STARTSUB :
if (stackTop == frame.emptyStackTop + 1) {
// Call from Icode_GOSUB: store return PC address in the local
indexReg += frame.localShift;
stack[indexReg] = stack[stackTop];
sDbl[indexReg] = sDbl[stackTop];
--stackTop;
} else {
// Call from exception handler: exception object is already stored
// in the local
if (stackTop != frame.emptyStackTop) Kit.codeBug();
}
continue Loop;
case Icode_RETSUB : {
// indexReg: local to store return address
if (instructionCounting) {
addInstructionCount(cx, frame, 0);
}
indexReg += frame.localShift;
Object value = stack[indexReg];
if (value != DBL_MRK) {
// Invocation from exception handler, restore object to rethrow
throwable = value;
break withoutExceptions;
}
// Normal return from GOSUB
frame.pc = (int)sDbl[indexReg];
if (instructionCounting) {
frame.pcPrevBranch = frame.pc;
}
continue Loop;
}
case Icode_POP :
stack[stackTop] = null;
stackTop--;
continue Loop;
case Icode_POP_RESULT :
frame.result = stack[stackTop];
frame.resultDbl = sDbl[stackTop];
stack[stackTop] = null;
--stackTop;
continue Loop;
case Icode_DUP :
stack[stackTop + 1] = stack[stackTop];
sDbl[stackTop + 1] = sDbl[stackTop];
stackTop++;
continue Loop;
case Icode_DUP2 :
stack[stackTop + 1] = stack[stackTop - 1];
sDbl[stackTop + 1] = sDbl[stackTop - 1];
stack[stackTop + 2] = stack[stackTop];
sDbl[stackTop + 2] = sDbl[stackTop];
stackTop += 2;
continue Loop;
case Icode_SWAP : {
Object o = stack[stackTop];
stack[stackTop] = stack[stackTop - 1];
stack[stackTop - 1] = o;
double d = sDbl[stackTop];
sDbl[stackTop] = sDbl[stackTop - 1];
sDbl[stackTop - 1] = d;
continue Loop;
}
case Token.RETURN :
frame.result = stack[stackTop];
frame.resultDbl = sDbl[stackTop];
--stackTop;
break Loop;
case Token.RETURN_RESULT :
break Loop;
case Icode_RETUNDEF :
frame.result = undefined;
break Loop;
case Token.BITNOT : {
int rIntValue = stack_int32(frame, stackTop);
stack[stackTop] = DBL_MRK;
sDbl[stackTop] = ~rIntValue;
continue Loop;
}
case Token.BITAND :
case Token.BITOR :
case Token.BITXOR :
case Token.LSH :
case Token.RSH : {
int rIntValue = stack_int32(frame, stackTop);
--stackTop;
int lIntValue = stack_int32(frame, stackTop);
stack[stackTop] = DBL_MRK;
switch (op) {
case Token.BITAND:
lIntValue &= rIntValue;
break;
case Token.BITOR:
lIntValue |= rIntValue;
break;
case Token.BITXOR:
lIntValue ^= rIntValue;
break;
case Token.LSH:
lIntValue <<= rIntValue;
break;
case Token.RSH:
lIntValue >>= rIntValue;
break;
}
sDbl[stackTop] = lIntValue;
continue Loop;
}
case Token.URSH : {
int rIntValue = stack_int32(frame, stackTop) & 0x1F;
--stackTop;
double lDbl = stack_double(frame, stackTop);
stack[stackTop] = DBL_MRK;
sDbl[stackTop] = ScriptRuntime.toUint32(lDbl) >>> rIntValue;
continue Loop;
}
case Token.NEG :
case Token.POS : {
double rDbl = stack_double(frame, stackTop);
stack[stackTop] = DBL_MRK;
if (op == Token.NEG) {
rDbl = -rDbl;
}
sDbl[stackTop] = rDbl;
continue Loop;
}
case Token.ADD :
--stackTop;
do_add(stack, sDbl, stackTop, cx);
continue Loop;
case Token.SUB :
case Token.MUL :
case Token.DIV :
case Token.MOD : {
double rDbl = stack_double(frame, stackTop);
--stackTop;
double lDbl = stack_double(frame, stackTop);
stack[stackTop] = DBL_MRK;
switch (op) {
case Token.SUB:
lDbl -= rDbl;
break;
case Token.MUL:
lDbl *= rDbl;
break;
case Token.DIV:
lDbl /= rDbl;
break;
case Token.MOD:
lDbl %= rDbl;
break;
}
sDbl[stackTop] = lDbl;
continue Loop;
}
case Token.NOT :
stack[stackTop] = ScriptRuntime.wrapBoolean(
!stack_boolean(frame, stackTop));
continue Loop;
case Token.BINDNAME :
stack[++stackTop] = ScriptRuntime.bind(cx, frame.scope, stringReg);
continue Loop;
case Token.SETNAME : {
Object rhs = stack[stackTop];
if (rhs == DBL_MRK) rhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
Scriptable lhs = (Scriptable)stack[stackTop];
stack[stackTop] = ScriptRuntime.setName(lhs, rhs, cx,
frame.scope, stringReg);
continue Loop;
}
case Token.DELPROP : {
Object rhs = stack[stackTop];
if (rhs == DBL_MRK) rhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
Object lhs = stack[stackTop];
if (lhs == DBL_MRK) lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.delete(lhs, rhs, cx);
continue Loop;
}
case Token.GETPROP : {
Object lhs = stack[stackTop];
if (lhs == DBL_MRK) lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.getObjectProp(lhs, stringReg, cx);
continue Loop;
}
case Token.SETPROP : {
Object rhs = stack[stackTop];
if (rhs == DBL_MRK) rhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
Object lhs = stack[stackTop];
if (lhs == DBL_MRK) lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.setObjectProp(lhs, stringReg, rhs,
cx);
continue Loop;
}
case Icode_PROP_INC_DEC : {
Object lhs = stack[stackTop];
if (lhs == DBL_MRK) lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.propIncrDecr(lhs, stringReg,
cx, iCode[frame.pc]);
++frame.pc;
continue Loop;
}
case Token.GETELEM : {
--stackTop;
Object lhs = stack[stackTop];
if (lhs == DBL_MRK) {
lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
}
Object value;
Object id = stack[stackTop + 1];
if (id != DBL_MRK) {
value = ScriptRuntime.getObjectElem(lhs, id, cx);
} else {
double d = sDbl[stackTop + 1];
value = ScriptRuntime.getObjectIndex(lhs, d, cx);
}
stack[stackTop] = value;
continue Loop;
}
case Token.SETELEM : {
stackTop -= 2;
Object rhs = stack[stackTop + 2];
if (rhs == DBL_MRK) {
rhs = ScriptRuntime.wrapNumber(sDbl[stackTop + 2]);
}
Object lhs = stack[stackTop];
if (lhs == DBL_MRK) {
lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
}
Object value;
Object id = stack[stackTop + 1];
if (id != DBL_MRK) {
value = ScriptRuntime.setObjectElem(lhs, id, rhs, cx);
} else {
double d = sDbl[stackTop + 1];
value = ScriptRuntime.setObjectIndex(lhs, d, rhs, cx);
}
stack[stackTop] = value;
continue Loop;
}
case Icode_ELEM_INC_DEC: {
Object rhs = stack[stackTop];
if (rhs == DBL_MRK) rhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
Object lhs = stack[stackTop];
if (lhs == DBL_MRK) lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.elemIncrDecr(lhs, rhs, cx,
iCode[frame.pc]);
++frame.pc;
continue Loop;
}
case Token.GET_REF : {
Ref ref = (Ref)stack[stackTop];
stack[stackTop] = ScriptRuntime.refGet(ref, cx);
continue Loop;
}
case Token.SET_REF : {
Object value = stack[stackTop];
if (value == DBL_MRK) value = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
Ref ref = (Ref)stack[stackTop];
stack[stackTop] = ScriptRuntime.refSet(ref, value, cx);
continue Loop;
}
case Token.DEL_REF : {
Ref ref = (Ref)stack[stackTop];
stack[stackTop] = ScriptRuntime.refDel(ref, cx);
continue Loop;
}
case Icode_REF_INC_DEC : {
Ref ref = (Ref)stack[stackTop];
stack[stackTop] = ScriptRuntime.refIncrDecr(ref, cx, iCode[frame.pc]);
++frame.pc;
continue Loop;
}
case Token.LOCAL_LOAD :
++stackTop;
indexReg += frame.localShift;
stack[stackTop] = stack[indexReg];
sDbl[stackTop] = sDbl[indexReg];
continue Loop;
case Icode_LOCAL_CLEAR :
indexReg += frame.localShift;
stack[indexReg] = null;
continue Loop;
case Icode_NAME_AND_THIS :
// stringReg: name
++stackTop;
stack[stackTop] = ScriptRuntime.getNameFunctionAndThis(stringReg,
cx, frame.scope);
++stackTop;
stack[stackTop] = ScriptRuntime.lastStoredScriptable(cx);
continue Loop;
case Icode_PROP_AND_THIS: {
Object obj = stack[stackTop];
if (obj == DBL_MRK) obj = ScriptRuntime.wrapNumber(sDbl[stackTop]);
// stringReg: property
stack[stackTop] = ScriptRuntime.getPropFunctionAndThis(obj, stringReg,
cx);
++stackTop;
stack[stackTop] = ScriptRuntime.lastStoredScriptable(cx);
continue Loop;
}
case Icode_ELEM_AND_THIS: {
Object obj = stack[stackTop - 1];
if (obj == DBL_MRK) obj = ScriptRuntime.wrapNumber(sDbl[stackTop - 1]);
Object id = stack[stackTop];
if (id == DBL_MRK) id = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop - 1] = ScriptRuntime.getElemFunctionAndThis(obj, id, cx);
stack[stackTop] = ScriptRuntime.lastStoredScriptable(cx);
continue Loop;
}
case Icode_VALUE_AND_THIS : {
Object value = stack[stackTop];
if (value == DBL_MRK) value = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.getValueFunctionAndThis(value, cx);
++stackTop;
stack[stackTop] = ScriptRuntime.lastStoredScriptable(cx);
continue Loop;
}
case Icode_CALLSPECIAL : {
if (instructionCounting) {
cx.instructionCount += INVOCATION_COST;
}
int callType = iCode[frame.pc] & 0xFF;
boolean isNew = (iCode[frame.pc + 1] != 0);
int sourceLine = getIndex(iCode, frame.pc + 2);
// indexReg: number of arguments
if (isNew) {
// stack change: function arg0 .. argN -> newResult
stackTop -= indexReg;
Object function = stack[stackTop];
if (function == DBL_MRK)
function = ScriptRuntime.wrapNumber(sDbl[stackTop]);
Object[] outArgs = getArgsArray(
stack, sDbl, stackTop + 1, indexReg);
stack[stackTop] = ScriptRuntime.newSpecial(
cx, function, outArgs, frame.scope, callType);
} else {
// stack change: function thisObj arg0 .. argN -> result
stackTop -= 1 + indexReg;
// Call code generation ensure that stack here
// is ... Callable Scriptable
Scriptable functionThis = (Scriptable)stack[stackTop + 1];
Callable function = (Callable)stack[stackTop];
Object[] outArgs = getArgsArray(
stack, sDbl, stackTop + 2, indexReg);
stack[stackTop] = ScriptRuntime.callSpecial(
cx, function, functionThis, outArgs,
frame.scope, frame.thisObj, callType,
frame.idata.itsSourceFile, sourceLine);
}
frame.pc += 4;
continue Loop;
}
case Token.CALL :
case Icode_TAIL_CALL :
case Token.REF_CALL : {
if (instructionCounting) {
cx.instructionCount += INVOCATION_COST;
}
// stack change: function thisObj arg0 .. argN -> result
// indexReg: number of arguments
stackTop -= 1 + indexReg;
// CALL generation ensures that fun and funThisObj
// are already Scriptable and Callable objects respectively
Callable fun = (Callable)stack[stackTop];
Scriptable funThisObj = (Scriptable)stack[stackTop + 1];
if (op == Token.REF_CALL) {
Object[] outArgs = getArgsArray(stack, sDbl, stackTop + 2,
indexReg);
stack[stackTop] = ScriptRuntime.callRef(fun, funThisObj,
outArgs, cx);
continue Loop;
}
Scriptable calleeScope = frame.scope;
if (frame.useActivation) {
calleeScope = ScriptableObject.getTopLevelScope(frame.scope);
}
if (fun instanceof InterpretedFunction) {
InterpretedFunction ifun = (InterpretedFunction)fun;
if (frame.fnOrScript.securityDomain == ifun.securityDomain) {
CallFrame callParentFrame = frame;
CallFrame calleeFrame = new CallFrame();
if (op == Icode_TAIL_CALL) {
// In principle tail call can re-use the current
// frame and its stack arrays but it is hard to
// do properly. Any exceptions that can legally
// happen during frame re-initialization including
// StackOverflowException during innocent looking
// System.arraycopy may leave the current frame
// data corrupted leading to undefined behaviour
// in the catch code bellow that unwinds JS stack
// on exceptions. Then there is issue about frame release
// end exceptions there.
// To avoid frame allocation a released frame
// can be cached for re-use which would also benefit
// non-tail calls but it is not clear that this caching
// would gain in performance due to potentially
// bad iteraction with GC.
callParentFrame = frame.parentFrame;
}
initFrame(cx, calleeScope, funThisObj, stack, sDbl,
stackTop + 2, indexReg, ifun, callParentFrame,
calleeFrame);
if (op == Icode_TAIL_CALL) {
// Release the parent
exitFrame(cx, frame, null);
} else {
frame.savedStackTop = stackTop;
frame.savedCallOp = op;
}
frame = calleeFrame;
continue StateLoop;
}
}
if (fun instanceof Continuation) {
// Jump to the captured continuation
ContinuationJump cjump;
cjump = new ContinuationJump((Continuation)fun, frame);
// continuation result is the first argument if any
// of contination call
if (indexReg == 0) {
cjump.result = undefined;
} else {
cjump.result = stack[stackTop + 2];
cjump.resultDbl = sDbl[stackTop + 2];
}
// Start the real unwind job
throwable = cjump;
break withoutExceptions;
}
if (fun instanceof IdFunctionObject) {
IdFunctionObject ifun = (IdFunctionObject)fun;
if (Continuation.isContinuationConstructor(ifun)) {
captureContinuation(cx, frame, stackTop);
continue Loop;
}
}
Object[] outArgs = getArgsArray(stack, sDbl, stackTop + 2,
indexReg);
stack[stackTop] = fun.call(cx, calleeScope, funThisObj, outArgs);
continue Loop;
}
case Token.NEW : {
if (instructionCounting) {
cx.instructionCount += INVOCATION_COST;
}
// stack change: function arg0 .. argN -> newResult
// indexReg: number of arguments
stackTop -= indexReg;
Object lhs = stack[stackTop];
if (lhs instanceof InterpretedFunction) {
InterpretedFunction f = (InterpretedFunction)lhs;
if (frame.fnOrScript.securityDomain == f.securityDomain) {
Scriptable newInstance = f.createObject(cx, frame.scope);
CallFrame calleeFrame = new CallFrame();
initFrame(cx, frame.scope, newInstance, stack, sDbl,
stackTop + 1, indexReg, f, frame,
calleeFrame);
stack[stackTop] = newInstance;
frame.savedStackTop = stackTop;
frame.savedCallOp = op;
frame = calleeFrame;
continue StateLoop;
}
}
if (!(lhs instanceof Function)) {
if (lhs == DBL_MRK) lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
throw ScriptRuntime.notFunctionError(lhs);
}
Function fun = (Function)lhs;
if (fun instanceof IdFunctionObject) {
IdFunctionObject ifun = (IdFunctionObject)fun;
if (Continuation.isContinuationConstructor(ifun)) {
captureContinuation(cx, frame, stackTop);
continue Loop;
}
}
Object[] outArgs = getArgsArray(stack, sDbl, stackTop + 1, indexReg);
stack[stackTop] = fun.construct(cx, frame.scope, outArgs);
continue Loop;
}
case Token.TYPEOF : {
Object lhs = stack[stackTop];
if (lhs == DBL_MRK) lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.typeof(lhs);
continue Loop;
}
case Icode_TYPEOFNAME :
stack[++stackTop] = ScriptRuntime.typeofName(frame.scope, stringReg);
continue Loop;
case Token.STRING :
stack[++stackTop] = stringReg;
continue Loop;
case Icode_SHORTNUMBER :
++stackTop;
stack[stackTop] = DBL_MRK;
sDbl[stackTop] = getShort(iCode, frame.pc);
frame.pc += 2;
continue Loop;
case Icode_INTNUMBER :
++stackTop;
stack[stackTop] = DBL_MRK;
sDbl[stackTop] = getInt(iCode, frame.pc);
frame.pc += 4;
continue Loop;
case Token.NUMBER :
++stackTop;
stack[stackTop] = DBL_MRK;
sDbl[stackTop] = frame.idata.itsDoubleTable[indexReg];
continue Loop;
case Token.NAME :
stack[++stackTop] = ScriptRuntime.name(cx, frame.scope, stringReg);
continue Loop;
case Icode_NAME_INC_DEC :
stack[++stackTop] = ScriptRuntime.nameIncrDecr(frame.scope, stringReg,
iCode[frame.pc]);
++frame.pc;
continue Loop;
case Icode_SETVAR1:
indexReg = iCode[frame.pc++];
// fallthrough
case Token.SETVAR :
if (!frame.useActivation) {
vars[indexReg] = stack[stackTop];
varDbls[indexReg] = sDbl[stackTop];
} else {
Object val = stack[stackTop];
if (val == DBL_MRK) val = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stringReg = frame.idata.argNames[indexReg];
frame.scope.put(stringReg, frame.scope, val);
}
continue Loop;
case Icode_GETVAR1:
indexReg = iCode[frame.pc++];
// fallthrough
case Token.GETVAR :
++stackTop;
if (!frame.useActivation) {
stack[stackTop] = vars[indexReg];
sDbl[stackTop] = varDbls[indexReg];
} else {
stringReg = frame.idata.argNames[indexReg];
stack[stackTop] = frame.scope.get(stringReg, frame.scope);
}
continue Loop;
case Icode_VAR_INC_DEC : {
// indexReg : varindex
++stackTop;
int incrDecrMask = iCode[frame.pc];
if (!frame.useActivation) {
stack[stackTop] = DBL_MRK;
Object varValue = vars[indexReg];
double d;
if (varValue == DBL_MRK) {
d = varDbls[indexReg];
} else {
d = ScriptRuntime.toNumber(varValue);
vars[indexReg] = DBL_MRK;
}
double d2 = ((incrDecrMask & Node.DECR_FLAG) == 0)
? d + 1.0 : d - 1.0;
varDbls[indexReg] = d2;
sDbl[stackTop] = ((incrDecrMask & Node.POST_FLAG) == 0) ? d2 : d;
} else {
String varName = frame.idata.argNames[indexReg];
stack[stackTop] = ScriptRuntime.nameIncrDecr(frame.scope, varName,
incrDecrMask);
}
++frame.pc;
continue Loop;
}
case Icode_ZERO :
++stackTop;
stack[stackTop] = DBL_MRK;
sDbl[stackTop] = 0;
continue Loop;
case Icode_ONE :
++stackTop;
stack[stackTop] = DBL_MRK;
sDbl[stackTop] = 1;
continue Loop;
case Token.NULL :
stack[++stackTop] = null;
continue Loop;
case Token.THIS :
stack[++stackTop] = frame.thisObj;
continue Loop;
case Token.THISFN :
stack[++stackTop] = frame.fnOrScript;
continue Loop;
case Token.FALSE :
stack[++stackTop] = Boolean.FALSE;
continue Loop;
case Token.TRUE :
stack[++stackTop] = Boolean.TRUE;
continue Loop;
case Icode_UNDEF :
stack[++stackTop] = undefined;
continue Loop;
case Token.ENTERWITH : {
Object lhs = stack[stackTop];
if (lhs == DBL_MRK) lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
frame.scope = ScriptRuntime.enterWith(lhs, cx, frame.scope);
continue Loop;
}
case Token.LEAVEWITH :
frame.scope = ScriptRuntime.leaveWith(frame.scope);
continue Loop;
case Token.CATCH_SCOPE : {
// stack top: exception object
// stringReg: name of exception variable
// indexReg: local for exception scope
--stackTop;
indexReg += frame.localShift;
boolean afterFirstScope = (frame.idata.itsICode[frame.pc] != 0);
Throwable caughtException = (Throwable)stack[stackTop + 1];
Scriptable lastCatchScope;
if (!afterFirstScope) {
lastCatchScope = null;
} else {
lastCatchScope = (Scriptable)stack[indexReg];
}
stack[indexReg] = ScriptRuntime.newCatchScope(caughtException,
lastCatchScope, stringReg,
cx, frame.scope);
++frame.pc;
continue Loop;
}
case Token.ENUM_INIT_KEYS :
case Token.ENUM_INIT_VALUES : {
Object lhs = stack[stackTop];
if (lhs == DBL_MRK) lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
indexReg += frame.localShift;
stack[indexReg] = ScriptRuntime.enumInit(
lhs, cx, (op == Token.ENUM_INIT_VALUES));
continue Loop;
}
case Token.ENUM_NEXT :
case Token.ENUM_ID : {
indexReg += frame.localShift;
Object val = stack[indexReg];
++stackTop;
stack[stackTop] = (op == Token.ENUM_NEXT)
? (Object)ScriptRuntime.enumNext(val)
: (Object)ScriptRuntime.enumId(val, cx);
continue Loop;
}
case Token.REF_SPECIAL : {
//stringReg: name of special property
Object obj = stack[stackTop];
if (obj == DBL_MRK) obj = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.specialRef(obj, stringReg, cx);
continue Loop;
}
case Token.REF_MEMBER: {
//indexReg: flags
Object elem = stack[stackTop];
if (elem == DBL_MRK) elem = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
Object obj = stack[stackTop];
if (obj == DBL_MRK) obj = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.memberRef(obj, elem, cx, indexReg);
continue Loop;
}
case Token.REF_NS_MEMBER: {
//indexReg: flags
Object elem = stack[stackTop];
if (elem == DBL_MRK) elem = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
Object ns = stack[stackTop];
if (ns == DBL_MRK) ns = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
Object obj = stack[stackTop];
if (obj == DBL_MRK) obj = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.memberRef(obj, ns, elem, cx, indexReg);
continue Loop;
}
case Token.REF_NAME: {
//indexReg: flags
Object name = stack[stackTop];
if (name == DBL_MRK) name = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.nameRef(name, cx, frame.scope,
indexReg);
continue Loop;
}
case Token.REF_NS_NAME: {
//indexReg: flags
Object name = stack[stackTop];
if (name == DBL_MRK) name = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
Object ns = stack[stackTop];
if (ns == DBL_MRK) ns = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.nameRef(ns, name, cx, frame.scope,
indexReg);
continue Loop;
}
case Icode_SCOPE_LOAD :
indexReg += frame.localShift;
frame.scope = (Scriptable)stack[indexReg];
continue Loop;
case Icode_SCOPE_SAVE :
indexReg += frame.localShift;
stack[indexReg] = frame.scope;
continue Loop;
case Icode_CLOSURE_EXPR :
stack[++stackTop] = InterpretedFunction.createFunction(cx, frame.scope,
frame.fnOrScript,
indexReg);
continue Loop;
case Icode_CLOSURE_STMT :
initFunction(cx, frame.scope, frame.fnOrScript, indexReg);
continue Loop;
case Token.REGEXP :
stack[++stackTop] = frame.scriptRegExps[indexReg];
continue Loop;
case Icode_LITERAL_NEW :
// indexReg: number of values in the literal
++stackTop;
stack[stackTop] = new Object[indexReg];
sDbl[stackTop] = 0;
continue Loop;
case Icode_LITERAL_SET : {
Object value = stack[stackTop];
if (value == DBL_MRK) value = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
int i = (int)sDbl[stackTop];
((Object[])stack[stackTop])[i] = value;
sDbl[stackTop] = i + 1;
continue Loop;
}
case Token.ARRAYLIT :
case Icode_SPARE_ARRAYLIT :
case Token.OBJECTLIT : {
Object[] data = (Object[])stack[stackTop];
Object val;
if (op == Token.OBJECTLIT) {
Object[] ids = (Object[])frame.idata.literalIds[indexReg];
val = ScriptRuntime.newObjectLiteral(ids, data, cx, frame.scope);
} else {
int[] skipIndexces = null;
if (op == Icode_SPARE_ARRAYLIT) {
skipIndexces = (int[])frame.idata.literalIds[indexReg];
}
val = ScriptRuntime.newArrayLiteral(data, skipIndexces, cx,
frame.scope);
}
stack[stackTop] = val;
continue Loop;
}
case Icode_ENTERDQ : {
Object lhs = stack[stackTop];
if (lhs == DBL_MRK) lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
frame.scope = ScriptRuntime.enterDotQuery(lhs, frame.scope);
continue Loop;
}
case Icode_LEAVEDQ : {
boolean valBln = stack_boolean(frame, stackTop);
Object x = ScriptRuntime.updateDotQuery(valBln, frame.scope);
if (x != null) {
stack[stackTop] = x;
frame.scope = ScriptRuntime.leaveDotQuery(frame.scope);
frame.pc += 2;
continue Loop;
}
// reset stack and PC to code after ENTERDQ
--stackTop;
break jumplessRun;
}
case Token.DEFAULTNAMESPACE : {
Object value = stack[stackTop];
if (value == DBL_MRK) value = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.setDefaultNamespace(value, cx);
continue Loop;
}
case Token.ESCXMLATTR : {
Object value = stack[stackTop];
if (value != DBL_MRK) {
stack[stackTop] = ScriptRuntime.escapeAttributeValue(value, cx);
}
continue Loop;
}
case Token.ESCXMLTEXT : {
Object value = stack[stackTop];
if (value != DBL_MRK) {
stack[stackTop] = ScriptRuntime.escapeTextValue(value, cx);
}
continue Loop;
}
case Icode_LINE :
frame.pcSourceLineStart = frame.pc;
if (frame.debuggerFrame != null) {
int line = getIndex(iCode, frame.pc);
frame.debuggerFrame.onLineChange(cx, line);
}
frame.pc += 2;
continue Loop;
case Icode_REG_IND_C0:
indexReg = 0;
continue Loop;
case Icode_REG_IND_C1:
indexReg = 1;
continue Loop;
case Icode_REG_IND_C2:
indexReg = 2;
continue Loop;
case Icode_REG_IND_C3:
indexReg = 3;
continue Loop;
case Icode_REG_IND_C4:
indexReg = 4;
continue Loop;
case Icode_REG_IND_C5:
indexReg = 5;
continue Loop;
case Icode_REG_IND1:
indexReg = 0xFF & iCode[frame.pc];
++frame.pc;
continue Loop;
case Icode_REG_IND2:
indexReg = getIndex(iCode, frame.pc);
frame.pc += 2;
continue Loop;
case Icode_REG_IND4:
indexReg = getInt(iCode, frame.pc);
frame.pc += 4;
continue Loop;
case Icode_REG_STR_C0:
stringReg = strings[0];
continue Loop;
case Icode_REG_STR_C1:
stringReg = strings[1];
continue Loop;
case Icode_REG_STR_C2:
stringReg = strings[2];
continue Loop;
case Icode_REG_STR_C3:
stringReg = strings[3];
continue Loop;
case Icode_REG_STR1:
stringReg = strings[0xFF & iCode[frame.pc]];
++frame.pc;
continue Loop;
case Icode_REG_STR2:
stringReg = strings[getIndex(iCode, frame.pc)];
frame.pc += 2;
continue Loop;
case Icode_REG_STR4:
stringReg = strings[getInt(iCode, frame.pc)];
frame.pc += 4;
continue Loop;
default :
dumpICode(frame.idata);
throw new RuntimeException(
"Unknown icode : "+op+" @ pc : "+(frame.pc-1));
} // end of interpreter switch
} // end of jumplessRun label block
// This should be reachable only for jump implementation
// when pc points to encoded target offset
if (instructionCounting) {
addInstructionCount(cx, frame, 2);
}
int offset = getShort(iCode, frame.pc);
if (offset != 0) {
// -1 accounts for pc pointing to jump opcode + 1
frame.pc += offset - 1;
} else {
frame.pc = frame.idata.longJumps.
getExistingInt(frame.pc);
}
if (instructionCounting) {
frame.pcPrevBranch = frame.pc;
}
continue Loop;
} // end of Loop: for
exitFrame(cx, frame, null);
interpreterResult = frame.result;
interpreterResultDbl = frame.resultDbl;
if (frame.parentFrame != null) {
frame = frame.parentFrame;
if (frame.frozen) {
frame = frame.cloneFrozen();
}
setCallResult(
frame, interpreterResult, interpreterResultDbl);
interpreterResult = null; // Help GC
continue StateLoop;
}
break StateLoop;
} // end of interpreter withoutExceptions: try
catch (Throwable ex) {
if (throwable != null) {
// This is serious bug and it is better to track it ASAP
ex.printStackTrace(System.err);
throw new IllegalStateException();
}
throwable = ex;
}
// This should be reachable only after above catch or from
// finally when it needs to propagate exception or from
// explicit throw
if (throwable == null) Kit.codeBug();
// Exception type
final int EX_CATCH_STATE = 2; // Can execute JS catch
final int EX_FINALLY_STATE = 1; // Can execute JS finally
final int EX_NO_JS_STATE = 0; // Terminate JS execution
int exState;
ContinuationJump cjump = null;
if (throwable instanceof JavaScriptException) {
exState = EX_CATCH_STATE;
} else if (throwable instanceof EcmaError) {
// an offical ECMA error object,
exState = EX_CATCH_STATE;
} else if (throwable instanceof EvaluatorException) {
exState = EX_CATCH_STATE;
} else if (throwable instanceof RuntimeException) {
exState = EX_FINALLY_STATE;
} else if (throwable instanceof Error) {
exState = EX_NO_JS_STATE;
} else {
// It must be ContinuationJump
exState = EX_FINALLY_STATE;
cjump = (ContinuationJump)throwable;
}
if (instructionCounting) {
try {
addInstructionCount(cx, frame, EXCEPTION_COST);
} catch (RuntimeException ex) {
throwable = ex;
exState = EX_FINALLY_STATE;
} catch (Error ex) {
// Error from instruction counting
// => unconditionally terminate JS
throwable = ex;
cjump = null;
exState = EX_NO_JS_STATE;
}
}
if (frame.debuggerFrame != null
&& throwable instanceof RuntimeException)
{
// Call debugger only for RuntimeException
RuntimeException rex = (RuntimeException)throwable;
try {
frame.debuggerFrame.onExceptionThrown(cx, rex);
} catch (Throwable ex) {
// Any exception from debugger
// => unconditionally terminate JS
throwable = ex;
cjump = null;
exState = EX_NO_JS_STATE;
}
}
for (;;) {
if (exState != EX_NO_JS_STATE) {
boolean onlyFinally = (exState != EX_CATCH_STATE);
indexReg = getExceptionHandler(frame, onlyFinally);
if (indexReg >= 0) {
// We caught an exception, restart the loop
// with exception pending the processing at the loop
// start
continue StateLoop;
}
}
// No allowed execption handlers in this frame, unwind
// to parent and try to look there
exitFrame(cx, frame, throwable);
frame = frame.parentFrame;
if (frame == null) { break; }
if (cjump != null && cjump.branchFrame == frame) {
// Continuation branch point was hit,
// restart the state loop to reenter continuation
indexReg = -1;
continue StateLoop;
}
}
// No more frames, rethrow the exception or deal with continuation
if (cjump != null) {
if (cjump.branchFrame != null) {
// The above loop should locate the top frame
Kit.codeBug();
}
if (cjump.capturedFrame != null) {
// Restarting detached continuation
indexReg = -1;
continue StateLoop;
}
// Return continuation result to the caller
interpreterResult = cjump.result;
interpreterResultDbl = cjump.resultDbl;
throwable = null;
}
break StateLoop;
} // end of StateLoop: for(;;)
// Do cleanups/restorations before the final return or throw
if (cx.previousInterpreterInvocations != null
&& cx.previousInterpreterInvocations.size() != 0)
{
cx.lastInterpreterFrame
= cx.previousInterpreterInvocations.pop();
} else {
// It was the last interpreter frame on the stack
cx.lastInterpreterFrame = null;
// Force GC of the value cx.previousInterpreterInvocations
cx.previousInterpreterInvocations = null;
}
if (throwable != null) {
if (throwable instanceof RuntimeException) {
throw (RuntimeException)throwable;
} else {
// Must be instance of Error or code bug
throw (Error)throwable;
}
}
return (interpreterResult != DBL_MRK)
? interpreterResult
: ScriptRuntime.wrapNumber(interpreterResultDbl);
}
private static Object interpretLoop(Context cx, CallFrame frame,
Object throwable)
{
// throwable holds exception object to rethrow or catch
// It is also used for continuation restart in which case
// it holds ContinuationJump
final Object DBL_MRK = UniqueTag.DOUBLE_MARK;
final Object undefined = Undefined.instance;
final boolean instructionCounting = (cx.instructionThreshold != 0);
// arbitrary number to add to instructionCount when calling
// other functions
final int INVOCATION_COST = 100;
// arbitrary exception cost for instruction counting
final int EXCEPTION_COST = 100;
String stringReg = null;
int indexReg = -1;
if (cx.lastInterpreterFrame != null) {
// save the top frame from the previous interpreterLoop
// invocation on the stack
if (cx.previousInterpreterInvocations == null) {
cx.previousInterpreterInvocations = new ObjArray();
}
cx.previousInterpreterInvocations.push(cx.lastInterpreterFrame);
}
// When restarting continuation throwable is not null and to jump
// to the code that rewind continuation state indexReg should be set
// to -1.
// With the normal call throable == null and indexReg == -1 allows to
// catch bugs with using indeReg to access array eleemnts before
// initializing indexReg.
if (throwable != null) {
// Assert assumptions
if (!(throwable instanceof ContinuationJump)) {
// It should be continuation
Kit.codeBug();
}
}
Object interpreterResult = null;
double interpreterResultDbl = 0.0;
StateLoop: for (;;) {
withoutExceptions: try {
if (throwable != null) {
// Recovering from exception, indexReg contains
// the index of handler
if (indexReg >= 0) {
// Normal excepton handler, transfer
// control appropriately
if (frame.frozen) {
// XXX Deal with exceptios!!!
frame = frame.cloneFrozen();
}
int[] table = frame.idata.itsExceptionTable;
frame.pc = table[indexReg + EXCEPTION_HANDLER_SLOT];
if (instructionCounting) {
frame.pcPrevBranch = frame.pc;
}
frame.savedStackTop = frame.emptyStackTop;
int scopeLocal = frame.localShift
+ table[indexReg
+ EXCEPTION_SCOPE_SLOT];
int exLocal = frame.localShift
+ table[indexReg
+ EXCEPTION_LOCAL_SLOT];
frame.scope = (Scriptable)frame.stack[scopeLocal];
frame.stack[exLocal] = throwable;
throwable = null;
} else {
// Continuation restoration
ContinuationJump cjump = (ContinuationJump)throwable;
// Clear throwable to indicate that execptions are OK
throwable = null;
if (cjump.branchFrame != frame) Kit.codeBug();
// Check that we have at least one frozen frame
// in the case of detached continuation restoration:
// unwind code ensure that
if (cjump.capturedFrame == null) Kit.codeBug();
// Need to rewind branchFrame, capturedFrame
// and all frames in between
int rewindCount = cjump.capturedFrame.frameIndex + 1;
if (cjump.branchFrame != null) {
rewindCount -= cjump.branchFrame.frameIndex;
}
int enterCount = 0;
CallFrame[] enterFrames = null;
CallFrame x = cjump.capturedFrame;
for (int i = 0; i != rewindCount; ++i) {
if (!x.frozen) Kit.codeBug();
if (isFrameEnterExitRequired(x)) {
if (enterFrames == null) {
// Allocate enough space to store the rest
// of rewind frames in case all of them
// would require to enter
enterFrames = new CallFrame[rewindCount
- i];
}
enterFrames[enterCount] = x;
++enterCount;
}
x = x.parentFrame;
}
while (enterCount != 0) {
// execute enter: walk enterFrames in the reverse
// order since they were stored starting from
// the capturedFrame, not branchFrame
--enterCount;
x = enterFrames[enterCount];
enterFrame(cx, x, ScriptRuntime.emptyArgs, true);
}
// Continuation jump is almost done: capturedFrame
// points to the call to the function that captured
// continuation, so clone capturedFrame and
// emulate return that function with the suplied result
frame = cjump.capturedFrame.cloneFrozen();
setCallResult(frame, cjump.result, cjump.resultDbl);
// restart the execution
}
} else {
if (frame.frozen) Kit.codeBug();
}
// Use local variables for constant values in frame
// for faster access
Object[] stack = frame.stack;
double[] sDbl = frame.sDbl;
Object[] vars = frame.varSource.stack;
double[] varDbls = frame.varSource.sDbl;
byte[] iCode = frame.idata.itsICode;
String[] strings = frame.idata.itsStringTable;
// Use local for stackTop as well. Since execption handlers
// can only exist at statement level where stack is empty,
// it is necessary to save/restore stackTop only accross
// function calls and normal returns.
int stackTop = frame.savedStackTop;
// Store new frame in cx which is used for error reporting etc.
cx.lastInterpreterFrame = frame;
Loop: for (;;) {
// Exception handler assumes that PC is already incremented
// pass the instruction start when it searches the
// exception handler
int op = iCode[frame.pc++];
jumplessRun: {
// Back indent to ease imlementation reading
switch (op) {
case Token.THROW: {
Object value = stack[stackTop];
if (value == DBL_MRK) value = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
int sourceLine = getIndex(iCode, frame.pc);
throwable = new JavaScriptException(value,
frame.idata.itsSourceFile,
sourceLine);
break withoutExceptions;
}
case Token.RETHROW: {
indexReg += frame.localShift;
throwable = stack[indexReg];
break withoutExceptions;
}
case Token.GE :
case Token.LE :
case Token.GT :
case Token.LT : {
--stackTop;
Object rhs = stack[stackTop + 1];
Object lhs = stack[stackTop];
boolean valBln;
object_compare:
{
number_compare:
{
double rDbl, lDbl;
if (rhs == DBL_MRK) {
rDbl = sDbl[stackTop + 1];
lDbl = stack_double(frame, stackTop);
} else if (lhs == DBL_MRK) {
rDbl = ScriptRuntime.toNumber(rhs);
lDbl = sDbl[stackTop];
} else {
break number_compare;
}
switch (op) {
case Token.GE:
valBln = (lDbl >= rDbl);
break object_compare;
case Token.LE:
valBln = (lDbl <= rDbl);
break object_compare;
case Token.GT:
valBln = (lDbl > rDbl);
break object_compare;
case Token.LT:
valBln = (lDbl < rDbl);
break object_compare;
default:
throw Kit.codeBug();
}
}
switch (op) {
case Token.GE:
valBln = ScriptRuntime.cmp_LE(rhs, lhs);
break;
case Token.LE:
valBln = ScriptRuntime.cmp_LE(lhs, rhs);
break;
case Token.GT:
valBln = ScriptRuntime.cmp_LT(rhs, lhs);
break;
case Token.LT:
valBln = ScriptRuntime.cmp_LT(lhs, rhs);
break;
default:
throw Kit.codeBug();
}
}
stack[stackTop] = ScriptRuntime.wrapBoolean(valBln);
continue Loop;
}
case Token.IN :
case Token.INSTANCEOF : {
Object rhs = stack[stackTop];
if (rhs == DBL_MRK) rhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
Object lhs = stack[stackTop];
if (lhs == DBL_MRK) lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
boolean valBln;
if (op == Token.IN) {
valBln = ScriptRuntime.in(lhs, rhs, cx);
} else {
valBln = ScriptRuntime.instanceOf(lhs, rhs, cx);
}
stack[stackTop] = ScriptRuntime.wrapBoolean(valBln);
continue Loop;
}
case Token.EQ :
case Token.NE : {
--stackTop;
boolean valBln;
Object rhs = stack[stackTop + 1];
Object lhs = stack[stackTop];
if (rhs == DBL_MRK) {
if (lhs == DBL_MRK) {
valBln = (sDbl[stackTop] == sDbl[stackTop + 1]);
} else {
valBln = ScriptRuntime.eqNumber(sDbl[stackTop + 1], lhs);
}
} else {
if (lhs == DBL_MRK) {
valBln = ScriptRuntime.eqNumber(sDbl[stackTop], rhs);
} else {
valBln = ScriptRuntime.eq(lhs, rhs);
}
}
valBln ^= (op == Token.NE);
stack[stackTop] = ScriptRuntime.wrapBoolean(valBln);
continue Loop;
}
case Token.SHEQ :
case Token.SHNE : {
--stackTop;
Object rhs = stack[stackTop + 1];
Object lhs = stack[stackTop];
boolean valBln;
shallow_compare: {
double rdbl, ldbl;
if (rhs == DBL_MRK) {
rdbl = sDbl[stackTop + 1];
if (lhs == DBL_MRK) {
ldbl = sDbl[stackTop];
} else if (lhs instanceof Number) {
ldbl = ((Number)lhs).doubleValue();
} else {
valBln = false;
break shallow_compare;
}
} else if (lhs == DBL_MRK) {
ldbl = sDbl[stackTop];
if (rhs == DBL_MRK) {
rdbl = sDbl[stackTop + 1];
} else if (rhs instanceof Number) {
rdbl = ((Number)rhs).doubleValue();
} else {
valBln = false;
break shallow_compare;
}
} else {
valBln = ScriptRuntime.shallowEq(lhs, rhs);
break shallow_compare;
}
valBln = (ldbl == rdbl);
}
valBln ^= (op == Token.SHNE);
stack[stackTop] = ScriptRuntime.wrapBoolean(valBln);
continue Loop;
}
case Token.IFNE :
if (stack_boolean(frame, stackTop--)) {
frame.pc += 2;
continue Loop;
}
break jumplessRun;
case Token.IFEQ :
if (!stack_boolean(frame, stackTop--)) {
frame.pc += 2;
continue Loop;
}
break jumplessRun;
case Icode_IFEQ_POP :
if (!stack_boolean(frame, stackTop--)) {
frame.pc += 2;
continue Loop;
}
stack[stackTop--] = null;
break jumplessRun;
case Token.GOTO :
break jumplessRun;
case Icode_GOSUB :
++stackTop;
stack[stackTop] = DBL_MRK;
sDbl[stackTop] = frame.pc + 2;
break jumplessRun;
case Icode_STARTSUB :
if (stackTop == frame.emptyStackTop + 1) {
// Call from Icode_GOSUB: store return PC address in the local
indexReg += frame.localShift;
stack[indexReg] = stack[stackTop];
sDbl[indexReg] = sDbl[stackTop];
--stackTop;
} else {
// Call from exception handler: exception object is already stored
// in the local
if (stackTop != frame.emptyStackTop) Kit.codeBug();
}
continue Loop;
case Icode_RETSUB : {
// indexReg: local to store return address
if (instructionCounting) {
addInstructionCount(cx, frame, 0);
}
indexReg += frame.localShift;
Object value = stack[indexReg];
if (value != DBL_MRK) {
// Invocation from exception handler, restore object to rethrow
throwable = value;
break withoutExceptions;
}
// Normal return from GOSUB
frame.pc = (int)sDbl[indexReg];
if (instructionCounting) {
frame.pcPrevBranch = frame.pc;
}
continue Loop;
}
case Icode_POP :
stack[stackTop] = null;
stackTop--;
continue Loop;
case Icode_POP_RESULT :
frame.result = stack[stackTop];
frame.resultDbl = sDbl[stackTop];
stack[stackTop] = null;
--stackTop;
continue Loop;
case Icode_DUP :
stack[stackTop + 1] = stack[stackTop];
sDbl[stackTop + 1] = sDbl[stackTop];
stackTop++;
continue Loop;
case Icode_DUP2 :
stack[stackTop + 1] = stack[stackTop - 1];
sDbl[stackTop + 1] = sDbl[stackTop - 1];
stack[stackTop + 2] = stack[stackTop];
sDbl[stackTop + 2] = sDbl[stackTop];
stackTop += 2;
continue Loop;
case Icode_SWAP : {
Object o = stack[stackTop];
stack[stackTop] = stack[stackTop - 1];
stack[stackTop - 1] = o;
double d = sDbl[stackTop];
sDbl[stackTop] = sDbl[stackTop - 1];
sDbl[stackTop - 1] = d;
continue Loop;
}
case Token.RETURN :
frame.result = stack[stackTop];
frame.resultDbl = sDbl[stackTop];
--stackTop;
break Loop;
case Token.RETURN_RESULT :
break Loop;
case Icode_RETUNDEF :
frame.result = undefined;
break Loop;
case Token.BITNOT : {
int rIntValue = stack_int32(frame, stackTop);
stack[stackTop] = DBL_MRK;
sDbl[stackTop] = ~rIntValue;
continue Loop;
}
case Token.BITAND :
case Token.BITOR :
case Token.BITXOR :
case Token.LSH :
case Token.RSH : {
int rIntValue = stack_int32(frame, stackTop);
--stackTop;
int lIntValue = stack_int32(frame, stackTop);
stack[stackTop] = DBL_MRK;
switch (op) {
case Token.BITAND:
lIntValue &= rIntValue;
break;
case Token.BITOR:
lIntValue |= rIntValue;
break;
case Token.BITXOR:
lIntValue ^= rIntValue;
break;
case Token.LSH:
lIntValue <<= rIntValue;
break;
case Token.RSH:
lIntValue >>= rIntValue;
break;
}
sDbl[stackTop] = lIntValue;
continue Loop;
}
case Token.URSH : {
int rIntValue = stack_int32(frame, stackTop) & 0x1F;
--stackTop;
double lDbl = stack_double(frame, stackTop);
stack[stackTop] = DBL_MRK;
sDbl[stackTop] = ScriptRuntime.toUint32(lDbl) >>> rIntValue;
continue Loop;
}
case Token.NEG :
case Token.POS : {
double rDbl = stack_double(frame, stackTop);
stack[stackTop] = DBL_MRK;
if (op == Token.NEG) {
rDbl = -rDbl;
}
sDbl[stackTop] = rDbl;
continue Loop;
}
case Token.ADD :
--stackTop;
do_add(stack, sDbl, stackTop, cx);
continue Loop;
case Token.SUB :
case Token.MUL :
case Token.DIV :
case Token.MOD : {
double rDbl = stack_double(frame, stackTop);
--stackTop;
double lDbl = stack_double(frame, stackTop);
stack[stackTop] = DBL_MRK;
switch (op) {
case Token.SUB:
lDbl -= rDbl;
break;
case Token.MUL:
lDbl *= rDbl;
break;
case Token.DIV:
lDbl /= rDbl;
break;
case Token.MOD:
lDbl %= rDbl;
break;
}
sDbl[stackTop] = lDbl;
continue Loop;
}
case Token.NOT :
stack[stackTop] = ScriptRuntime.wrapBoolean(
!stack_boolean(frame, stackTop));
continue Loop;
case Token.BINDNAME :
stack[++stackTop] = ScriptRuntime.bind(cx, frame.scope, stringReg);
continue Loop;
case Token.SETNAME : {
Object rhs = stack[stackTop];
if (rhs == DBL_MRK) rhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
Scriptable lhs = (Scriptable)stack[stackTop];
stack[stackTop] = ScriptRuntime.setName(lhs, rhs, cx,
frame.scope, stringReg);
continue Loop;
}
case Token.DELPROP : {
Object rhs = stack[stackTop];
if (rhs == DBL_MRK) rhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
Object lhs = stack[stackTop];
if (lhs == DBL_MRK) lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.delete(lhs, rhs, cx);
continue Loop;
}
case Token.GETPROP : {
Object lhs = stack[stackTop];
if (lhs == DBL_MRK) lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.getObjectProp(lhs, stringReg, cx);
continue Loop;
}
case Token.SETPROP : {
Object rhs = stack[stackTop];
if (rhs == DBL_MRK) rhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
Object lhs = stack[stackTop];
if (lhs == DBL_MRK) lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.setObjectProp(lhs, stringReg, rhs,
cx);
continue Loop;
}
case Icode_PROP_INC_DEC : {
Object lhs = stack[stackTop];
if (lhs == DBL_MRK) lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.propIncrDecr(lhs, stringReg,
cx, iCode[frame.pc]);
++frame.pc;
continue Loop;
}
case Token.GETELEM : {
--stackTop;
Object lhs = stack[stackTop];
if (lhs == DBL_MRK) {
lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
}
Object value;
Object id = stack[stackTop + 1];
if (id != DBL_MRK) {
value = ScriptRuntime.getObjectElem(lhs, id, cx);
} else {
double d = sDbl[stackTop + 1];
value = ScriptRuntime.getObjectIndex(lhs, d, cx);
}
stack[stackTop] = value;
continue Loop;
}
case Token.SETELEM : {
stackTop -= 2;
Object rhs = stack[stackTop + 2];
if (rhs == DBL_MRK) {
rhs = ScriptRuntime.wrapNumber(sDbl[stackTop + 2]);
}
Object lhs = stack[stackTop];
if (lhs == DBL_MRK) {
lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
}
Object value;
Object id = stack[stackTop + 1];
if (id != DBL_MRK) {
value = ScriptRuntime.setObjectElem(lhs, id, rhs, cx);
} else {
double d = sDbl[stackTop + 1];
value = ScriptRuntime.setObjectIndex(lhs, d, rhs, cx);
}
stack[stackTop] = value;
continue Loop;
}
case Icode_ELEM_INC_DEC: {
Object rhs = stack[stackTop];
if (rhs == DBL_MRK) rhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
Object lhs = stack[stackTop];
if (lhs == DBL_MRK) lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.elemIncrDecr(lhs, rhs, cx,
iCode[frame.pc]);
++frame.pc;
continue Loop;
}
case Token.GET_REF : {
Ref ref = (Ref)stack[stackTop];
stack[stackTop] = ScriptRuntime.refGet(ref, cx);
continue Loop;
}
case Token.SET_REF : {
Object value = stack[stackTop];
if (value == DBL_MRK) value = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
Ref ref = (Ref)stack[stackTop];
stack[stackTop] = ScriptRuntime.refSet(ref, value, cx);
continue Loop;
}
case Token.DEL_REF : {
Ref ref = (Ref)stack[stackTop];
stack[stackTop] = ScriptRuntime.refDel(ref, cx);
continue Loop;
}
case Icode_REF_INC_DEC : {
Ref ref = (Ref)stack[stackTop];
stack[stackTop] = ScriptRuntime.refIncrDecr(ref, cx, iCode[frame.pc]);
++frame.pc;
continue Loop;
}
case Token.LOCAL_LOAD :
++stackTop;
indexReg += frame.localShift;
stack[stackTop] = stack[indexReg];
sDbl[stackTop] = sDbl[indexReg];
continue Loop;
case Icode_LOCAL_CLEAR :
indexReg += frame.localShift;
stack[indexReg] = null;
continue Loop;
case Icode_NAME_AND_THIS :
// stringReg: name
++stackTop;
stack[stackTop] = ScriptRuntime.getNameFunctionAndThis(stringReg,
cx, frame.scope);
++stackTop;
stack[stackTop] = ScriptRuntime.lastStoredScriptable(cx);
continue Loop;
case Icode_PROP_AND_THIS: {
Object obj = stack[stackTop];
if (obj == DBL_MRK) obj = ScriptRuntime.wrapNumber(sDbl[stackTop]);
// stringReg: property
stack[stackTop] = ScriptRuntime.getPropFunctionAndThis(obj, stringReg,
cx);
++stackTop;
stack[stackTop] = ScriptRuntime.lastStoredScriptable(cx);
continue Loop;
}
case Icode_ELEM_AND_THIS: {
Object obj = stack[stackTop - 1];
if (obj == DBL_MRK) obj = ScriptRuntime.wrapNumber(sDbl[stackTop - 1]);
Object id = stack[stackTop];
if (id == DBL_MRK) id = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop - 1] = ScriptRuntime.getElemFunctionAndThis(obj, id, cx);
stack[stackTop] = ScriptRuntime.lastStoredScriptable(cx);
continue Loop;
}
case Icode_VALUE_AND_THIS : {
Object value = stack[stackTop];
if (value == DBL_MRK) value = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.getValueFunctionAndThis(value, cx);
++stackTop;
stack[stackTop] = ScriptRuntime.lastStoredScriptable(cx);
continue Loop;
}
case Icode_CALLSPECIAL : {
if (instructionCounting) {
cx.instructionCount += INVOCATION_COST;
}
int callType = iCode[frame.pc] & 0xFF;
boolean isNew = (iCode[frame.pc + 1] != 0);
int sourceLine = getIndex(iCode, frame.pc + 2);
// indexReg: number of arguments
if (isNew) {
// stack change: function arg0 .. argN -> newResult
stackTop -= indexReg;
Object function = stack[stackTop];
if (function == DBL_MRK)
function = ScriptRuntime.wrapNumber(sDbl[stackTop]);
Object[] outArgs = getArgsArray(
stack, sDbl, stackTop + 1, indexReg);
stack[stackTop] = ScriptRuntime.newSpecial(
cx, function, outArgs, frame.scope, callType);
} else {
// stack change: function thisObj arg0 .. argN -> result
stackTop -= 1 + indexReg;
// Call code generation ensure that stack here
// is ... Callable Scriptable
Scriptable functionThis = (Scriptable)stack[stackTop + 1];
Callable function = (Callable)stack[stackTop];
Object[] outArgs = getArgsArray(
stack, sDbl, stackTop + 2, indexReg);
stack[stackTop] = ScriptRuntime.callSpecial(
cx, function, functionThis, outArgs,
frame.scope, frame.thisObj, callType,
frame.idata.itsSourceFile, sourceLine);
}
frame.pc += 4;
continue Loop;
}
case Token.CALL :
case Icode_TAIL_CALL :
case Token.REF_CALL : {
if (instructionCounting) {
cx.instructionCount += INVOCATION_COST;
}
// stack change: function thisObj arg0 .. argN -> result
// indexReg: number of arguments
stackTop -= 1 + indexReg;
// CALL generation ensures that fun and funThisObj
// are already Scriptable and Callable objects respectively
Callable fun = (Callable)stack[stackTop];
Scriptable funThisObj = (Scriptable)stack[stackTop + 1];
if (op == Token.REF_CALL) {
Object[] outArgs = getArgsArray(stack, sDbl, stackTop + 2,
indexReg);
stack[stackTop] = ScriptRuntime.callRef(fun, funThisObj,
outArgs, cx);
continue Loop;
}
Scriptable calleeScope = frame.scope;
if (frame.useActivation) {
calleeScope = ScriptableObject.getTopLevelScope(frame.scope);
}
if (fun instanceof InterpretedFunction) {
InterpretedFunction ifun = (InterpretedFunction)fun;
if (frame.fnOrScript.securityDomain == ifun.securityDomain) {
CallFrame callParentFrame = frame;
CallFrame calleeFrame = new CallFrame();
if (op == Icode_TAIL_CALL) {
// In principle tail call can re-use the current
// frame and its stack arrays but it is hard to
// do properly. Any exceptions that can legally
// happen during frame re-initialization including
// StackOverflowException during innocent looking
// System.arraycopy may leave the current frame
// data corrupted leading to undefined behaviour
// in the catch code below that unwinds JS stack
// on exceptions. Then there is an issue about frame release
// and exceptions there.
// To avoid frame allocation a released frame
// can be cached for re-use which would also benefit
// non-tail calls but it is not clear that this caching
// would gain in performance due to potentially
// bad interaction with GC.
callParentFrame = frame.parentFrame;
// Release the current frame. See Bug #344501 to see why
// it is being done here.
// TODO: If using the graphical debugger, tail call
// optimization will create a "hole" in the context stack.
// The correct thing to do may be to disable tail call
// optimization if the code is being debugged.
exitFrame(cx, frame, null); }
initFrame(cx, calleeScope, funThisObj, stack, sDbl,
stackTop + 2, indexReg, ifun, callParentFrame,
calleeFrame);
if (op != Icode_TAIL_CALL) {
frame.savedStackTop = stackTop;
frame.savedCallOp = op;
}
frame = calleeFrame;
continue StateLoop;
}
}
if (fun instanceof Continuation) {
// Jump to the captured continuation
ContinuationJump cjump;
cjump = new ContinuationJump((Continuation)fun, frame);
// continuation result is the first argument if any
// of continuation call
if (indexReg == 0) {
cjump.result = undefined;
} else {
cjump.result = stack[stackTop + 2];
cjump.resultDbl = sDbl[stackTop + 2];
}
// Start the real unwind job
throwable = cjump;
break withoutExceptions;
}
if (fun instanceof IdFunctionObject) {
IdFunctionObject ifun = (IdFunctionObject)fun;
if (Continuation.isContinuationConstructor(ifun)) {
captureContinuation(cx, frame, stackTop);
continue Loop;
}
}
Object[] outArgs = getArgsArray(stack, sDbl, stackTop + 2,
indexReg);
stack[stackTop] = fun.call(cx, calleeScope, funThisObj, outArgs);
continue Loop;
}
case Token.NEW : {
if (instructionCounting) {
cx.instructionCount += INVOCATION_COST;
}
// stack change: function arg0 .. argN -> newResult
// indexReg: number of arguments
stackTop -= indexReg;
Object lhs = stack[stackTop];
if (lhs instanceof InterpretedFunction) {
InterpretedFunction f = (InterpretedFunction)lhs;
if (frame.fnOrScript.securityDomain == f.securityDomain) {
Scriptable newInstance = f.createObject(cx, frame.scope);
CallFrame calleeFrame = new CallFrame();
initFrame(cx, frame.scope, newInstance, stack, sDbl,
stackTop + 1, indexReg, f, frame,
calleeFrame);
stack[stackTop] = newInstance;
frame.savedStackTop = stackTop;
frame.savedCallOp = op;
frame = calleeFrame;
continue StateLoop;
}
}
if (!(lhs instanceof Function)) {
if (lhs == DBL_MRK) lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
throw ScriptRuntime.notFunctionError(lhs);
}
Function fun = (Function)lhs;
if (fun instanceof IdFunctionObject) {
IdFunctionObject ifun = (IdFunctionObject)fun;
if (Continuation.isContinuationConstructor(ifun)) {
captureContinuation(cx, frame, stackTop);
continue Loop;
}
}
Object[] outArgs = getArgsArray(stack, sDbl, stackTop + 1, indexReg);
stack[stackTop] = fun.construct(cx, frame.scope, outArgs);
continue Loop;
}
case Token.TYPEOF : {
Object lhs = stack[stackTop];
if (lhs == DBL_MRK) lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.typeof(lhs);
continue Loop;
}
case Icode_TYPEOFNAME :
stack[++stackTop] = ScriptRuntime.typeofName(frame.scope, stringReg);
continue Loop;
case Token.STRING :
stack[++stackTop] = stringReg;
continue Loop;
case Icode_SHORTNUMBER :
++stackTop;
stack[stackTop] = DBL_MRK;
sDbl[stackTop] = getShort(iCode, frame.pc);
frame.pc += 2;
continue Loop;
case Icode_INTNUMBER :
++stackTop;
stack[stackTop] = DBL_MRK;
sDbl[stackTop] = getInt(iCode, frame.pc);
frame.pc += 4;
continue Loop;
case Token.NUMBER :
++stackTop;
stack[stackTop] = DBL_MRK;
sDbl[stackTop] = frame.idata.itsDoubleTable[indexReg];
continue Loop;
case Token.NAME :
stack[++stackTop] = ScriptRuntime.name(cx, frame.scope, stringReg);
continue Loop;
case Icode_NAME_INC_DEC :
stack[++stackTop] = ScriptRuntime.nameIncrDecr(frame.scope, stringReg,
iCode[frame.pc]);
++frame.pc;
continue Loop;
case Icode_SETVAR1:
indexReg = iCode[frame.pc++];
// fallthrough
case Token.SETVAR :
if (!frame.useActivation) {
vars[indexReg] = stack[stackTop];
varDbls[indexReg] = sDbl[stackTop];
} else {
Object val = stack[stackTop];
if (val == DBL_MRK) val = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stringReg = frame.idata.argNames[indexReg];
frame.scope.put(stringReg, frame.scope, val);
}
continue Loop;
case Icode_GETVAR1:
indexReg = iCode[frame.pc++];
// fallthrough
case Token.GETVAR :
++stackTop;
if (!frame.useActivation) {
stack[stackTop] = vars[indexReg];
sDbl[stackTop] = varDbls[indexReg];
} else {
stringReg = frame.idata.argNames[indexReg];
stack[stackTop] = frame.scope.get(stringReg, frame.scope);
}
continue Loop;
case Icode_VAR_INC_DEC : {
// indexReg : varindex
++stackTop;
int incrDecrMask = iCode[frame.pc];
if (!frame.useActivation) {
stack[stackTop] = DBL_MRK;
Object varValue = vars[indexReg];
double d;
if (varValue == DBL_MRK) {
d = varDbls[indexReg];
} else {
d = ScriptRuntime.toNumber(varValue);
vars[indexReg] = DBL_MRK;
}
double d2 = ((incrDecrMask & Node.DECR_FLAG) == 0)
? d + 1.0 : d - 1.0;
varDbls[indexReg] = d2;
sDbl[stackTop] = ((incrDecrMask & Node.POST_FLAG) == 0) ? d2 : d;
} else {
String varName = frame.idata.argNames[indexReg];
stack[stackTop] = ScriptRuntime.nameIncrDecr(frame.scope, varName,
incrDecrMask);
}
++frame.pc;
continue Loop;
}
case Icode_ZERO :
++stackTop;
stack[stackTop] = DBL_MRK;
sDbl[stackTop] = 0;
continue Loop;
case Icode_ONE :
++stackTop;
stack[stackTop] = DBL_MRK;
sDbl[stackTop] = 1;
continue Loop;
case Token.NULL :
stack[++stackTop] = null;
continue Loop;
case Token.THIS :
stack[++stackTop] = frame.thisObj;
continue Loop;
case Token.THISFN :
stack[++stackTop] = frame.fnOrScript;
continue Loop;
case Token.FALSE :
stack[++stackTop] = Boolean.FALSE;
continue Loop;
case Token.TRUE :
stack[++stackTop] = Boolean.TRUE;
continue Loop;
case Icode_UNDEF :
stack[++stackTop] = undefined;
continue Loop;
case Token.ENTERWITH : {
Object lhs = stack[stackTop];
if (lhs == DBL_MRK) lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
frame.scope = ScriptRuntime.enterWith(lhs, cx, frame.scope);
continue Loop;
}
case Token.LEAVEWITH :
frame.scope = ScriptRuntime.leaveWith(frame.scope);
continue Loop;
case Token.CATCH_SCOPE : {
// stack top: exception object
// stringReg: name of exception variable
// indexReg: local for exception scope
--stackTop;
indexReg += frame.localShift;
boolean afterFirstScope = (frame.idata.itsICode[frame.pc] != 0);
Throwable caughtException = (Throwable)stack[stackTop + 1];
Scriptable lastCatchScope;
if (!afterFirstScope) {
lastCatchScope = null;
} else {
lastCatchScope = (Scriptable)stack[indexReg];
}
stack[indexReg] = ScriptRuntime.newCatchScope(caughtException,
lastCatchScope, stringReg,
cx, frame.scope);
++frame.pc;
continue Loop;
}
case Token.ENUM_INIT_KEYS :
case Token.ENUM_INIT_VALUES : {
Object lhs = stack[stackTop];
if (lhs == DBL_MRK) lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
indexReg += frame.localShift;
stack[indexReg] = ScriptRuntime.enumInit(
lhs, cx, (op == Token.ENUM_INIT_VALUES));
continue Loop;
}
case Token.ENUM_NEXT :
case Token.ENUM_ID : {
indexReg += frame.localShift;
Object val = stack[indexReg];
++stackTop;
stack[stackTop] = (op == Token.ENUM_NEXT)
? (Object)ScriptRuntime.enumNext(val)
: (Object)ScriptRuntime.enumId(val, cx);
continue Loop;
}
case Token.REF_SPECIAL : {
//stringReg: name of special property
Object obj = stack[stackTop];
if (obj == DBL_MRK) obj = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.specialRef(obj, stringReg, cx);
continue Loop;
}
case Token.REF_MEMBER: {
//indexReg: flags
Object elem = stack[stackTop];
if (elem == DBL_MRK) elem = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
Object obj = stack[stackTop];
if (obj == DBL_MRK) obj = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.memberRef(obj, elem, cx, indexReg);
continue Loop;
}
case Token.REF_NS_MEMBER: {
//indexReg: flags
Object elem = stack[stackTop];
if (elem == DBL_MRK) elem = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
Object ns = stack[stackTop];
if (ns == DBL_MRK) ns = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
Object obj = stack[stackTop];
if (obj == DBL_MRK) obj = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.memberRef(obj, ns, elem, cx, indexReg);
continue Loop;
}
case Token.REF_NAME: {
//indexReg: flags
Object name = stack[stackTop];
if (name == DBL_MRK) name = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.nameRef(name, cx, frame.scope,
indexReg);
continue Loop;
}
case Token.REF_NS_NAME: {
//indexReg: flags
Object name = stack[stackTop];
if (name == DBL_MRK) name = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
Object ns = stack[stackTop];
if (ns == DBL_MRK) ns = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.nameRef(ns, name, cx, frame.scope,
indexReg);
continue Loop;
}
case Icode_SCOPE_LOAD :
indexReg += frame.localShift;
frame.scope = (Scriptable)stack[indexReg];
continue Loop;
case Icode_SCOPE_SAVE :
indexReg += frame.localShift;
stack[indexReg] = frame.scope;
continue Loop;
case Icode_CLOSURE_EXPR :
stack[++stackTop] = InterpretedFunction.createFunction(cx, frame.scope,
frame.fnOrScript,
indexReg);
continue Loop;
case Icode_CLOSURE_STMT :
initFunction(cx, frame.scope, frame.fnOrScript, indexReg);
continue Loop;
case Token.REGEXP :
stack[++stackTop] = frame.scriptRegExps[indexReg];
continue Loop;
case Icode_LITERAL_NEW :
// indexReg: number of values in the literal
++stackTop;
stack[stackTop] = new Object[indexReg];
sDbl[stackTop] = 0;
continue Loop;
case Icode_LITERAL_SET : {
Object value = stack[stackTop];
if (value == DBL_MRK) value = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
int i = (int)sDbl[stackTop];
((Object[])stack[stackTop])[i] = value;
sDbl[stackTop] = i + 1;
continue Loop;
}
case Token.ARRAYLIT :
case Icode_SPARE_ARRAYLIT :
case Token.OBJECTLIT : {
Object[] data = (Object[])stack[stackTop];
Object val;
if (op == Token.OBJECTLIT) {
Object[] ids = (Object[])frame.idata.literalIds[indexReg];
val = ScriptRuntime.newObjectLiteral(ids, data, cx, frame.scope);
} else {
int[] skipIndexces = null;
if (op == Icode_SPARE_ARRAYLIT) {
skipIndexces = (int[])frame.idata.literalIds[indexReg];
}
val = ScriptRuntime.newArrayLiteral(data, skipIndexces, cx,
frame.scope);
}
stack[stackTop] = val;
continue Loop;
}
case Icode_ENTERDQ : {
Object lhs = stack[stackTop];
if (lhs == DBL_MRK) lhs = ScriptRuntime.wrapNumber(sDbl[stackTop]);
--stackTop;
frame.scope = ScriptRuntime.enterDotQuery(lhs, frame.scope);
continue Loop;
}
case Icode_LEAVEDQ : {
boolean valBln = stack_boolean(frame, stackTop);
Object x = ScriptRuntime.updateDotQuery(valBln, frame.scope);
if (x != null) {
stack[stackTop] = x;
frame.scope = ScriptRuntime.leaveDotQuery(frame.scope);
frame.pc += 2;
continue Loop;
}
// reset stack and PC to code after ENTERDQ
--stackTop;
break jumplessRun;
}
case Token.DEFAULTNAMESPACE : {
Object value = stack[stackTop];
if (value == DBL_MRK) value = ScriptRuntime.wrapNumber(sDbl[stackTop]);
stack[stackTop] = ScriptRuntime.setDefaultNamespace(value, cx);
continue Loop;
}
case Token.ESCXMLATTR : {
Object value = stack[stackTop];
if (value != DBL_MRK) {
stack[stackTop] = ScriptRuntime.escapeAttributeValue(value, cx);
}
continue Loop;
}
case Token.ESCXMLTEXT : {
Object value = stack[stackTop];
if (value != DBL_MRK) {
stack[stackTop] = ScriptRuntime.escapeTextValue(value, cx);
}
continue Loop;
}
case Icode_LINE :
frame.pcSourceLineStart = frame.pc;
if (frame.debuggerFrame != null) {
int line = getIndex(iCode, frame.pc);
frame.debuggerFrame.onLineChange(cx, line);
}
frame.pc += 2;
continue Loop;
case Icode_REG_IND_C0:
indexReg = 0;
continue Loop;
case Icode_REG_IND_C1:
indexReg = 1;
continue Loop;
case Icode_REG_IND_C2:
indexReg = 2;
continue Loop;
case Icode_REG_IND_C3:
indexReg = 3;
continue Loop;
case Icode_REG_IND_C4:
indexReg = 4;
continue Loop;
case Icode_REG_IND_C5:
indexReg = 5;
continue Loop;
case Icode_REG_IND1:
indexReg = 0xFF & iCode[frame.pc];
++frame.pc;
continue Loop;
case Icode_REG_IND2:
indexReg = getIndex(iCode, frame.pc);
frame.pc += 2;
continue Loop;
case Icode_REG_IND4:
indexReg = getInt(iCode, frame.pc);
frame.pc += 4;
continue Loop;
case Icode_REG_STR_C0:
stringReg = strings[0];
continue Loop;
case Icode_REG_STR_C1:
stringReg = strings[1];
continue Loop;
case Icode_REG_STR_C2:
stringReg = strings[2];
continue Loop;
case Icode_REG_STR_C3:
stringReg = strings[3];
continue Loop;
case Icode_REG_STR1:
stringReg = strings[0xFF & iCode[frame.pc]];
++frame.pc;
continue Loop;
case Icode_REG_STR2:
stringReg = strings[getIndex(iCode, frame.pc)];
frame.pc += 2;
continue Loop;
case Icode_REG_STR4:
stringReg = strings[getInt(iCode, frame.pc)];
frame.pc += 4;
continue Loop;
default :
dumpICode(frame.idata);
throw new RuntimeException(
"Unknown icode : "+op+" @ pc : "+(frame.pc-1));
} // end of interpreter switch
} // end of jumplessRun label block
// This should be reachable only for jump implementation
// when pc points to encoded target offset
if (instructionCounting) {
addInstructionCount(cx, frame, 2);
}
int offset = getShort(iCode, frame.pc);
if (offset != 0) {
// -1 accounts for pc pointing to jump opcode + 1
frame.pc += offset - 1;
} else {
frame.pc = frame.idata.longJumps.
getExistingInt(frame.pc);
}
if (instructionCounting) {
frame.pcPrevBranch = frame.pc;
}
continue Loop;
} // end of Loop: for
exitFrame(cx, frame, null);
interpreterResult = frame.result;
interpreterResultDbl = frame.resultDbl;
if (frame.parentFrame != null) {
frame = frame.parentFrame;
if (frame.frozen) {
frame = frame.cloneFrozen();
}
setCallResult(
frame, interpreterResult, interpreterResultDbl);
interpreterResult = null; // Help GC
continue StateLoop;
}
break StateLoop;
} // end of interpreter withoutExceptions: try
catch (Throwable ex) {
if (throwable != null) {
// This is serious bug and it is better to track it ASAP
ex.printStackTrace(System.err);
throw new IllegalStateException();
}
throwable = ex;
}
// This should be reachable only after above catch or from
// finally when it needs to propagate exception or from
// explicit throw
if (throwable == null) Kit.codeBug();
// Exception type
final int EX_CATCH_STATE = 2; // Can execute JS catch
final int EX_FINALLY_STATE = 1; // Can execute JS finally
final int EX_NO_JS_STATE = 0; // Terminate JS execution
int exState;
ContinuationJump cjump = null;
if (throwable instanceof JavaScriptException) {
exState = EX_CATCH_STATE;
} else if (throwable instanceof EcmaError) {
// an official ECMA error object,
exState = EX_CATCH_STATE;
} else if (throwable instanceof EvaluatorException) {
exState = EX_CATCH_STATE;
} else if (throwable instanceof RuntimeException) {
exState = EX_FINALLY_STATE;
} else if (throwable instanceof Error) {
exState = EX_NO_JS_STATE;
} else {
// It must be ContinuationJump
exState = EX_FINALLY_STATE;
cjump = (ContinuationJump)throwable;
}
if (instructionCounting) {
try {
addInstructionCount(cx, frame, EXCEPTION_COST);
} catch (RuntimeException ex) {
throwable = ex;
exState = EX_FINALLY_STATE;
} catch (Error ex) {
// Error from instruction counting
// => unconditionally terminate JS
throwable = ex;
cjump = null;
exState = EX_NO_JS_STATE;
}
}
if (frame.debuggerFrame != null
&& throwable instanceof RuntimeException)
{
// Call debugger only for RuntimeException
RuntimeException rex = (RuntimeException)throwable;
try {
frame.debuggerFrame.onExceptionThrown(cx, rex);
} catch (Throwable ex) {
// Any exception from debugger
// => unconditionally terminate JS
throwable = ex;
cjump = null;
exState = EX_NO_JS_STATE;
}
}
for (;;) {
if (exState != EX_NO_JS_STATE) {
boolean onlyFinally = (exState != EX_CATCH_STATE);
indexReg = getExceptionHandler(frame, onlyFinally);
if (indexReg >= 0) {
// We caught an exception, restart the loop
// with the exception pending; it is processed at the
// start of the loop
continue StateLoop;
}
}
// No allowed exception handlers in this frame, unwind
// to parent and try to look there
exitFrame(cx, frame, throwable);
frame = frame.parentFrame;
if (frame == null) { break; }
if (cjump != null && cjump.branchFrame == frame) {
// Continuation branch point was hit,
// restart the state loop to reenter continuation
indexReg = -1;
continue StateLoop;
}
}
// No more frames, rethrow the exception or deal with continuation
if (cjump != null) {
if (cjump.branchFrame != null) {
// The above loop should locate the top frame
Kit.codeBug();
}
if (cjump.capturedFrame != null) {
// Restarting detached continuation
indexReg = -1;
continue StateLoop;
}
// Return continuation result to the caller
interpreterResult = cjump.result;
interpreterResultDbl = cjump.resultDbl;
throwable = null;
}
break StateLoop;
} // end of StateLoop: for(;;)
// Do cleanups/restorations before the final return or throw
if (cx.previousInterpreterInvocations != null
&& cx.previousInterpreterInvocations.size() != 0)
{
cx.lastInterpreterFrame
= cx.previousInterpreterInvocations.pop();
} else {
// It was the last interpreter frame on the stack
cx.lastInterpreterFrame = null;
// Force GC of the value cx.previousInterpreterInvocations
cx.previousInterpreterInvocations = null;
}
if (throwable != null) {
if (throwable instanceof RuntimeException) {
throw (RuntimeException)throwable;
} else {
// Must be instance of Error or code bug
throw (Error)throwable;
}
}
return (interpreterResult != DBL_MRK)
? interpreterResult
: ScriptRuntime.wrapNumber(interpreterResultDbl);
}
|
diff --git a/src/edu/cmu/cs211/pg/algorithms/MstTspApproximation.java b/src/edu/cmu/cs211/pg/algorithms/MstTspApproximation.java
index 90c2954..f8ccace 100644
--- a/src/edu/cmu/cs211/pg/algorithms/MstTspApproximation.java
+++ b/src/edu/cmu/cs211/pg/algorithms/MstTspApproximation.java
@@ -1,195 +1,195 @@
package edu.cmu.cs211.pg.algorithms;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.PriorityQueue;
import java.util.Set;
import java.util.TreeSet;
import edu.cmu.cs211.pg.graph.Edge;
import edu.cmu.cs211.pg.graph.Graph;
import edu.cmu.cs211.pg.graph.MyDirectedGraph;
import edu.cmu.cs211.pg.graph.Path;
import edu.cmu.cs211.pg.graph.WeightedEdge;
public class MstTspApproximation<V extends Comparable<V>>
{
private Dijkstra dijkstra;
private Kruskal kruskal;
protected MstTspApproximation()
{
kruskal = new Kruskal();
dijkstra = new Dijkstra();
}
public MstTspApproximation(Kruskal kruskal, Dijkstra dijkstra)
{
this.kruskal = kruskal;
this.dijkstra = dijkstra;
}
/**
* Generates a tour of a specified set of nodes in the graph using the
* MST-TSP 2-competitive approximation algorithm.
*
* In order to aid in the unit testing of this class, we require
* that the order you visit neighbors in the DFS portion of
* the algorithm is the same as the natural ordering of the vertex
* type V. You should also use the instances of Kruskal and
* Dijkstra passed into the constructor of this class, not your own.
*
* @param g the graph to generate the tour on. This UNDIRECTED
* graph may not be complete, but must be connected.
*
* @param verts the vertices in the graph that you want to tour.
* This may be a subset of the vertices in the graph.
*
* @param start the vertex at which the tour must begin and end.
* start may not be in the set of vertices to tour, but
* your tour must begin and end at start.
*
* @throws IllegalArgumentException if any of the vertices in the
* verts set are not in the graph.
* @throws NullPointerException if g, verts, or start are null
*
* @return A list of vertices representing the order you would visit
* vertices in the original graph such that every vertex in
* the specified verts set is visited at least once.
*/
public List<V> approximateTour(Graph<V,WeightedEdge<V>> g, Set<V> verts, V start)
{
if (g == null || verts == null || start == null)
throw new NullPointerException("Null parameter to approximateTour()");
// Do we need to come back to the start node (true cycle)?
boolean backToStart = false;
- if (!verts.contains(start)) {
+ if (verts.contains(start)) {
backToStart = true;
}
else {
// we need start in there anyway, for the MST
verts = new HashSet<V>(verts);
verts.add(start);
}
// form basic graph of ONLY verts with all edges directly between vertices
Graph<V, WeightedEdge<V>> reduced = new MyDirectedGraph<V, WeightedEdge<V>>(verts);
Object[] it = verts.toArray();
for (int i = 0; i < it.length; i++) {
for (int j = 0; j < it.length; j++) {
if (i == j)
continue;
WeightedEdge<V> newEdge = g.adjacent((V)it[i], (V)it[j]);
if (newEdge != null)
reduced.addEdge(newEdge);
}
}
// add on edges that do not exist in current graph
for (int i = 0; i < it.length; i++) {
for (int j = 0; j < it.length; j++) {
if (j == i)
continue;
Path<V, WeightedEdge<V>> currentShortPath = dijkstra.shortestPath(reduced, (V)it[i], (V)it[j]);
Path<V, WeightedEdge<V>> shortPath = dijkstra.shortestPath(g, (V)it[i], (V)it[j]);
if (shortPath == null)
throw new IllegalArgumentException(); // cannot form mst
if (currentShortPath == null ||
shortPath.pathWeight() < currentShortPath.pathWeight()) {
reduced.addEdge(new WeightedEdge<V>((V)it[i], (V)it[j], shortPath.pathWeight()));
}
}
}
// Kruskal's to find MST of reduced graph
Graph<V,WeightedEdge<V>> mst = kruskal.MST(reduced);
// DFS to pre-order traversal of the MST
HashSet<V> visited = new HashSet<V>();
directedToUndirected(mst);
List<V> order = dfs(mst, start, visited); // the order in which we visit the nodes needed
order.add(start);
// list of all nodes we visit, in order, in our traversal
List<V> traversal = new ArrayList<V>();
for (int i = 1; i < order.size(); i++)
traversal.addAll(vertices(dijkstra.shortestPath(g, order.get(i - 1), order.get(i)).edges()));
if (backToStart)
traversal.addAll(vertices(dijkstra.shortestPath(g, order.get(order.size() - 1), start).edges()));
return traversal;
}
/**
* Depth First Search of a graph, to iterate through the MST
*
* @param g the minimum spanning tree being traversed
* @param start the node we are searching from now
* @return a list of all nodes in g, preordered by depth first search
*/
private List<V> dfs(Graph<V, WeightedEdge<V>> mst, V start, Set<V> visited)
{
visited.add(start);
List<V> ret = new ArrayList<V>();
ret.add(start);
PriorityQueue<V> neighbors = new PriorityQueue<V>(mst.outgoingNeighbors(start));
while (!neighbors.isEmpty()) {
V neighbor = neighbors.poll();
// Have we already looked at the node?
if (!visited.contains(neighbor)) {
// Traverse via preorder
ret.addAll(dfs(mst, neighbor, visited));
}
}
return ret;
}
/**
* Turn a directed weighted graph into, practically, an undirected unweighted graph
* by adding opposite-direction edges for each edge with weights of 0
* @param g the graph we are transforming
*/
private void directedToUndirected(Graph<V, WeightedEdge<V>> g)
{
Object[] it = g.vertices().toArray();
for (int i = 0; i < it.length; i++) {
for (int j = 0; j < it.length; j++) {
if (g.adjacent((V)it[i], (V)it[j]) != null)
g.addEdge(new WeightedEdge<V>((V)it[j], (V)it[i], 0));
else if (g.adjacent((V)it[j], (V)it[i]) != null)
g.addEdge(new WeightedEdge<V>((V)it[i], (V)it[j], 0));
}
}
}
/**
* list of vertices in a path, in order
* @param edges list of the edges in the path
* @return list of vertices (without start vertex)
*/
private List<V> vertices (List<WeightedEdge<V>> edges)
{
List<V> nodes = new ArrayList<V>();
for (int i = 0; i < edges.size(); i++) {
nodes.add(edges.get(i).dest());
}
return nodes;
}
}
| true
| true
|
public List<V> approximateTour(Graph<V,WeightedEdge<V>> g, Set<V> verts, V start)
{
if (g == null || verts == null || start == null)
throw new NullPointerException("Null parameter to approximateTour()");
// Do we need to come back to the start node (true cycle)?
boolean backToStart = false;
if (!verts.contains(start)) {
backToStart = true;
}
else {
// we need start in there anyway, for the MST
verts = new HashSet<V>(verts);
verts.add(start);
}
// form basic graph of ONLY verts with all edges directly between vertices
Graph<V, WeightedEdge<V>> reduced = new MyDirectedGraph<V, WeightedEdge<V>>(verts);
Object[] it = verts.toArray();
for (int i = 0; i < it.length; i++) {
for (int j = 0; j < it.length; j++) {
if (i == j)
continue;
WeightedEdge<V> newEdge = g.adjacent((V)it[i], (V)it[j]);
if (newEdge != null)
reduced.addEdge(newEdge);
}
}
// add on edges that do not exist in current graph
for (int i = 0; i < it.length; i++) {
for (int j = 0; j < it.length; j++) {
if (j == i)
continue;
Path<V, WeightedEdge<V>> currentShortPath = dijkstra.shortestPath(reduced, (V)it[i], (V)it[j]);
Path<V, WeightedEdge<V>> shortPath = dijkstra.shortestPath(g, (V)it[i], (V)it[j]);
if (shortPath == null)
throw new IllegalArgumentException(); // cannot form mst
if (currentShortPath == null ||
shortPath.pathWeight() < currentShortPath.pathWeight()) {
reduced.addEdge(new WeightedEdge<V>((V)it[i], (V)it[j], shortPath.pathWeight()));
}
}
}
// Kruskal's to find MST of reduced graph
Graph<V,WeightedEdge<V>> mst = kruskal.MST(reduced);
// DFS to pre-order traversal of the MST
HashSet<V> visited = new HashSet<V>();
directedToUndirected(mst);
List<V> order = dfs(mst, start, visited); // the order in which we visit the nodes needed
order.add(start);
// list of all nodes we visit, in order, in our traversal
List<V> traversal = new ArrayList<V>();
for (int i = 1; i < order.size(); i++)
traversal.addAll(vertices(dijkstra.shortestPath(g, order.get(i - 1), order.get(i)).edges()));
if (backToStart)
traversal.addAll(vertices(dijkstra.shortestPath(g, order.get(order.size() - 1), start).edges()));
return traversal;
}
|
public List<V> approximateTour(Graph<V,WeightedEdge<V>> g, Set<V> verts, V start)
{
if (g == null || verts == null || start == null)
throw new NullPointerException("Null parameter to approximateTour()");
// Do we need to come back to the start node (true cycle)?
boolean backToStart = false;
if (verts.contains(start)) {
backToStart = true;
}
else {
// we need start in there anyway, for the MST
verts = new HashSet<V>(verts);
verts.add(start);
}
// form basic graph of ONLY verts with all edges directly between vertices
Graph<V, WeightedEdge<V>> reduced = new MyDirectedGraph<V, WeightedEdge<V>>(verts);
Object[] it = verts.toArray();
for (int i = 0; i < it.length; i++) {
for (int j = 0; j < it.length; j++) {
if (i == j)
continue;
WeightedEdge<V> newEdge = g.adjacent((V)it[i], (V)it[j]);
if (newEdge != null)
reduced.addEdge(newEdge);
}
}
// add on edges that do not exist in current graph
for (int i = 0; i < it.length; i++) {
for (int j = 0; j < it.length; j++) {
if (j == i)
continue;
Path<V, WeightedEdge<V>> currentShortPath = dijkstra.shortestPath(reduced, (V)it[i], (V)it[j]);
Path<V, WeightedEdge<V>> shortPath = dijkstra.shortestPath(g, (V)it[i], (V)it[j]);
if (shortPath == null)
throw new IllegalArgumentException(); // cannot form mst
if (currentShortPath == null ||
shortPath.pathWeight() < currentShortPath.pathWeight()) {
reduced.addEdge(new WeightedEdge<V>((V)it[i], (V)it[j], shortPath.pathWeight()));
}
}
}
// Kruskal's to find MST of reduced graph
Graph<V,WeightedEdge<V>> mst = kruskal.MST(reduced);
// DFS to pre-order traversal of the MST
HashSet<V> visited = new HashSet<V>();
directedToUndirected(mst);
List<V> order = dfs(mst, start, visited); // the order in which we visit the nodes needed
order.add(start);
// list of all nodes we visit, in order, in our traversal
List<V> traversal = new ArrayList<V>();
for (int i = 1; i < order.size(); i++)
traversal.addAll(vertices(dijkstra.shortestPath(g, order.get(i - 1), order.get(i)).edges()));
if (backToStart)
traversal.addAll(vertices(dijkstra.shortestPath(g, order.get(order.size() - 1), start).edges()));
return traversal;
}
|
diff --git a/APITestingCG/src/cz/cvut/fit/hybljan2/apitestingcg/generator/ExtenderGenerator.java b/APITestingCG/src/cz/cvut/fit/hybljan2/apitestingcg/generator/ExtenderGenerator.java
index b82f92f..7f4f34d 100644
--- a/APITestingCG/src/cz/cvut/fit/hybljan2/apitestingcg/generator/ExtenderGenerator.java
+++ b/APITestingCG/src/cz/cvut/fit/hybljan2/apitestingcg/generator/ExtenderGenerator.java
@@ -1,90 +1,90 @@
package cz.cvut.fit.hybljan2.apitestingcg.generator;
import cz.cvut.fit.hybljan2.apitestingcg.apimodel.*;
import cz.cvut.fit.hybljan2.apitestingcg.apimodel.APIItem.Kind;
import cz.cvut.fit.hybljan2.apitestingcg.apimodel.APIModifier.Modifier;
import cz.cvut.fit.hybljan2.apitestingcg.configuration.model.GeneratorJobConfiguration;
/**
*
* @author Jan Hybl
*/
public class ExtenderGenerator extends Generator {
@Override
public void generate(API api, GeneratorJobConfiguration jobConfiguration) {
for(APIPackage pkg : api.getPackages()) {
for(APIClass cls : pkg.getClasses()) {
// filter out final classes and annotations
if(!cls.getModifiers().contains(Modifier.FINAL) && !cls.getType().equals(Kind.ANNOTATION)) {
ClassGenerator cgen = new ClassGenerator();
cgen.addImport(cls.getFullName());
cgen.setPackageName(generateName(jobConfiguration.getOutputPackage(), pkg.getName()));
String pattern = null;
// if tested item is interface, create Implementator, otherwise Extender
if(cls.getType() == Kind.INTERFACE) {
pattern = configuration.getImplementerClassIdentifier();
cgen.addImplemening(cls.getName());
} else {
pattern = configuration.getExtenderClassIdentifier();
cgen.setExtending(cls.getName());
}
cgen.setName(generateName(pattern, cls.getName()));
// constructors tests
for(APIMethod constructor : cls.getConstructors()) {
MethodGenerator cnstr = new MethodGenerator();
cnstr.setModifiers("public");
cnstr.setName(cgen.getName());
cnstr.setParams(getMethodParamList(constructor));
StringBuilder sb = new StringBuilder();
sb.append("\t\tsuper(").append(getMethodParamNameList(cnstr.getParams())).append(");");
cnstr.setBody(sb.toString());
cgen.addConstructor(cnstr);
}
// method overriding tests
for(APIMethod method : cls.getMethods()) {
// filter out static and final methods - they can't be overriden
if(! (method.getModifiers().contains(Modifier.STATIC) || method.getModifiers().contains(Modifier.FINAL))) {
MethodGenerator mgen = new MethodGenerator();
mgen.setModifiers("public");
mgen.setName(method.getName());
mgen.setReturnType(method.getReturnType());
StringBuilder sb = new StringBuilder();
for (Modifier m : method.getModifiers()) {
if(!m.equals(Modifier.ABSTRACT)) {
sb.append(m.toString().toLowerCase()).append(" ");
}
}
mgen.setModifiers(sb.toString().trim());
mgen.setThrown(method.getThrown());
mgen.addAnotation("Override");
mgen.setParams(getMethodParamList(method));
mgen.setBody("\t\tthrow new UnsupportedOperationException();");
cgen.addMethod(mgen);
}
}
// protected field tests
for(APIField field : cls.getFields()) {
// if(field.getModifiers().contains(Modifier.PROTECTED)) {
- MethodGenerator ftmg = new FieldTestMehtodGenerator(cls, field, getInstance(field.getModifiers(), cls) + '.' + field.getName(), configuration);
+ MethodGenerator ftmg = new FieldTestMehtodGenerator(cls, field, field.getName(), configuration);
cgen.addMethod(ftmg);
// }
}
cgen.generateClassFile();
}
}
}
}
}
| true
| true
|
public void generate(API api, GeneratorJobConfiguration jobConfiguration) {
for(APIPackage pkg : api.getPackages()) {
for(APIClass cls : pkg.getClasses()) {
// filter out final classes and annotations
if(!cls.getModifiers().contains(Modifier.FINAL) && !cls.getType().equals(Kind.ANNOTATION)) {
ClassGenerator cgen = new ClassGenerator();
cgen.addImport(cls.getFullName());
cgen.setPackageName(generateName(jobConfiguration.getOutputPackage(), pkg.getName()));
String pattern = null;
// if tested item is interface, create Implementator, otherwise Extender
if(cls.getType() == Kind.INTERFACE) {
pattern = configuration.getImplementerClassIdentifier();
cgen.addImplemening(cls.getName());
} else {
pattern = configuration.getExtenderClassIdentifier();
cgen.setExtending(cls.getName());
}
cgen.setName(generateName(pattern, cls.getName()));
// constructors tests
for(APIMethod constructor : cls.getConstructors()) {
MethodGenerator cnstr = new MethodGenerator();
cnstr.setModifiers("public");
cnstr.setName(cgen.getName());
cnstr.setParams(getMethodParamList(constructor));
StringBuilder sb = new StringBuilder();
sb.append("\t\tsuper(").append(getMethodParamNameList(cnstr.getParams())).append(");");
cnstr.setBody(sb.toString());
cgen.addConstructor(cnstr);
}
// method overriding tests
for(APIMethod method : cls.getMethods()) {
// filter out static and final methods - they can't be overriden
if(! (method.getModifiers().contains(Modifier.STATIC) || method.getModifiers().contains(Modifier.FINAL))) {
MethodGenerator mgen = new MethodGenerator();
mgen.setModifiers("public");
mgen.setName(method.getName());
mgen.setReturnType(method.getReturnType());
StringBuilder sb = new StringBuilder();
for (Modifier m : method.getModifiers()) {
if(!m.equals(Modifier.ABSTRACT)) {
sb.append(m.toString().toLowerCase()).append(" ");
}
}
mgen.setModifiers(sb.toString().trim());
mgen.setThrown(method.getThrown());
mgen.addAnotation("Override");
mgen.setParams(getMethodParamList(method));
mgen.setBody("\t\tthrow new UnsupportedOperationException();");
cgen.addMethod(mgen);
}
}
// protected field tests
for(APIField field : cls.getFields()) {
// if(field.getModifiers().contains(Modifier.PROTECTED)) {
MethodGenerator ftmg = new FieldTestMehtodGenerator(cls, field, getInstance(field.getModifiers(), cls) + '.' + field.getName(), configuration);
cgen.addMethod(ftmg);
// }
}
cgen.generateClassFile();
}
}
}
}
|
public void generate(API api, GeneratorJobConfiguration jobConfiguration) {
for(APIPackage pkg : api.getPackages()) {
for(APIClass cls : pkg.getClasses()) {
// filter out final classes and annotations
if(!cls.getModifiers().contains(Modifier.FINAL) && !cls.getType().equals(Kind.ANNOTATION)) {
ClassGenerator cgen = new ClassGenerator();
cgen.addImport(cls.getFullName());
cgen.setPackageName(generateName(jobConfiguration.getOutputPackage(), pkg.getName()));
String pattern = null;
// if tested item is interface, create Implementator, otherwise Extender
if(cls.getType() == Kind.INTERFACE) {
pattern = configuration.getImplementerClassIdentifier();
cgen.addImplemening(cls.getName());
} else {
pattern = configuration.getExtenderClassIdentifier();
cgen.setExtending(cls.getName());
}
cgen.setName(generateName(pattern, cls.getName()));
// constructors tests
for(APIMethod constructor : cls.getConstructors()) {
MethodGenerator cnstr = new MethodGenerator();
cnstr.setModifiers("public");
cnstr.setName(cgen.getName());
cnstr.setParams(getMethodParamList(constructor));
StringBuilder sb = new StringBuilder();
sb.append("\t\tsuper(").append(getMethodParamNameList(cnstr.getParams())).append(");");
cnstr.setBody(sb.toString());
cgen.addConstructor(cnstr);
}
// method overriding tests
for(APIMethod method : cls.getMethods()) {
// filter out static and final methods - they can't be overriden
if(! (method.getModifiers().contains(Modifier.STATIC) || method.getModifiers().contains(Modifier.FINAL))) {
MethodGenerator mgen = new MethodGenerator();
mgen.setModifiers("public");
mgen.setName(method.getName());
mgen.setReturnType(method.getReturnType());
StringBuilder sb = new StringBuilder();
for (Modifier m : method.getModifiers()) {
if(!m.equals(Modifier.ABSTRACT)) {
sb.append(m.toString().toLowerCase()).append(" ");
}
}
mgen.setModifiers(sb.toString().trim());
mgen.setThrown(method.getThrown());
mgen.addAnotation("Override");
mgen.setParams(getMethodParamList(method));
mgen.setBody("\t\tthrow new UnsupportedOperationException();");
cgen.addMethod(mgen);
}
}
// protected field tests
for(APIField field : cls.getFields()) {
// if(field.getModifiers().contains(Modifier.PROTECTED)) {
MethodGenerator ftmg = new FieldTestMehtodGenerator(cls, field, field.getName(), configuration);
cgen.addMethod(ftmg);
// }
}
cgen.generateClassFile();
}
}
}
}
|
diff --git a/src/com/vmware/vim25/mo/ServiceInstance.java b/src/com/vmware/vim25/mo/ServiceInstance.java
index af7ea3f..e4e40b0 100644
--- a/src/com/vmware/vim25/mo/ServiceInstance.java
+++ b/src/com/vmware/vim25/mo/ServiceInstance.java
@@ -1,363 +1,363 @@
/*================================================================================
Copyright (c) 2008 VMware, Inc. All Rights Reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of VMware, Inc. nor the names of its contributors may be used
to endorse or promote products derived from this software without specific prior
written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL VMWARE, INC. OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
================================================================================*/
package com.vmware.vim25.mo;
import java.net.MalformedURLException;
import java.net.URL;
import java.rmi.RemoteException;
import java.util.Calendar;
import com.vmware.vim25.*;
import com.vmware.vim25.mo.util.*;
import com.vmware.vim25.ws.WSClient;
/**
* The managed object class corresponding to the one defined in VI SDK API reference.
* @author Steve JIN (sjin@vmware.com)
*/
public class ServiceInstance extends ManagedObject
{
private ServiceContent serviceContent = null;
final static ManagedObjectReference SERVICE_INSTANCE_MOR;
public final static String VIM25_NAMESPACE = " xmlns=\"urn:vim25\">";
public final static String VIM20_NAMESPACE = " xmlns=\"urn:vim2\">";
static
{
SERVICE_INSTANCE_MOR = new ManagedObjectReference();
SERVICE_INSTANCE_MOR.set_value("ServiceInstance");
SERVICE_INSTANCE_MOR.setType("ServiceInstance");
}
public ServiceInstance(URL url, String username, String password)
throws RemoteException, MalformedURLException
{
this(url, username, password, false);
}
public ServiceInstance(URL url, String username, String password, boolean ignoreCert)
throws RemoteException, MalformedURLException
{
this(url, username, password, ignoreCert, VIM25_NAMESPACE);
}
public ServiceInstance(URL url, String username, String password, boolean ignoreCert, String namespace)
throws RemoteException, MalformedURLException
{
if(url == null || username==null)
{
throw new NullPointerException("None of url, username can be null.");
}
setMOR(SERVICE_INSTANCE_MOR);
VimPortType vimService = new VimPortType(url.toString(), ignoreCert);
vimService.getWsc().setVimNameSpace(namespace);
serviceContent = vimService.retrieveServiceContent(SERVICE_INSTANCE_MOR);
vimService.getWsc().setSoapActionOnApiVersion(serviceContent.getAbout().getApiVersion());
setServerConnection(new ServerConnection(url, vimService, this));
// escape 5 special chars
// http://en.wikipedia.org/wiki/List_of_XML_and_HTML_character_entity_references
- password = password.replaceAll("<", "<")
- .replaceAll(">", ">")
- .replaceAll("&", "&")
- .replaceAll("\"", """)
- .replaceAll("'", "'");
+ password = password.replace("&", "&")
+ .replace("<", "<")
+ .replace(">", ">")
+ .replace("\"", """)
+ .replace("'", "'");
UserSession userSession = getSessionManager().login(username, password, null);
getServerConnection().setUserSession(userSession);
}
public ServiceInstance(URL url, String sessionStr, boolean ignoreCert)
throws RemoteException, MalformedURLException
{
this(url, sessionStr, ignoreCert, VIM25_NAMESPACE);
}
// sessionStr format: "vmware_soap_session=\"B3240D15-34DF-4BB8-B902-A844FDF42E85\""
public ServiceInstance(URL url, String sessionStr, boolean ignoreCert, String namespace)
throws RemoteException, MalformedURLException
{
if(url == null || sessionStr ==null)
{
throw new NullPointerException("None of url, session string can be null.");
}
setMOR(SERVICE_INSTANCE_MOR);
VimPortType vimService = new VimPortType(url.toString(), ignoreCert);
WSClient wsc = vimService.getWsc();
wsc.setCookie(sessionStr);
wsc.setVimNameSpace(namespace);
serviceContent = vimService.retrieveServiceContent(SERVICE_INSTANCE_MOR);
setServerConnection(new ServerConnection(url, vimService, this));
UserSession userSession = (UserSession) getSessionManager().getCurrentProperty("currentSession");
getServerConnection().setUserSession(userSession);
}
public ServiceInstance(ServerConnection sc)
{
super(sc, SERVICE_INSTANCE_MOR);
}
public Calendar getServerClock()
{
return (Calendar) getCurrentProperty("serverClock");
}
public Capability getCapability()
{
return (Capability) getCurrentProperty("capability");
}
public ClusterProfileManager getClusterProfileManager()
{
return (ClusterProfileManager) createMO(getServiceContent().getClusterProfileManager());
}
public Calendar currentTime() throws RuntimeFault, RemoteException
{
return getVimService().currentTime(getMOR());
}
public Folder getRootFolder()
{
return new Folder(this.getServerConnection(), this.getServiceContent().getRootFolder());
}
public HostVMotionCompatibility[] queryVMotionCompatibility(VirtualMachine vm, HostSystem[] hosts, String[] compatibility) throws RuntimeFault, RemoteException
{
if(vm==null || hosts==null)
{
throw new IllegalArgumentException("Neither vm or hosts can be null.");
}
return getVimService().queryVMotionCompatibility(getMOR(), vm.getMOR(), MorUtil.createMORs(hosts), compatibility);
}
public ProductComponentInfo[] retrieveProductComponents() throws RuntimeFault, RemoteException
{
return getVimService().retrieveProductComponents(getMOR());
}
private ServiceContent retrieveServiceContent() throws RuntimeFault, RemoteException
{
return getVimService().retrieveServiceContent(getMOR());
}
public Event[] validateMigration(VirtualMachine[] vms, VirtualMachinePowerState state, String[] testType
, ResourcePool pool, HostSystem host) throws InvalidState, RuntimeFault, RemoteException
{
if(vms==null)
{
throw new IllegalArgumentException("vms must not be null.");
}
return getVimService().validateMigration(getMOR(), MorUtil.createMORs(vms), state, testType,
pool==null? null: pool.getMOR(), host==null? null : host.getMOR());
}
public ServiceContent getServiceContent()
{
if(serviceContent == null)
{
try
{
serviceContent = retrieveServiceContent();
} catch(Exception e)
{
System.out.println("Exceptoin: " + e);
}
}
return serviceContent;
}
public AboutInfo getAboutInfo()
{
return getServiceContent().getAbout();
}
public AlarmManager getAlarmManager()
{
return (AlarmManager) createMO(getServiceContent().getAlarmManager());
}
public AuthorizationManager getAuthorizationManager()
{
return (AuthorizationManager) createMO(getServiceContent().getAuthorizationManager());
}
public CustomFieldsManager getCustomFieldsManager()
{
return (CustomFieldsManager) createMO(getServiceContent().getCustomFieldsManager());
}
public CustomizationSpecManager getCustomizationSpecManager()
{
return (CustomizationSpecManager) createMO(getServiceContent().getCustomizationSpecManager());
}
public EventManager getEventManager()
{
return (EventManager) createMO(getServiceContent().getEventManager());
}
public DiagnosticManager getDiagnosticManager()
{
return (DiagnosticManager) createMO(getServiceContent().getDiagnosticManager());
}
public DistributedVirtualSwitchManager getDistributedVirtualSwitchManager()
{
return (DistributedVirtualSwitchManager) createMO(getServiceContent().getDvSwitchManager());
}
public ExtensionManager getExtensionManager()
{
return (ExtensionManager) createMO(getServiceContent().getExtensionManager());
}
public FileManager getFileManager()
{
return (FileManager) createMO(getServiceContent().getFileManager());
}
public HostLocalAccountManager getAccountManager()
{
return (HostLocalAccountManager) createMO(getServiceContent().getAccountManager());
}
public LicenseManager getLicenseManager()
{
return (LicenseManager) createMO(getServiceContent().getLicenseManager());
}
public LocalizationManager getLocalizationManager()
{
return (LocalizationManager) createMO(getServiceContent().getLocalizationManager());
}
public PerformanceManager getPerformanceManager()
{
return (PerformanceManager) createMO(getServiceContent().getPerfManager());
}
public ProfileComplianceManager getProfileComplianceManager()
{
return (ProfileComplianceManager) createMO(getServiceContent().getComplianceManager());
}
public PropertyCollector getPropertyCollector()
{
return (PropertyCollector) createMO(getServiceContent().getPropertyCollector());
}
public ScheduledTaskManager getScheduledTaskManager()
{
return (ScheduledTaskManager) createMO(getServiceContent().getScheduledTaskManager());
}
public SearchIndex getSearchIndex()
{
return (SearchIndex) createMO(getServiceContent().getSearchIndex());
}
public SessionManager getSessionManager()
{
return (SessionManager) createMO(getServiceContent().getSessionManager());
}
public HostSnmpSystem getHostSnmpSystem()
{
return (HostSnmpSystem) createMO(getServiceContent().getSnmpSystem());
}
public HostProfileManager getHostProfileManager()
{
return (HostProfileManager) createMO(getServiceContent().getHostProfileManager());
}
public IpPoolManager getIpPoolManager()
{
return (IpPoolManager) createMO(getServiceContent().getIpPoolManager());
}
public VirtualMachineProvisioningChecker getVirtualMachineProvisioningChecker()
{
return (VirtualMachineProvisioningChecker) createMO(getServiceContent().getVmProvisioningChecker());
}
public VirtualMachineCompatibilityChecker getVirtualMachineCompatibilityChecker()
{
return (VirtualMachineCompatibilityChecker) createMO(getServiceContent().getVmCompatibilityChecker());
}
public TaskManager getTaskManager()
{
return (TaskManager) createMO(getServiceContent().getTaskManager());
}
public UserDirectory getUserDirectory()
{
return (UserDirectory) createMO(getServiceContent().getUserDirectory());
}
public ViewManager getViewManager()
{
return (ViewManager) createMO(getServiceContent().getViewManager());
}
public VirtualDiskManager getVirtualDiskManager()
{
return (VirtualDiskManager) createMO(getServiceContent().getVirtualDiskManager());
}
public OptionManager getOptionManager()
{
return (OptionManager) createMO(getServiceContent().getSetting());
}
public OvfManager getOvfManager()
{
return (OvfManager) createMO(getServiceContent().getOvfManager());
}
private ManagedObject createMO(ManagedObjectReference mor)
{
return MorUtil.createExactManagedObject(getServerConnection(), mor);
}
// TODO vim.VirtualizationManager is defined in servicecontent but no documentation there. Filed a bug already
}
| true
| true
|
public ServiceInstance(URL url, String username, String password, boolean ignoreCert, String namespace)
throws RemoteException, MalformedURLException
{
if(url == null || username==null)
{
throw new NullPointerException("None of url, username can be null.");
}
setMOR(SERVICE_INSTANCE_MOR);
VimPortType vimService = new VimPortType(url.toString(), ignoreCert);
vimService.getWsc().setVimNameSpace(namespace);
serviceContent = vimService.retrieveServiceContent(SERVICE_INSTANCE_MOR);
vimService.getWsc().setSoapActionOnApiVersion(serviceContent.getAbout().getApiVersion());
setServerConnection(new ServerConnection(url, vimService, this));
// escape 5 special chars
// http://en.wikipedia.org/wiki/List_of_XML_and_HTML_character_entity_references
password = password.replaceAll("<", "<")
.replaceAll(">", ">")
.replaceAll("&", "&")
.replaceAll("\"", """)
.replaceAll("'", "'");
UserSession userSession = getSessionManager().login(username, password, null);
getServerConnection().setUserSession(userSession);
}
|
public ServiceInstance(URL url, String username, String password, boolean ignoreCert, String namespace)
throws RemoteException, MalformedURLException
{
if(url == null || username==null)
{
throw new NullPointerException("None of url, username can be null.");
}
setMOR(SERVICE_INSTANCE_MOR);
VimPortType vimService = new VimPortType(url.toString(), ignoreCert);
vimService.getWsc().setVimNameSpace(namespace);
serviceContent = vimService.retrieveServiceContent(SERVICE_INSTANCE_MOR);
vimService.getWsc().setSoapActionOnApiVersion(serviceContent.getAbout().getApiVersion());
setServerConnection(new ServerConnection(url, vimService, this));
// escape 5 special chars
// http://en.wikipedia.org/wiki/List_of_XML_and_HTML_character_entity_references
password = password.replace("&", "&")
.replace("<", "<")
.replace(">", ">")
.replace("\"", """)
.replace("'", "'");
UserSession userSession = getSessionManager().login(username, password, null);
getServerConnection().setUserSession(userSession);
}
|
diff --git a/core/src/main/java/com/google/bitcoin/core/Block.java b/core/src/main/java/com/google/bitcoin/core/Block.java
index e273ae1..78b2439 100644
--- a/core/src/main/java/com/google/bitcoin/core/Block.java
+++ b/core/src/main/java/com/google/bitcoin/core/Block.java
@@ -1,941 +1,941 @@
/**
* Copyright 2011 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.bitcoin.core;
import com.google.common.base.Preconditions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.OutputStream;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import static com.google.bitcoin.core.Utils.doubleDigest;
import static com.google.bitcoin.core.Utils.doubleDigestTwoBuffers;
/**
* <p>A block is a group of transactions, and is one of the fundamental data structures of the Bitcoin system.
* It records a set of {@link Transaction}s together with some data that links it into a place in the global block
* chain, and proves that a difficult calculation was done over its contents. See
* <a href="http://www.bitcoin.org/bitcoin.pdf">the Bitcoin technical paper</a> for
* more detail on blocks. <p/>
*
* To get a block, you can either build one from the raw bytes you can get from another implementation, or request one
* specifically using {@link Peer#getBlock(Sha256Hash)}, or grab one from a downloaded {@link BlockChain}.
*/
public class Block extends Message {
private static final Logger log = LoggerFactory.getLogger(Block.class);
private static final long serialVersionUID = 2738848929966035281L;
/** How many bytes are required to represent a block header. */
public static final int HEADER_SIZE = 80;
static final long ALLOWED_TIME_DRIFT = 2 * 60 * 60; // Same value as official client.
/** A value for difficultyTarget (nBits) that allows half of all possible hash solutions. Used in unit testing. */
static final long EASIEST_DIFFICULTY_TARGET = 0x207fFFFFL;
// For unit testing. If not zero, use this instead of the current time.
static long fakeClock = 0;
// Fields defined as part of the protocol format.
private long version;
private Sha256Hash prevBlockHash;
private Sha256Hash merkleRoot;
private long time;
private long difficultyTarget; // "nBits"
private long nonce;
/** If null, it means this object holds only the headers. */
List<Transaction> transactions;
/** Stores the hash of the block. If null, getHash() will recalculate it. */
private transient Sha256Hash hash;
private transient boolean headerParsed;
private transient boolean transactionsParsed;
private transient boolean headerBytesValid;
private transient boolean transactionBytesValid;
/** Special case constructor, used for the genesis node, cloneAsHeader and unit tests. */
Block(NetworkParameters params) {
super(params);
// Set up a few basic things. We are not complete after this though.
version = 1;
difficultyTarget = 0x1d07fff8L;
time = System.currentTimeMillis() / 1000;
prevBlockHash = Sha256Hash.ZERO_HASH;
length = 80;
}
/** Constructs a block object from the Bitcoin wire format. */
public Block(NetworkParameters params, byte[] payloadBytes) throws ProtocolException {
super(params, payloadBytes, 0, false, false, payloadBytes.length);
}
/**
* Contruct a block object from the BitCoin wire format.
* @param params NetworkParameters object.
* @param parseLazy Whether to perform a full parse immediately or delay until a read is requested.
* @param parseRetain Whether to retain the backing byte array for quick reserialization.
* If true and the backing byte array is invalidated due to modification of a field then
* the cached bytes may be repopulated and retained if the message is serialized again in the future.
* @param length The length of message if known. Usually this is provided when deserializing of the wire
* as the length will be provided as part of the header. If unknown then set to Message.UNKNOWN_LENGTH
* @throws ProtocolException
*/
public Block(NetworkParameters params, byte[] payloadBytes, boolean parseLazy, boolean parseRetain, int length)
throws ProtocolException {
super(params, payloadBytes, 0, parseLazy, parseRetain, length);
}
private void readObject(ObjectInputStream ois) throws ClassNotFoundException, IOException {
ois.defaultReadObject();
// This code is not actually necessary, as transient fields are initialized to the default value which is in
// this case null. However it clears out a FindBugs warning and makes it explicit what we're doing.
hash = null;
}
private void parseHeader() {
if (headerParsed)
return;
cursor = offset;
version = readUint32();
prevBlockHash = readHash();
merkleRoot = readHash();
time = readUint32();
difficultyTarget = readUint32();
nonce = readUint32();
hash = new Sha256Hash(Utils.reverseBytes(Utils.doubleDigest(bytes, offset, cursor)));
headerParsed = true;
headerBytesValid = parseRetain;
}
private void parseTransactions() throws ProtocolException {
if (transactionsParsed)
return;
cursor = offset + HEADER_SIZE;
if (bytes.length == cursor) {
// This message is just a header, it has no transactions.
transactionsParsed = true;
transactionBytesValid = false;
return;
}
int numTransactions = (int) readVarInt();
transactions = new ArrayList<Transaction>(numTransactions);
for (int i = 0; i < numTransactions; i++) {
Transaction tx = new Transaction(params, bytes, cursor, this, parseLazy, parseRetain, UNKNOWN_LENGTH);
transactions.add(tx);
cursor += tx.getMessageSize();
}
// No need to set length here. If length was not provided then it should be set at the end of parseLight().
// If this is a genuine lazy parse then length must have been provided to the constructor.
transactionsParsed = true;
transactionBytesValid = parseRetain;
}
void parse() throws ProtocolException {
parseHeader();
parseTransactions();
length = cursor - offset;
}
protected void parseLite() throws ProtocolException {
// Ignore the header since it has fixed length. If length is not provided we will have to
// invoke a light parse of transactions to calculate the length.
if (length == UNKNOWN_LENGTH) {
Preconditions.checkState(parseLazy,
"Performing lite parse of block transaction as block was initialised from byte array " +
"without providing length. This should never need to happen.");
parseTransactions();
length = cursor - offset;
} else {
transactionBytesValid = !transactionsParsed || parseRetain && length > HEADER_SIZE;
}
headerBytesValid = !headerParsed || parseRetain && length >= HEADER_SIZE;
}
/*
* Block uses some special handling for lazy parsing and retention of cached bytes. Parsing and serializing the
* block header and the transaction list are both non-trivial so there are good efficiency gains to be had by
* separating them. There are many cases where a user may need access to access or change one or the other but not both.
*
* With this in mind we ignore the inherited checkParse() and unCache() methods and implement a separate version
* of them for both header and transactions.
*
* Serializing methods are also handled in their own way. Whilst they deal with separate parts of the block structure
* there are some interdependencies. For example altering a tx requires invalidating the Merkle root and therefore
* the cached header bytes.
*/
private void maybeParseHeader() {
if (headerParsed || bytes == null)
return;
parseHeader();
if (!(headerBytesValid || transactionBytesValid))
bytes = null;
}
private void maybeParseTransactions() {
if (transactionsParsed || bytes == null)
return;
try {
parseTransactions();
if (!parseRetain) {
transactionBytesValid = false;
if (headerParsed)
bytes = null;
}
} catch (ProtocolException e) {
throw new LazyParseException(
"ProtocolException caught during lazy parse. For safe access to fields call ensureParsed before attempting read or write access",
e);
}
}
/**
* Ensure the object is parsed if needed. This should be called in every getter before returning a value. If the
* lazy parse flag is not set this is a method returns immediately.
*/
protected void maybeParse() {
throw new LazyParseException(
"checkParse() should never be called on a Block. Instead use checkParseHeader() and checkParseTransactions()");
}
/**
* In lazy parsing mode access to getters and setters may throw an unchecked LazyParseException. If guaranteed
* safe access is required this method will force parsing to occur immediately thus ensuring LazyParseExeption will
* never be thrown from this Message. If the Message contains child messages (e.g. a Block containing Transaction
* messages) this will not force child messages to parse.
*
* This method ensures parsing of both headers and transactions.
*
* @throws ProtocolException
*/
public void ensureParsed() throws ProtocolException {
try {
maybeParseHeader();
maybeParseTransactions();
} catch (LazyParseException e) {
if (e.getCause() instanceof ProtocolException)
throw (ProtocolException) e.getCause();
throw new ProtocolException(e);
}
}
/**
* In lazy parsing mode access to getters and setters may throw an unchecked LazyParseException. If guaranteed
* safe access is required this method will force parsing to occur immediately thus ensuring LazyParseExeption
* will never be thrown from this Message. If the Message contains child messages (e.g. a Block containing
* Transaction messages) this will not force child messages to parse.
*
* This method ensures parsing of headers only.
*
* @throws ProtocolException
*/
public void ensureParsedHeader() throws ProtocolException {
try {
maybeParseHeader();
} catch (LazyParseException e) {
if (e.getCause() instanceof ProtocolException)
throw (ProtocolException) e.getCause();
throw new ProtocolException(e);
}
}
/**
* In lazy parsing mode access to getters and setters may throw an unchecked LazyParseException. If guaranteed
* safe access is required this method will force parsing to occur immediately thus ensuring LazyParseExeption will
* never be thrown from this Message. If the Message contains child messages (e.g. a Block containing Transaction
* messages) this will not force child messages to parse.
*
* This method ensures parsing of transactions only.
*
* @throws ProtocolException
*/
public void ensureParsedTransactions() throws ProtocolException {
try {
maybeParseTransactions();
} catch (LazyParseException e) {
if (e.getCause() instanceof ProtocolException)
throw (ProtocolException) e.getCause();
throw new ProtocolException(e);
}
}
private void writeHeader(OutputStream stream) throws IOException {
// try for cached write first
if (headerBytesValid && bytes != null && bytes.length >= offset + HEADER_SIZE) {
stream.write(bytes, offset, HEADER_SIZE);
return;
}
// fall back to manual write
maybeParseHeader();
Utils.uint32ToByteStreamLE(version, stream);
stream.write(Utils.reverseBytes(prevBlockHash.getBytes()));
stream.write(Utils.reverseBytes(getMerkleRoot().getBytes()));
Utils.uint32ToByteStreamLE(time, stream);
Utils.uint32ToByteStreamLE(difficultyTarget, stream);
Utils.uint32ToByteStreamLE(nonce, stream);
}
private void writeTransactions(OutputStream stream) throws IOException {
// check for no transaction conditions first
// must be a more efficient way to do this but I'm tired atm.
if (transactions == null && transactionsParsed) {
return;
}
// confirmed we must have transactions either cached or as objects.
if (transactionBytesValid && bytes != null && bytes.length >= offset + length) {
stream.write(bytes, offset + HEADER_SIZE, length - HEADER_SIZE);
return;
}
if (transactions != null) {
stream.write(new VarInt(transactions.size()).encode());
for (Transaction tx : transactions) {
tx.bitcoinSerialize(stream);
}
}
}
/**
* Special handling to check if we have a valid byte array for both header
* and transactions
*
* @throws IOException
*/
public byte[] bitcoinSerialize() {
// we have completely cached byte array.
if (headerBytesValid && transactionBytesValid) {
Preconditions.checkNotNull(bytes, "Bytes should never be null if headerBytesValid && transactionBytesValid");
if (length == bytes.length) {
return bytes;
} else {
// byte array is offset so copy out the correct range.
byte[] buf = new byte[length];
System.arraycopy(bytes, offset, buf, 0, length);
return buf;
}
}
// At least one of the two cacheable components is invalid
// so fall back to stream write since we can't be sure of the length.
ByteArrayOutputStream stream = new UnsafeByteArrayOutputStream(length == UNKNOWN_LENGTH ? HEADER_SIZE + guessTransactionsLength() : length);
try {
writeHeader(stream);
writeTransactions(stream);
} catch (IOException e) {
// Cannot happen, we are serializing to a memory stream.
}
return stream.toByteArray();
}
@Override
protected void bitcoinSerializeToStream(OutputStream stream) throws IOException {
writeHeader(stream);
// We may only have enough data to write the header.
writeTransactions(stream);
}
/**
* Provides a reasonable guess at the byte length of the transactions part of the block.
* The returned value will be accurate in 99% of cases and in those cases where not will probably slightly
* oversize.
*
* This is used to preallocate the underlying byte array for a ByteArrayOutputStream. If the size is under the
* real value the only penalty is resizing of the underlying byte array.
*/
private int guessTransactionsLength() {
if (transactionBytesValid)
return bytes.length - HEADER_SIZE;
if (transactions == null)
return 0;
int len = VarInt.sizeOf(transactions.size());
for (Transaction tx : transactions) {
// 255 is just a guess at an average tx length
len += tx.length == UNKNOWN_LENGTH ? 255 : tx.length;
}
return len;
}
protected void unCache() {
// Since we have alternate uncache methods to use internally this will only ever be called by a child
// transaction so we only need to invalidate that part of the cache.
unCacheTransactions();
}
private void unCacheHeader() {
maybeParseHeader();
headerBytesValid = false;
if (!transactionBytesValid)
bytes = null;
hash = null;
checksum = null;
}
private void unCacheTransactions() {
maybeParseTransactions();
transactionBytesValid = false;
if (!headerBytesValid)
bytes = null;
// Current implementation has to uncache headers as well as any change to a tx will alter the merkle root. In
// future we can go more granular and cache merkle root separately so rest of the header does not need to be
// rewritten.
unCacheHeader();
// Clear merkleRoot last as it may end up being parsed during unCacheHeader().
merkleRoot = null;
}
/**
* Calculates the block hash by serializing the block and hashing the
* resulting bytes.
*/
private Sha256Hash calculateHash() {
try {
ByteArrayOutputStream bos = new UnsafeByteArrayOutputStream(HEADER_SIZE);
writeHeader(bos);
return new Sha256Hash(Utils.reverseBytes(doubleDigest(bos.toByteArray())));
} catch (IOException e) {
throw new RuntimeException(e); // Cannot happen.
}
}
/**
* Returns the hash of the block (which for a valid, solved block should be below the target) in the form seen on
* the block explorer. If you call this on block 1 in the production chain
* you will get "00000000839a8e6886ab5951d76f411475428afc90947ee320161bbf18eb6048".
*/
public String getHashAsString() {
return getHash().toString();
}
/**
* Returns the hash of the block (which for a valid, solved block should be
* below the target). Big endian.
*/
public Sha256Hash getHash() {
if (hash == null)
hash = calculateHash();
return hash;
}
/**
* The number that is one greater than the largest representable SHA-256
* hash.
*/
static private BigInteger LARGEST_HASH = BigInteger.ONE.shiftLeft(256);
/**
* Returns the work represented by this block.<p>
*
* Work is defined as the number of tries needed to solve a block in the
* average case. Consider a difficulty target that covers 5% of all possible
* hash values. Then the work of the block will be 20. As the target gets
* lower, the amount of work goes up.
*/
public BigInteger getWork() throws VerificationException {
BigInteger target = getDifficultyTargetAsInteger();
return LARGEST_HASH.divide(target.add(BigInteger.ONE));
}
/** Returns a copy of the block, but without any transactions. */
public Block cloneAsHeader() {
maybeParseHeader();
Block block = new Block(params);
block.nonce = nonce;
block.prevBlockHash = prevBlockHash.duplicate();
block.merkleRoot = getMerkleRoot().duplicate();
block.version = version;
block.time = time;
block.difficultyTarget = difficultyTarget;
block.transactions = null;
block.hash = getHash().duplicate();
return block;
}
/**
* Returns a multi-line string containing a description of the contents of
* the block. Use for debugging purposes only.
*/
@Override
public String toString() {
StringBuffer s = new StringBuffer("v" + version + " block: \n" + " previous block: "
+ prevBlockHash.toString() + "\n" + " merkle root: " + getMerkleRoot().toString() + "\n"
+ " time: [" + time + "] " + new Date(time * 1000).toString() + "\n"
+ " difficulty target (nBits): " + difficultyTarget + "\n" + " nonce: " + nonce + "\n");
if (transactions != null && transactions.size() > 0) {
s.append(" with ").append(transactions.size()).append(" transaction(s):\n");
for (Transaction tx : transactions) {
s.append(tx.toString());
}
}
return s.toString();
}
/**
* Finds a value of nonce that makes the blocks hash lower than the difficulty target. This is called mining, but
* solve() is far too slow to do real mining with. It exists only for unit testing purposes and is not a part of
* the public API.
*
* This can loop forever if a solution cannot be found solely by incrementing nonce. It doesn't change extraNonce.
*/
void solve() {
maybeParseHeader();
while (true) {
try {
// Is our proof of work valid yet?
if (checkProofOfWork(false))
return;
// No, so increment the nonce and try again.
setNonce(getNonce() + 1);
} catch (VerificationException e) {
throw new RuntimeException(e); // Cannot happen.
}
}
}
/**
* Returns the difficulty target as a 256 bit value that can be compared to a SHA-256 hash. Inside a block the
* target is represented using a compact form. If this form decodes to a value that is out of bounds, an exception
* is thrown.
*/
public BigInteger getDifficultyTargetAsInteger() throws VerificationException {
maybeParseHeader();
BigInteger target = Utils.decodeCompactBits(difficultyTarget);
if (target.compareTo(BigInteger.valueOf(0)) <= 0 || target.compareTo(params.proofOfWorkLimit) > 0)
throw new VerificationException("Difficulty target is bad: " + target.toString());
return target;
}
/** Returns true if the hash of the block is OK (lower than difficulty target). */
private boolean checkProofOfWork(boolean throwException) throws VerificationException {
// This part is key - it is what proves the block was as difficult to make as it claims
// to be. Note however that in the context of this function, the block can claim to be
// as difficult as it wants to be .... if somebody was able to take control of our network
// connection and fork us onto a different chain, they could send us valid blocks with
// ridiculously easy difficulty and this function would accept them.
//
// To prevent this attack from being possible, elsewhere we check that the difficultyTarget
// field is of the right value. This requires us to have the preceeding blocks.
BigInteger target = getDifficultyTargetAsInteger();
BigInteger h = getHash().toBigInteger();
if (h.compareTo(target) > 0) {
// Proof of work check failed!
if (throwException)
throw new VerificationException("Hash is higher than target: " + getHashAsString() + " vs "
+ target.toString(16));
else
return false;
}
return true;
}
private void checkTimestamp() throws VerificationException {
maybeParseHeader();
// Allow injection of a fake clock to allow unit testing.
long currentTime = fakeClock != 0 ? fakeClock : System.currentTimeMillis() / 1000;
if (time > currentTime + ALLOWED_TIME_DRIFT)
throw new VerificationException("Block too far in future");
}
private void checkMerkleRoot() throws VerificationException {
Sha256Hash calculatedRoot = calculateMerkleRoot();
if (!calculatedRoot.equals(merkleRoot)) {
log.error("Merkle tree did not verify");
throw new VerificationException("Merkle hashes do not match: " + calculatedRoot + " vs " + merkleRoot);
}
}
private Sha256Hash calculateMerkleRoot() {
List<byte[]> tree = buildMerkleTree();
return new Sha256Hash(tree.get(tree.size() - 1));
}
private List<byte[]> buildMerkleTree() {
// The Merkle root is based on a tree of hashes calculated from the transactions:
//
// root
// / \
// A B
// / \ / \
// t1 t2 t3 t4
//
// The tree is represented as a list: t1,t2,t3,t4,A,B,root where each
// entry is a hash.
//
// The hashing algorithm is double SHA-256. The leaves are a hash of the serialized contents of the transaction.
// The interior nodes are hashes of the concenation of the two child hashes.
//
// This structure allows the creation of proof that a transaction was included into a block without having to
// provide the full block contents. Instead, you can provide only a Merkle branch. For example to prove tx2 was
// in a block you can just provide tx2, the hash(tx1) and B. Now the other party has everything they need to
// derive the root, which can be checked against the block header. These proofs aren't used right now but
// will be helpful later when we want to download partial block contents.
//
// Note that if the number of transactions is not even the last tx is repeated to make it so (see
// tx3 above). A tree with 5 transactions would look like this:
//
// root
- // / \
- // 1 \
- // / \ \
- // 2 3 4
+ // / \
+ // 1 5
+ // / \ / \
+ // 2 3 4 4
// / \ / \ / \
// t1 t2 t3 t4 t5 t5
maybeParseTransactions();
ArrayList<byte[]> tree = new ArrayList<byte[]>();
// Start by adding all the hashes of the transactions as leaves of the tree.
for (Transaction t : transactions) {
tree.add(t.getHash().getBytes());
}
int levelOffset = 0; // Offset in the list where the currently processed level starts.
// Step through each level, stopping when we reach the root (levelSize == 1).
for (int levelSize = transactions.size(); levelSize > 1; levelSize = (levelSize + 1) / 2) {
// For each pair of nodes on that level:
for (int left = 0; left < levelSize; left += 2) {
// The right hand node can be the same as the left hand, in the case where we don't have enough
// transactions.
int right = Math.min(left + 1, levelSize - 1);
byte[] leftBytes = Utils.reverseBytes(tree.get(levelOffset + left));
byte[] rightBytes = Utils.reverseBytes(tree.get(levelOffset + right));
tree.add(Utils.reverseBytes(doubleDigestTwoBuffers(leftBytes, 0, 32, rightBytes, 0, 32)));
}
// Move to the next level.
levelOffset += levelSize;
}
return tree;
}
private void checkTransactions() throws VerificationException {
// The first transaction in a block must always be a coinbase transaction.
if (!transactions.get(0).isCoinBase())
throw new VerificationException("First tx is not coinbase");
// The rest must not be.
for (int i = 1; i < transactions.size(); i++) {
if (transactions.get(i).isCoinBase())
throw new VerificationException("TX " + i + " is coinbase when it should not be.");
}
}
/**
* Checks the block data to ensure it follows the rules laid out in the network parameters. Specifically,
* throws an exception if the proof of work is invalid, or if the timestamp is too far from what it should be.
* This is <b>not</b> everything that is required for a block to be valid, only what is checkable independent
* of the chain and without a transaction index.
*
* @throws VerificationException
*/
public void verifyHeader() throws VerificationException {
// Prove that this block is OK. It might seem that we can just ignore most of these checks given that the
// network is also verifying the blocks, but we cannot as it'd open us to a variety of obscure attacks.
//
// Firstly we need to ensure this block does in fact represent real work done. If the difficulty is high
// enough, it's probably been done by the network.
maybeParseHeader();
checkProofOfWork(true);
checkTimestamp();
}
/**
* Checks the block contents
*
* @throws VerificationException
*/
public void verifyTransactions() throws VerificationException {
// Now we need to check that the body of the block actually matches the headers. The network won't generate
// an invalid block, but if we didn't validate this then an untrusted man-in-the-middle could obtain the next
// valid block from the network and simply replace the transactions in it with their own fictional
// transactions that reference spent or non-existant inputs.
Preconditions.checkState(!transactions.isEmpty());
maybeParseTransactions();
checkTransactions();
checkMerkleRoot();
}
/**
* Verifies both the header and that the transactions hash to the merkle root.
*/
public void verify() throws VerificationException {
verifyHeader();
verifyTransactions();
}
@Override
public boolean equals(Object o) {
if (!(o instanceof Block))
return false;
Block other = (Block) o;
return getHash().equals(other.getHash());
}
@Override
public int hashCode() {
return getHash().hashCode();
}
/**
* Returns the merkle root in big endian form, calculating it from transactions if necessary.
*/
public Sha256Hash getMerkleRoot() {
maybeParseHeader();
if (merkleRoot == null) {
//TODO check if this is really necessary.
unCacheHeader();
merkleRoot = calculateMerkleRoot();
}
return merkleRoot;
}
/** Exists only for unit testing. */
void setMerkleRoot(Sha256Hash value) {
unCacheHeader();
merkleRoot = value;
hash = null;
}
/** Adds a transaction to this block. The nonce and merkle root are invalid after this. */
public void addTransaction(Transaction t) {
unCacheTransactions();
if (transactions == null) {
transactions = new ArrayList<Transaction>();
}
t.setParent(this);
if (transactions.size() == 0 && !t.isCoinBase())
throw new RuntimeException("Attempted to add a non-coinbase transaction as the first transaction: " + t);
else if (transactions.size() > 0 && t.isCoinBase())
throw new RuntimeException("Attempted to add a coinbase transaction when there already is one: " + t);
transactions.add(t);
adjustLength(t.length);
// Force a recalculation next time the values are needed.
merkleRoot = null;
hash = null;
}
/** Returns the version of the block data structure as defined by the BitCoin protocol. */
public long getVersion() {
maybeParseHeader();
return version;
}
/**
* Returns the hash of the previous block in the chain, as defined by the block header.
*/
public Sha256Hash getPrevBlockHash() {
maybeParseHeader();
return prevBlockHash;
}
void setPrevBlockHash(Sha256Hash prevBlockHash) {
unCacheHeader();
this.prevBlockHash = prevBlockHash;
this.hash = null;
}
/**
* Returns the time at which the block was solved and broadcast, according to the clock of the solving node. This
* is measured in seconds since the UNIX epoch (midnight Jan 1st 1970).
*/
public long getTimeSeconds() {
maybeParseHeader();
return time;
}
/**
* Returns the time at which the block was solved and broadcast, according to the clock of the solving node.
*/
public Date getTime() {
return new Date(getTimeSeconds()*1000);
}
void setTime(long time) {
unCacheHeader();
this.time = time;
this.hash = null;
}
/**
* Returns the difficulty of the proof of work that this block should meet encoded in compact form. The {@link
* BlockChain} verifies that this is not too easy by looking at the length of the chain when the block is added.
* To find the actual value the hash should be compared against, use getDifficultyTargetBI.
*/
public long getDifficultyTarget() {
maybeParseHeader();
return difficultyTarget;
}
void setDifficultyTarget(long compactForm) {
unCacheHeader();
this.difficultyTarget = compactForm;
this.hash = null;
}
/**
* Returns the nonce, an arbitrary value that exists only to make the hash of the block header fall below the
* difficulty target.
*/
public long getNonce() {
maybeParseHeader();
return nonce;
}
void setNonce(long nonce) {
unCacheHeader();
this.nonce = nonce;
this.hash = null;
}
public List<Transaction> getTransactions() {
maybeParseTransactions();
return Collections.unmodifiableList(transactions);
}
// ///////////////////////////////////////////////////////////////////////////////////////////////
// Unit testing related methods.
// Used to make transactions unique.
static private int txCounter;
/** Adds a coinbase transaction to the block. This exists for unit tests. */
void addCoinbaseTransaction(byte[] pubKeyTo) {
unCacheTransactions();
transactions = new ArrayList<Transaction>();
Transaction coinbase = new Transaction(params);
// A real coinbase transaction has some stuff in the scriptSig like the extraNonce and difficulty. The
// transactions are distinguished by every TX output going to a different key.
//
// Here we will do things a bit differently so a new address isn't needed every time. We'll put a simple
// counter in the scriptSig so every transaction has a different hash.
coinbase.addInput(new TransactionInput(params, coinbase, new byte[]{(byte) txCounter++}));
coinbase.addOutput(new TransactionOutput(params, coinbase, Script.createOutputScript(pubKeyTo)));
transactions.add(coinbase);
}
static final byte[] EMPTY_BYTES = new byte[32];
/**
* Returns a solved block that builds on top of this one. This exists for unit tests.
*/
Block createNextBlock(Address to, long time) {
return createNextBlock(to, time, EMPTY_BYTES);
}
/**
* Returns a solved block that builds on top of this one. This exists for unit tests.
* In this variant you can specify a public key (pubkey) for use in generating coinbase blocks.
*/
Block createNextBlock(Address to, long time, byte[] pubKey) {
Block b = new Block(params);
b.setDifficultyTarget(difficultyTarget);
b.addCoinbaseTransaction(pubKey);
if (to != null) {
// Add a transaction paying 50 coins to the "to" address.
Transaction t = new Transaction(params);
t.addOutput(new TransactionOutput(params, t, Utils.toNanoCoins(50, 0), to));
// The input does not really need to be a valid signature, as long as it has the right general form.
TransactionInput input = new TransactionInput(params, t, Script.createInputScript(EMPTY_BYTES, EMPTY_BYTES));
// Importantly the outpoint hash cannot be zero as that's how we detect a coinbase transaction in isolation
// but it must be unique to avoid 'different' transactions looking the same.
byte[] counter = new byte[32];
counter[0] = (byte) txCounter++;
counter[1] = 1;
input.getOutpoint().setHash(new Sha256Hash(counter));
t.addInput(input);
b.addTransaction(t);
}
b.setPrevBlockHash(getHash());
b.setTime(time);
b.solve();
try {
b.verifyHeader();
} catch (VerificationException e) {
throw new RuntimeException(e); // Cannot happen.
}
return b;
}
// Visible for testing.
public Block createNextBlock(Address to) {
return createNextBlock(to, Utils.now().getTime() / 1000);
}
/**
* Create a block sending 50BTC as a coinbase transaction to the public key specified.
* This method is intended for test use only.
*/
Block createNextBlockWithCoinbase(byte[] pubKey) {
return createNextBlock(null, Utils.now().getTime() / 1000, pubKey);
}
/**
* Used for unit test
*
* @return the headerParsed
*/
boolean isParsedHeader() {
return headerParsed;
}
/**
* Used for unit test
*
* @return the transactionsParsed
*/
boolean isParsedTransactions() {
return transactionsParsed;
}
/**
* Used for unit test
*
* @return the headerBytesValid
*/
boolean isHeaderBytesValid() {
return headerBytesValid;
}
/**
* Used for unit test
*
* @return the transactionBytesValid
*/
boolean isTransactionBytesValid() {
return transactionBytesValid;
}
}
| true
| true
|
private List<byte[]> buildMerkleTree() {
// The Merkle root is based on a tree of hashes calculated from the transactions:
//
//          root
//         /    \
//        A      B
//       / \    / \
//      t1  t2 t3  t4
//
// The tree is represented as a list: t1,t2,t3,t4,A,B,root where each
// entry is a hash.
//
// The hashing algorithm is double SHA-256. The leaves are a hash of the serialized contents of the transaction.
// The interior nodes are hashes of the concatenation of the two child hashes.
//
// This structure allows the creation of proof that a transaction was included into a block without having to
// provide the full block contents. Instead, you can provide only a Merkle branch. For example to prove tx2 was
// in a block you can just provide tx2, the hash(tx1) and B. Now the other party has everything they need to
// derive the root, which can be checked against the block header. These proofs aren't used right now but
// will be helpful later when we want to download partial block contents.
//
// Note that if the number of nodes on a level is odd, the last node is paired with itself (see the
// duplicated t5 and interior node 4 below). A tree with 5 transactions would look like this:
//
//              root
//             /    \
//            1      5
//          /   \   / \
//         2     3 4   4
//        / \   / \ | \
//       t1 t2 t3 t4 t5 t5
maybeParseTransactions();
ArrayList<byte[]> tree = new ArrayList<byte[]>();
// Start by adding all the hashes of the transactions as leaves of the tree.
for (Transaction t : transactions) {
tree.add(t.getHash().getBytes());
}
int levelOffset = 0; // Offset in the list where the currently processed level starts.
// Step through each level, stopping when we reach the root (levelSize == 1).
for (int levelSize = transactions.size(); levelSize > 1; levelSize = (levelSize + 1) / 2) {
// For each pair of nodes on that level:
for (int left = 0; left < levelSize; left += 2) {
// The right hand node can be the same as the left hand, in the case where we don't have enough
// transactions.
int right = Math.min(left + 1, levelSize - 1);
// NOTE(review): bytes are flipped before hashing and the digest flipped back —
// presumably getHash() stores display/RPC byte order while hashing needs wire order; confirm.
byte[] leftBytes = Utils.reverseBytes(tree.get(levelOffset + left));
byte[] rightBytes = Utils.reverseBytes(tree.get(levelOffset + right));
tree.add(Utils.reverseBytes(doubleDigestTwoBuffers(leftBytes, 0, 32, rightBytes, 0, 32)));
}
// Move to the next level.
levelOffset += levelSize;
}
return tree;
}
|
/**
 * Rebuilds the Merkle tree over this block's transactions.
 *
 * The result is a flat list laid out level by level: first every transaction
 * hash (the leaves, in block order), then each successive level of interior
 * double-SHA-256 hashes, ending with the Merkle root as the last element.
 * When a level has an odd number of nodes its last node is paired with itself.
 * Hashes are byte-reversed around the digest call so that hashing operates on
 * wire-order bytes while the stored hashes keep their original byte order.
 *
 * @return the level-order list of tree hashes; the root is the final entry
 */
private List<byte[]> buildMerkleTree() {
    maybeParseTransactions();
    ArrayList<byte[]> tree = new ArrayList<byte[]>();
    // Leaves: one 32-byte hash per transaction.
    for (Transaction tx : transactions) {
        tree.add(tx.getHash().getBytes());
    }
    int levelStart = 0; // index in 'tree' where the current level begins
    int levelSize = transactions.size();
    while (levelSize > 1) {
        // Combine this level pairwise; a trailing odd node pairs with itself.
        for (int i = 0; i < levelSize; i += 2) {
            int partner = Math.min(i + 1, levelSize - 1);
            byte[] lhs = Utils.reverseBytes(tree.get(levelStart + i));
            byte[] rhs = Utils.reverseBytes(tree.get(levelStart + partner));
            tree.add(Utils.reverseBytes(doubleDigestTwoBuffers(lhs, 0, 32, rhs, 0, 32)));
        }
        // Advance to the (parent) level just appended.
        levelStart += levelSize;
        levelSize = (levelSize + 1) / 2;
    }
    return tree;
}
|
diff --git a/src/com/RoboMobo/Map.java b/src/com/RoboMobo/Map.java
index c45bf8b..28e2a0f 100644
--- a/src/com/RoboMobo/Map.java
+++ b/src/com/RoboMobo/Map.java
@@ -1,506 +1,506 @@
package com.RoboMobo;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Point;
import android.graphics.Rect;
import android.os.Message;
import android.util.Log;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.ArrayList;
import java.util.concurrent.ExecutionException;
/**
* Created with IntelliJ IDEA.
* User: Роман
* Date: 31.07.13
* Time: 11:01
*/
public class Map
{
public final int width;
public final int height;
public double corner1latt = 0;
public double corner1long = 0;
public double corner2latt = 0;
public double corner2long = 0;
public boolean corner1fixed;
public boolean corner2fixed;
public double basexlatt = 0;
public double basexlong = 0;
public double baseylatt = 0;
public double baseylong = 0;
public double det;
public ArrayList<int[]> pickups;
public Player p0;
public Player p1;
public float prevFilteredCompass = 0;
public MapState state;
public Point suspendTile;
/**
* Array of tile IDs. Every [width] indexes starts a new row.
*/
public short[][] tiles;
/**
 * Creates a map of the given logical dimensions, pre-populated with a
 * hard-coded obstacle layout, and puts the game into the PreGame state.
 *
 * NOTE(review): the tile grid is allocated as RMR.mapSideLength x
 * RMR.mapSideLength, not w x h — if w/h ever differ from RMR.mapSideLength
 * the two disagree; confirm which is authoritative.
 *
 * @param w map width in tiles
 * @param h map height in tiles
 */
public Map(int w, int h)
{
this.width = w;
this.height = h;
pickups = new ArrayList<int[]>();
// Neither GPS corner is calibrated yet (see fixCorner1/fixCorner2).
corner1fixed = false;
corner2fixed = false;
tiles = new short[RMR.mapSideLength][RMR.mapSideLength];
// Clear the whole grid to tile id 0 (walkable).
for (int i = 0; i < RMR.mapSideLength; i++)
{
for (int j = 0; j < RMR.mapSideLength; j++)
{
tiles[i][j] = 0;
}
}
// Hard-coded obstacle strips: two runs of tile id 1 and one of tile id 2.
for (int i = 0; i < 3; i++)
{
tiles[3][i] = 1;
}
for (int i = 2; i < 7; i++)
{
tiles[i][5] = 1;
}
for (int i = 0; i < 3; i++)
{
tiles[i][8] = 2;
}
state = MapState.PreGame;
this.suspendTile = null;
// Procedural river generation is currently disabled.
/*Runnable r = new Runnable()
{
@Override
public void run()
{
Generation.generateRivers(tiles, width, height);
}
};
r.run();*/
}
public void Update(long elapsedTime)
{
//Log.wtf("current coords", RMR.gps.last_latt + " " + RMR.gps.last_long);
int[] coord = coordTransform(RMR.gps.last_latt, RMR.gps.last_long);
if (coord != null)
{
this.p0.changePos(coord);
}
if (this.suspendTile != null && (Math.floor(this.p0.posX / 32.0) == this.suspendTile.x && Math.floor(this.p0.posY / 32.0) == this.suspendTile.y))
{
this.suspendTile = null;
this.state = MapState.Game;
}
if (RMR.state == RMR.GameState.ClientIngame || RMR.state == RMR.GameState.ServerIngame)
{
JSONObject pl = new JSONObject();
try
{
pl.put("X", p0.posX);
pl.put("Y", p0.posY);
}
catch (JSONException e)
{
e.printStackTrace();
}
JSONObject jobj = new JSONObject();
try
{
jobj.put("Player", pl);
}
catch (JSONException e)
{
e.printStackTrace();
}
try
{
switch (RMR.net.getStatus())
{
case FINISHED:
JSONObject[] jb = RMR.net.get();
RMR.net = new Networking(RMR.net.ip, RMR.net.isServer);
RMR.net.execute(jobj);
if(jb==null)
{
break;
}
for (int i = 0; i < jb.length; i++)
{
JSONObject jo = jb[i];
if (jo.has("Player"))
{
this.p1.prevPosX = this.p1.posX;
this.p1.prevPosY = this.p1.posY;
this.p1.posX = jo.getJSONObject("Player").getInt("X");
this.p1.posY = jo.getJSONObject("Player").getInt("Y");
}
}
break;
case PENDING:
RMR.net.execute(jobj);
break;
}
}
catch (InterruptedException e)
{
e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
}
catch (ExecutionException e)
{
e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
}
catch (JSONException e)
{
e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
}
}
if (state == MapState.Game)
{
if (this.p0.posX < 0 || this.p0.posY < 0 || Math.floor(this.p0.posX / 32.0) >= RMR.mapSideLength || Math.floor(this.p0.posY / 32.0) >= RMR.mapSideLength)
{
this.state = MapState.Suspended;
this.suspendTile = new Point();
this.suspendTile.set((int) Math.floor(this.p0.prevPosX / 32.0), (int) Math.floor(this.p0.prevPosY / 32.0));
}
else if (this.tiles[((int) Math.floor(this.p0.posX / 32.0))][((int) Math.floor(this.p0.posY / 32.0))] != 0)
{
this.state = MapState.Suspended;
this.suspendTile = new Point();
this.suspendTile.set((int) Math.floor(this.p0.prevPosX / 32.0), (int) Math.floor(this.p0.prevPosY / 32.0));
}
for (int i = 0; i < this.pickups.size(); i++)
{
this.pickups.get(i)[2] -= elapsedTime;
if (this.pickups.get(i)[2] <= 0)
{
pickups.remove(this.pickups.get(i));
}
if ((Math.floor(this.p0.posX / 32.0) == this.pickups.get(i)[0]) && (Math.floor(this.p0.posY / 32.0) == this.pickups.get(i)[1]))
{
/*Log.wtf("Pl", Math.floor(this.player1.posX / 32) + " " + Math.floor(this.player1.posY / 32));
Log.wtf("Pick", this.pickups.get(i)[0] + " " + this.pickups.get(i)[1]);*/
this.p0.addScore(this.pickups.get(i)[4]);
this.pickups.remove(i);
Message msg = new Message();
msg.arg1 = this.p0.score;
((ActivityMain) RMR.am).HandlerUIUpdate.sendMessage(msg);
}
}
if (this.pickups.size() < 10 && RMR.rnd.nextInt(20) == 1)
{
int x = RMR.rnd.nextInt(RMR.mapSideLength);
int y = RMR.rnd.nextInt(RMR.mapSideLength);
if (this.tiles[x][y] == 0)
{
int t = RMR.rnd.nextInt(20000) + 20000;
pickups.add(new int[]{x, y, t, t, 1 + RMR.rnd.nextInt(2)}); //[x, y, timer, lifetime, type]
}
}
/*TextView text = (TextView) RMR.am.findViewById(R.id.tv_score);
text.setText("Очки: "+this.player1.score);*/
}
}
public void Draw()
{
Paint pa = new Paint();
RMR.c.save();
{
int mapW = this.width * 32;
int mapH = this.height * 32;
Player p = this.p0;
//double mapRotation = Math.toDegrees(Math.asin(Math.abs(this.basexlong - this.baseylong) / Math.sqrt(Math.pow(this.basexlatt - this.baseylatt, 2) + Math.pow(this.basexlong - this.baseylong, 2))));
double mapRotation = Math.toDegrees(Math.asin(Math.abs(p.posY - p.prevPosY) / Math.sqrt(Math.pow(p.posX - p.prevPosX, 2) + Math.pow(p.posY - p.prevPosY, 2))));
if ((p.posX - p.prevPosX) >= 0)
{
if ((p.posY - p.prevPosY) >= 0)
{
mapRotation = 180 - mapRotation;
}
else
{
mapRotation = 180 + mapRotation;
}
}
else
{
if ((p.posY - p.prevPosY) < 0)
{
mapRotation = 360 - mapRotation;
}
}
RMR.c.scale(((float) RMR.sw.getWidth() / (float) mapH), ((float) RMR.sw.getWidth() / (float) mapH));
Rect src = new Rect();
Rect dst = new Rect();
float α = 0.9f;
RMR.c.save();
{
RMR.c.translate(RMR.mapSideLength * 32 / 2, RMR.mapSideLength * 32 / 2);
/*if(this.corner1fixed && this.corner2fixed)*/
double delta = prevFilteredCompass + Math.toDegrees(RMR.compass.orientationData[0]);
Log.d("compass", delta + " " + prevFilteredCompass + " " + (Math.abs(delta) > 180));
delta = (delta > 180) ? (delta - 360) : ((delta < -180) ? (delta + 360) : delta);
prevFilteredCompass = (float) (α * delta - Math.toDegrees(RMR.compass.orientationData[0]));
if (this.corner1fixed && this.corner2fixed)
{
- RMR.c.rotate((!Double.isNaN(mapRotation) ? (float) mapRotation : 0) + prevFilteredCompass/*-(float) playerAngle*/, 0, 0);
+ RMR.c.rotate(/*(!Double.isNaN(mapRotation) ? (float) mapRotation : 0) + */prevFilteredCompass/*-(float) playerAngle*/, 0, 0);
}
RMR.c.translate(-this.p0.posY, -this.p0.posX);
pa.setColor(Color.DKGRAY);
RMR.c.save();
{
/*for (int i = 0; i < RMR.currentMap.width; i++)
{
for (int j = 0; j < RMR.currentMap.height; j++)
{
RMR.c.save();
{
RMR.c.translate(i * 32, j * 32);
RMR.c.drawLine(0, 0, 32, 0, pa);
RMR.c.drawLine(0, 0, 0, 32, pa);
RMR.c.drawLine(32, 32, 32, 0, pa);
RMR.c.drawLine(32, 32, 0, 32, pa);
}
RMR.c.restore();
}
}*/
RMR.c.drawLine(0, 0, 32 * this.width, 0, pa);
RMR.c.drawLine(0, 0, 0, 32 * this.height, pa);
RMR.c.drawLine(32 * this.width, 32 * this.height, 32 * this.width, 0, pa);
RMR.c.drawLine(32 * this.width, 32 * this.height, 0, 32 * this.height, pa);
}
RMR.c.restore();
pa = new Paint();
RMR.c.save();
{
if((System.currentTimeMillis() / 30) % 2 == 0) RMGR.tile_0_iterator++;
if(RMGR.tile_0_iterator == RMGR.TILE_0.length) RMGR.tile_0_iterator = 0;
if((System.currentTimeMillis() / 30) % 2 == 0) RMGR.tile_1_iterator++;
if(RMGR.tile_1_iterator == RMGR.TILE_1.length) RMGR.tile_1_iterator = 0;
for (int i = 0; i < this.height; i++)
{
for (int j = 0; j < this.width; j++)
{
switch (this.tiles[j][i])
{
default:
continue;
case 1:
RMR.c.save();
{
int it = RMGR.tile_0_iterator + i + j;
it %= RMGR.TILE_0.length;
RMR.c.translate(i * 32, j * 32);
src.set(0, 0, RMGR.TILE_0[it].getWidth(), RMGR.TILE_0[it].getHeight());
dst.set(0, 0, 32, 32);
pa.setColor(Color.WHITE);
RMR.c.drawBitmap(RMGR.TILE_0[it], src, dst, pa);
}
RMR.c.restore();
break;
case 2:
RMR.c.save();
{
int it = RMGR.tile_1_iterator + i + j;
it %= RMGR.TILE_1.length;
RMR.c.translate(i * 32, j * 32);
src.set(0, 0, RMGR.TILE_1[it].getWidth(), RMGR.TILE_1[it].getHeight());
dst.set(0, 0, 32, 32);
pa.setColor(Color.WHITE);
RMR.c.drawBitmap(RMGR.TILE_1[it], src, dst, pa);
}
RMR.c.restore();
break;
}
}
}
}
RMR.c.restore();
RMR.c.save();
{
if((System.currentTimeMillis() / 30) % 2 == 0) RMGR.pickup_0_iterator++;
if(RMGR.pickup_0_iterator == RMGR.PICKUP_0.length) RMGR.pickup_0_iterator = 0;
if((System.currentTimeMillis() / 30) % 2 == 0) RMGR.pickup_1_iterator++;
if(RMGR.pickup_1_iterator == RMGR.PICKUP_1.length) RMGR.pickup_1_iterator = 0;
for (int i = 0; i < this.pickups.size(); i++)
{
RMR.c.save();
{
RMR.c.translate(this.pickups.get(i)[1] * 32, this.pickups.get(i)[0] * 32);
pa.setAlpha((int) Math.floor(100 / ((float)this.pickups.get(i)[3] / ((float)this.pickups.get(i)[2] != 0 ? (float)this.pickups.get(i)[2] : 1))));
switch (this.pickups.get(i)[4])
{
default:
case 1:
int it = (RMGR.pickup_0_iterator + this.pickups.get(i)[0] + this.pickups.get(i)[1]);
it %= RMGR.PICKUP_0.length;
src.set(0, 0, RMGR.PICKUP_0[it].getWidth(), RMGR.PICKUP_0[it].getHeight());
dst.set(4, 4, 28, 28);
RMR.c.drawBitmap(RMGR.PICKUP_0[it], src, dst, pa);
break;
case 2:
int it1 = (RMGR.pickup_1_iterator + this.pickups.get(i)[0] + this.pickups.get(i)[1]);
it1 %= RMGR.PICKUP_1.length;
src.set(0, 0, RMGR.PICKUP_1[it1].getWidth(), RMGR.PICKUP_1[it1].getHeight());
dst.set(4, 4, 28, 28);
RMR.c.drawBitmap(RMGR.PICKUP_1[it1], src, dst, pa);
break;
}
}
RMR.c.restore();
}
}
RMR.c.restore();
pa = new Paint();
if (this.state == MapState.Suspended)
{
RMR.c.save();
{
pa.setColor(Color.BLACK);
pa.setAlpha(80);
RMR.c.drawRect(0, 0, RMR.mapSideLength * 32, this.suspendTile.x * 32, pa);
RMR.c.drawRect((this.suspendTile.y + 1) * 32, this.suspendTile.x * 32, RMR.mapSideLength * 32, (this.suspendTile.x + 1) * 32, pa);
RMR.c.drawRect(0, this.suspendTile.x * 32, this.suspendTile.y * 32, (this.suspendTile.x + 1) * 32, pa);
RMR.c.drawRect(0, (this.suspendTile.x + 1) * 32, RMR.mapSideLength * 32, RMR.mapSideLength * 32, pa);
pa.setColor(Color.YELLOW);
pa.setAlpha((int) Math.floor((Math.sin(System.currentTimeMillis() / 100) + 2) * 30));
RMR.c.drawRect(this.suspendTile.y * 32, this.suspendTile.x * 32, (this.suspendTile.y + 1) * 32, (this.suspendTile.x + 1) * 32, pa);
}
RMR.c.restore();
}
RMR.c.save();
{
pa.setColor(Color.WHITE);
RMR.c.translate(p1.posY, p1.posX);
src.set(0, 0, RMGR.CHAR_0.getWidth(), RMGR.CHAR_0.getHeight());
dst.set(-12, -12, 12, 12);
RMR.c.drawBitmap(RMGR.CHAR_0, src, dst, pa);
}
RMR.c.restore();
}
RMR.c.restore();
RMR.c.save();
{
pa.setColor(Color.WHITE);
RMR.c.translate(RMR.mapSideLength * 32 / 2, RMR.mapSideLength * 32 / 2);
src.set(0, 0, RMGR.CHAR_0.getWidth(), RMGR.CHAR_0.getHeight());
dst.set(-12, -12, 12, 12);
RMR.c.drawBitmap(RMGR.CHAR_0, src, dst, pa);
}
RMR.c.restore();
}
RMR.c.restore();
}
/**
 * Latches the first map corner to the given GPS fix.
 * Subsequent calls are ignored once the corner has been fixed.
 *
 * @param latt latitude of the corner
 * @param longt longitude of the corner
 */
public void fixCorner1(double latt, double longt)
{
if (corner1fixed)
{
return;
}
corner1latt = latt;
corner1long = longt;
corner1fixed = true;
}
/**
 * Latches the second map corner to the given GPS fix and derives the map's
 * local coordinate basis from the corner1 -> corner2 vector.
 * Subsequent calls are ignored once the corner has been fixed.
 *
 * @param latt latitude of the corner
 * @param longt longitude of the corner
 */
public void fixCorner2(double latt, double longt)
{
if (corner2fixed)
{
return;
}
corner2latt = latt;
corner2long = longt;
corner2fixed = true;
// Unit vector pointing from corner1 to corner2 in (lat, long) space.
double dbaseLatt = (corner2latt - corner1latt) / Math.sqrt((corner2latt - corner1latt) * (corner2latt - corner1latt) + (corner2long - corner1long) * (corner2long - corner1long));
double dbaseLong = (corner2long - corner1long) / Math.sqrt((corner2latt - corner1latt) * (corner2latt - corner1latt) + (corner2long - corner1long) * (corner2long - corner1long));
// (d/2 +- d/2) * sqrt(2) rotates the unit vector by +-45 degrees, yielding two
// orthogonal unit axes — presumably the map's X and Y axes, since the corners
// are treated as a diagonal; TODO confirm against the caller.
basexlatt = (dbaseLatt / 2 - dbaseLong / 2) * Math.sqrt(2);
basexlong = (dbaseLong / 2 + dbaseLatt / 2) * Math.sqrt(2);
baseylatt = (dbaseLatt / 2 + dbaseLong / 2) * Math.sqrt(2);
baseylong = (dbaseLong / 2 - dbaseLatt / 2) * Math.sqrt(2);
// Determinant of the 2x2 basis matrix; coordTransform divides by it to invert the basis.
det = basexlatt * baseylong - basexlong * baseylatt;
// Start the player at the far corner of the tile grid (32 px per tile).
this.p0.posX = 32 * (RMR.mapSideLength - 1);
this.p0.posY = 32 * (RMR.mapSideLength - 1);
//Log.wtf("dbase",Double.toString(Math.sqrt(dbaseLatt*dbaseLatt+dbaseLong*dbaseLong)));
//Log.wtf("basex",Double.toString(Math.sqrt(basexlatt*basexlatt+baseylong*baseylong)));
//Log.wtf("basey",Double.toString(Math.sqrt(baseylatt*baseylatt+baseylong*baseylong)));
}
/**
 * Converts a GPS fix into map pixel coordinates using the basis computed in
 * fixCorner2, or returns null if both corners have not been fixed yet.
 *
 * The two expressions apply the inverse of the 2x2 basis matrix (dividing by
 * its determinant) to the fix relative to corner1, then scale by
 * sqrt(2048 * side^2 / |corner1->corner2|^2) — presumably mapping the
 * corner-to-corner diagonal onto the 32-px tile grid (2048 = 2 * 32 * 32);
 * TODO confirm the scale derivation.
 *
 * @param latt latitude of the fix
 * @param longt longitude of the fix
 * @return {x, y} in map pixels, or null when the map is not calibrated
 */
public int[] coordTransform(double latt, double longt)
{
if (!(corner1fixed && corner2fixed))
{
return null;
}
double relLatt = latt - corner1latt;
double relLong = longt - corner1long;
int[] coord = new int[2];
coord[0] = (int) ((baseylong * relLatt / det - baseylatt * relLong / det) * Math.sqrt((2048 * RMR.mapSideLength * RMR.mapSideLength) / ((corner2latt - corner1latt) * (corner2latt - corner1latt) + (corner2long - corner1long) * (corner2long - corner1long))));
coord[1] = (int) ((-basexlong * relLatt / det + basexlatt * relLong / det) * Math.sqrt((2048 * RMR.mapSideLength * RMR.mapSideLength) / ((corner2latt - corner1latt) * (corner2latt - corner1latt) + (corner2long - corner1long) * (corner2long - corner1long))));
return coord;
}
/**
 * Lifecycle states of the map / game session.
 */
public enum MapState
{
Invalid, // not used by the visible code; presumably an error sentinel
PreGame, // initial state set by the constructor
Game, // active play: pickups spawn and collisions are checked in Update()
Suspended, // player left the grid or hit a non-zero tile; resumes on reaching suspendTile
PostGame // not used by the visible code
}
}
| true
| true
|
public void Draw()
{
Paint pa = new Paint();
RMR.c.save();
{
int mapW = this.width * 32;
int mapH = this.height * 32;
Player p = this.p0;
//double mapRotation = Math.toDegrees(Math.asin(Math.abs(this.basexlong - this.baseylong) / Math.sqrt(Math.pow(this.basexlatt - this.baseylatt, 2) + Math.pow(this.basexlong - this.baseylong, 2))));
double mapRotation = Math.toDegrees(Math.asin(Math.abs(p.posY - p.prevPosY) / Math.sqrt(Math.pow(p.posX - p.prevPosX, 2) + Math.pow(p.posY - p.prevPosY, 2))));
if ((p.posX - p.prevPosX) >= 0)
{
if ((p.posY - p.prevPosY) >= 0)
{
mapRotation = 180 - mapRotation;
}
else
{
mapRotation = 180 + mapRotation;
}
}
else
{
if ((p.posY - p.prevPosY) < 0)
{
mapRotation = 360 - mapRotation;
}
}
RMR.c.scale(((float) RMR.sw.getWidth() / (float) mapH), ((float) RMR.sw.getWidth() / (float) mapH));
Rect src = new Rect();
Rect dst = new Rect();
float α = 0.9f;
RMR.c.save();
{
RMR.c.translate(RMR.mapSideLength * 32 / 2, RMR.mapSideLength * 32 / 2);
/*if(this.corner1fixed && this.corner2fixed)*/
double delta = prevFilteredCompass + Math.toDegrees(RMR.compass.orientationData[0]);
Log.d("compass", delta + " " + prevFilteredCompass + " " + (Math.abs(delta) > 180));
delta = (delta > 180) ? (delta - 360) : ((delta < -180) ? (delta + 360) : delta);
prevFilteredCompass = (float) (α * delta - Math.toDegrees(RMR.compass.orientationData[0]));
if (this.corner1fixed && this.corner2fixed)
{
RMR.c.rotate((!Double.isNaN(mapRotation) ? (float) mapRotation : 0) + prevFilteredCompass/*-(float) playerAngle*/, 0, 0);
}
RMR.c.translate(-this.p0.posY, -this.p0.posX);
pa.setColor(Color.DKGRAY);
RMR.c.save();
{
/*for (int i = 0; i < RMR.currentMap.width; i++)
{
for (int j = 0; j < RMR.currentMap.height; j++)
{
RMR.c.save();
{
RMR.c.translate(i * 32, j * 32);
RMR.c.drawLine(0, 0, 32, 0, pa);
RMR.c.drawLine(0, 0, 0, 32, pa);
RMR.c.drawLine(32, 32, 32, 0, pa);
RMR.c.drawLine(32, 32, 0, 32, pa);
}
RMR.c.restore();
}
}*/
RMR.c.drawLine(0, 0, 32 * this.width, 0, pa);
RMR.c.drawLine(0, 0, 0, 32 * this.height, pa);
RMR.c.drawLine(32 * this.width, 32 * this.height, 32 * this.width, 0, pa);
RMR.c.drawLine(32 * this.width, 32 * this.height, 0, 32 * this.height, pa);
}
RMR.c.restore();
pa = new Paint();
RMR.c.save();
{
if((System.currentTimeMillis() / 30) % 2 == 0) RMGR.tile_0_iterator++;
if(RMGR.tile_0_iterator == RMGR.TILE_0.length) RMGR.tile_0_iterator = 0;
if((System.currentTimeMillis() / 30) % 2 == 0) RMGR.tile_1_iterator++;
if(RMGR.tile_1_iterator == RMGR.TILE_1.length) RMGR.tile_1_iterator = 0;
for (int i = 0; i < this.height; i++)
{
for (int j = 0; j < this.width; j++)
{
switch (this.tiles[j][i])
{
default:
continue;
case 1:
RMR.c.save();
{
int it = RMGR.tile_0_iterator + i + j;
it %= RMGR.TILE_0.length;
RMR.c.translate(i * 32, j * 32);
src.set(0, 0, RMGR.TILE_0[it].getWidth(), RMGR.TILE_0[it].getHeight());
dst.set(0, 0, 32, 32);
pa.setColor(Color.WHITE);
RMR.c.drawBitmap(RMGR.TILE_0[it], src, dst, pa);
}
RMR.c.restore();
break;
case 2:
RMR.c.save();
{
int it = RMGR.tile_1_iterator + i + j;
it %= RMGR.TILE_1.length;
RMR.c.translate(i * 32, j * 32);
src.set(0, 0, RMGR.TILE_1[it].getWidth(), RMGR.TILE_1[it].getHeight());
dst.set(0, 0, 32, 32);
pa.setColor(Color.WHITE);
RMR.c.drawBitmap(RMGR.TILE_1[it], src, dst, pa);
}
RMR.c.restore();
break;
}
}
}
}
RMR.c.restore();
RMR.c.save();
{
if((System.currentTimeMillis() / 30) % 2 == 0) RMGR.pickup_0_iterator++;
if(RMGR.pickup_0_iterator == RMGR.PICKUP_0.length) RMGR.pickup_0_iterator = 0;
if((System.currentTimeMillis() / 30) % 2 == 0) RMGR.pickup_1_iterator++;
if(RMGR.pickup_1_iterator == RMGR.PICKUP_1.length) RMGR.pickup_1_iterator = 0;
for (int i = 0; i < this.pickups.size(); i++)
{
RMR.c.save();
{
RMR.c.translate(this.pickups.get(i)[1] * 32, this.pickups.get(i)[0] * 32);
pa.setAlpha((int) Math.floor(100 / ((float)this.pickups.get(i)[3] / ((float)this.pickups.get(i)[2] != 0 ? (float)this.pickups.get(i)[2] : 1))));
switch (this.pickups.get(i)[4])
{
default:
case 1:
int it = (RMGR.pickup_0_iterator + this.pickups.get(i)[0] + this.pickups.get(i)[1]);
it %= RMGR.PICKUP_0.length;
src.set(0, 0, RMGR.PICKUP_0[it].getWidth(), RMGR.PICKUP_0[it].getHeight());
dst.set(4, 4, 28, 28);
RMR.c.drawBitmap(RMGR.PICKUP_0[it], src, dst, pa);
break;
case 2:
int it1 = (RMGR.pickup_1_iterator + this.pickups.get(i)[0] + this.pickups.get(i)[1]);
it1 %= RMGR.PICKUP_1.length;
src.set(0, 0, RMGR.PICKUP_1[it1].getWidth(), RMGR.PICKUP_1[it1].getHeight());
dst.set(4, 4, 28, 28);
RMR.c.drawBitmap(RMGR.PICKUP_1[it1], src, dst, pa);
break;
}
}
RMR.c.restore();
}
}
RMR.c.restore();
pa = new Paint();
if (this.state == MapState.Suspended)
{
RMR.c.save();
{
pa.setColor(Color.BLACK);
pa.setAlpha(80);
RMR.c.drawRect(0, 0, RMR.mapSideLength * 32, this.suspendTile.x * 32, pa);
RMR.c.drawRect((this.suspendTile.y + 1) * 32, this.suspendTile.x * 32, RMR.mapSideLength * 32, (this.suspendTile.x + 1) * 32, pa);
RMR.c.drawRect(0, this.suspendTile.x * 32, this.suspendTile.y * 32, (this.suspendTile.x + 1) * 32, pa);
RMR.c.drawRect(0, (this.suspendTile.x + 1) * 32, RMR.mapSideLength * 32, RMR.mapSideLength * 32, pa);
pa.setColor(Color.YELLOW);
pa.setAlpha((int) Math.floor((Math.sin(System.currentTimeMillis() / 100) + 2) * 30));
RMR.c.drawRect(this.suspendTile.y * 32, this.suspendTile.x * 32, (this.suspendTile.y + 1) * 32, (this.suspendTile.x + 1) * 32, pa);
}
RMR.c.restore();
}
RMR.c.save();
{
pa.setColor(Color.WHITE);
RMR.c.translate(p1.posY, p1.posX);
src.set(0, 0, RMGR.CHAR_0.getWidth(), RMGR.CHAR_0.getHeight());
dst.set(-12, -12, 12, 12);
RMR.c.drawBitmap(RMGR.CHAR_0, src, dst, pa);
}
RMR.c.restore();
}
RMR.c.restore();
RMR.c.save();
{
pa.setColor(Color.WHITE);
RMR.c.translate(RMR.mapSideLength * 32 / 2, RMR.mapSideLength * 32 / 2);
src.set(0, 0, RMGR.CHAR_0.getWidth(), RMGR.CHAR_0.getHeight());
dst.set(-12, -12, 12, 12);
RMR.c.drawBitmap(RMGR.CHAR_0, src, dst, pa);
}
RMR.c.restore();
}
RMR.c.restore();
}
|
public void Draw()
{
Paint pa = new Paint();
RMR.c.save();
{
int mapW = this.width * 32;
int mapH = this.height * 32;
Player p = this.p0;
//double mapRotation = Math.toDegrees(Math.asin(Math.abs(this.basexlong - this.baseylong) / Math.sqrt(Math.pow(this.basexlatt - this.baseylatt, 2) + Math.pow(this.basexlong - this.baseylong, 2))));
double mapRotation = Math.toDegrees(Math.asin(Math.abs(p.posY - p.prevPosY) / Math.sqrt(Math.pow(p.posX - p.prevPosX, 2) + Math.pow(p.posY - p.prevPosY, 2))));
if ((p.posX - p.prevPosX) >= 0)
{
if ((p.posY - p.prevPosY) >= 0)
{
mapRotation = 180 - mapRotation;
}
else
{
mapRotation = 180 + mapRotation;
}
}
else
{
if ((p.posY - p.prevPosY) < 0)
{
mapRotation = 360 - mapRotation;
}
}
RMR.c.scale(((float) RMR.sw.getWidth() / (float) mapH), ((float) RMR.sw.getWidth() / (float) mapH));
Rect src = new Rect();
Rect dst = new Rect();
float α = 0.9f;
RMR.c.save();
{
RMR.c.translate(RMR.mapSideLength * 32 / 2, RMR.mapSideLength * 32 / 2);
/*if(this.corner1fixed && this.corner2fixed)*/
double delta = prevFilteredCompass + Math.toDegrees(RMR.compass.orientationData[0]);
Log.d("compass", delta + " " + prevFilteredCompass + " " + (Math.abs(delta) > 180));
delta = (delta > 180) ? (delta - 360) : ((delta < -180) ? (delta + 360) : delta);
prevFilteredCompass = (float) (α * delta - Math.toDegrees(RMR.compass.orientationData[0]));
if (this.corner1fixed && this.corner2fixed)
{
RMR.c.rotate(/*(!Double.isNaN(mapRotation) ? (float) mapRotation : 0) + */prevFilteredCompass/*-(float) playerAngle*/, 0, 0);
}
RMR.c.translate(-this.p0.posY, -this.p0.posX);
pa.setColor(Color.DKGRAY);
RMR.c.save();
{
/*for (int i = 0; i < RMR.currentMap.width; i++)
{
for (int j = 0; j < RMR.currentMap.height; j++)
{
RMR.c.save();
{
RMR.c.translate(i * 32, j * 32);
RMR.c.drawLine(0, 0, 32, 0, pa);
RMR.c.drawLine(0, 0, 0, 32, pa);
RMR.c.drawLine(32, 32, 32, 0, pa);
RMR.c.drawLine(32, 32, 0, 32, pa);
}
RMR.c.restore();
}
}*/
RMR.c.drawLine(0, 0, 32 * this.width, 0, pa);
RMR.c.drawLine(0, 0, 0, 32 * this.height, pa);
RMR.c.drawLine(32 * this.width, 32 * this.height, 32 * this.width, 0, pa);
RMR.c.drawLine(32 * this.width, 32 * this.height, 0, 32 * this.height, pa);
}
RMR.c.restore();
pa = new Paint();
RMR.c.save();
{
if((System.currentTimeMillis() / 30) % 2 == 0) RMGR.tile_0_iterator++;
if(RMGR.tile_0_iterator == RMGR.TILE_0.length) RMGR.tile_0_iterator = 0;
if((System.currentTimeMillis() / 30) % 2 == 0) RMGR.tile_1_iterator++;
if(RMGR.tile_1_iterator == RMGR.TILE_1.length) RMGR.tile_1_iterator = 0;
for (int i = 0; i < this.height; i++)
{
for (int j = 0; j < this.width; j++)
{
switch (this.tiles[j][i])
{
default:
continue;
case 1:
RMR.c.save();
{
int it = RMGR.tile_0_iterator + i + j;
it %= RMGR.TILE_0.length;
RMR.c.translate(i * 32, j * 32);
src.set(0, 0, RMGR.TILE_0[it].getWidth(), RMGR.TILE_0[it].getHeight());
dst.set(0, 0, 32, 32);
pa.setColor(Color.WHITE);
RMR.c.drawBitmap(RMGR.TILE_0[it], src, dst, pa);
}
RMR.c.restore();
break;
case 2:
RMR.c.save();
{
int it = RMGR.tile_1_iterator + i + j;
it %= RMGR.TILE_1.length;
RMR.c.translate(i * 32, j * 32);
src.set(0, 0, RMGR.TILE_1[it].getWidth(), RMGR.TILE_1[it].getHeight());
dst.set(0, 0, 32, 32);
pa.setColor(Color.WHITE);
RMR.c.drawBitmap(RMGR.TILE_1[it], src, dst, pa);
}
RMR.c.restore();
break;
}
}
}
}
RMR.c.restore();
RMR.c.save();
{
if((System.currentTimeMillis() / 30) % 2 == 0) RMGR.pickup_0_iterator++;
if(RMGR.pickup_0_iterator == RMGR.PICKUP_0.length) RMGR.pickup_0_iterator = 0;
if((System.currentTimeMillis() / 30) % 2 == 0) RMGR.pickup_1_iterator++;
if(RMGR.pickup_1_iterator == RMGR.PICKUP_1.length) RMGR.pickup_1_iterator = 0;
for (int i = 0; i < this.pickups.size(); i++)
{
RMR.c.save();
{
RMR.c.translate(this.pickups.get(i)[1] * 32, this.pickups.get(i)[0] * 32);
pa.setAlpha((int) Math.floor(100 / ((float)this.pickups.get(i)[3] / ((float)this.pickups.get(i)[2] != 0 ? (float)this.pickups.get(i)[2] : 1))));
switch (this.pickups.get(i)[4])
{
default:
case 1:
int it = (RMGR.pickup_0_iterator + this.pickups.get(i)[0] + this.pickups.get(i)[1]);
it %= RMGR.PICKUP_0.length;
src.set(0, 0, RMGR.PICKUP_0[it].getWidth(), RMGR.PICKUP_0[it].getHeight());
dst.set(4, 4, 28, 28);
RMR.c.drawBitmap(RMGR.PICKUP_0[it], src, dst, pa);
break;
case 2:
int it1 = (RMGR.pickup_1_iterator + this.pickups.get(i)[0] + this.pickups.get(i)[1]);
it1 %= RMGR.PICKUP_1.length;
src.set(0, 0, RMGR.PICKUP_1[it1].getWidth(), RMGR.PICKUP_1[it1].getHeight());
dst.set(4, 4, 28, 28);
RMR.c.drawBitmap(RMGR.PICKUP_1[it1], src, dst, pa);
break;
}
}
RMR.c.restore();
}
}
RMR.c.restore();
pa = new Paint();
if (this.state == MapState.Suspended)
{
RMR.c.save();
{
pa.setColor(Color.BLACK);
pa.setAlpha(80);
RMR.c.drawRect(0, 0, RMR.mapSideLength * 32, this.suspendTile.x * 32, pa);
RMR.c.drawRect((this.suspendTile.y + 1) * 32, this.suspendTile.x * 32, RMR.mapSideLength * 32, (this.suspendTile.x + 1) * 32, pa);
RMR.c.drawRect(0, this.suspendTile.x * 32, this.suspendTile.y * 32, (this.suspendTile.x + 1) * 32, pa);
RMR.c.drawRect(0, (this.suspendTile.x + 1) * 32, RMR.mapSideLength * 32, RMR.mapSideLength * 32, pa);
pa.setColor(Color.YELLOW);
pa.setAlpha((int) Math.floor((Math.sin(System.currentTimeMillis() / 100) + 2) * 30));
RMR.c.drawRect(this.suspendTile.y * 32, this.suspendTile.x * 32, (this.suspendTile.y + 1) * 32, (this.suspendTile.x + 1) * 32, pa);
}
RMR.c.restore();
}
RMR.c.save();
{
pa.setColor(Color.WHITE);
RMR.c.translate(p1.posY, p1.posX);
src.set(0, 0, RMGR.CHAR_0.getWidth(), RMGR.CHAR_0.getHeight());
dst.set(-12, -12, 12, 12);
RMR.c.drawBitmap(RMGR.CHAR_0, src, dst, pa);
}
RMR.c.restore();
}
RMR.c.restore();
RMR.c.save();
{
pa.setColor(Color.WHITE);
RMR.c.translate(RMR.mapSideLength * 32 / 2, RMR.mapSideLength * 32 / 2);
src.set(0, 0, RMGR.CHAR_0.getWidth(), RMGR.CHAR_0.getHeight());
dst.set(-12, -12, 12, 12);
RMR.c.drawBitmap(RMGR.CHAR_0, src, dst, pa);
}
RMR.c.restore();
}
RMR.c.restore();
}
|
diff --git a/src/main/java/com/ochafik/math/bayes/JunctionTreeAlgorithmUtils.java b/src/main/java/com/ochafik/math/bayes/JunctionTreeAlgorithmUtils.java
index dbd45d0..eb6462e 100644
--- a/src/main/java/com/ochafik/math/bayes/JunctionTreeAlgorithmUtils.java
+++ b/src/main/java/com/ochafik/math/bayes/JunctionTreeAlgorithmUtils.java
@@ -1,295 +1,295 @@
/*
* Copyright (C) 2011 by Olivier Chafik (http://ochafik.com)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.ochafik.math.bayes;
import static com.ochafik.math.functions.Functions.*;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.Map.Entry;
import com.ochafik.math.functions.Function;
import com.ochafik.math.functions.FunctionException;
import com.ochafik.math.functions.Functions;
import com.ochafik.math.functions.Variable;
import com.ochafik.math.graph.Clique;
import com.ochafik.math.graph.Graph;
import com.ochafik.math.graph.GraphUtils;
import com.ochafik.math.graph.NodeSet;
import com.ochafik.util.CollectionAdapter;
import com.ochafik.util.DefaultAdapter;
import com.ochafik.util.listenable.Pair;
import com.ochafik.util.string.StringUtils;
public class JunctionTreeAlgorithmUtils {
@SuppressWarnings("serial")
static class SeparatorsPotentials extends TreeMap<Pair<Integer, Integer>, Function<Variable>>{
    /** Builds the canonical (smaller, larger) key so lookups are order-independent. */
    private static Pair<Integer, Integer> orderedKey(int a, int b) {
        return new Pair<Integer, Integer>(Math.min(a, b), Math.max(a, b));
    }
    /** Returns the separator potential between the two cliques, in either order. */
    public Function<Variable> get(int iClique1, int iClique2) {
        return get(orderedKey(iClique1, iClique2));
    }
    /** Stores the separator potential between the two cliques, in either order. */
    public Function<Variable> put(int iClique1, int iClique2, Function<Variable> f) {
        return put(orderedKey(iClique1, iClique2), f);
    }
};
/**
 * Passes a single junction-tree message from clique iSource to clique
 * iDestination: the source potential is marginalized down to the separator
 * (the variables shared with the destination), and the destination potential
 * is multiplied by the ratio newSeparator / oldSeparator. This matches the
 * standard absorption update — presumably Hugin-style propagation; confirm
 * against the calling algorithm.
 *
 * @param iSource index of the sending clique in nodeSetList
 * @param iDestination index of the receiving clique in nodeSetList
 * @param nodeSetList cliques of the junction tree
 * @param cliquePotentials current potential per clique index (mutated for iDestination)
 * @param separatorsPotentials current separator potentials (mutated for this edge)
 * @throws FunctionException if a potential operation fails
 */
private static final void passMessage(int iSource, int iDestination, List<NodeSet<Variable>> nodeSetList, Map<Integer, Function<Variable>> cliquePotentials, SeparatorsPotentials separatorsPotentials) throws FunctionException {
//System.out.println("Message\n\t" + nodeSetList.get(iSource) + "\n->\t" + nodeSetList.get(iDestination));
Function<Variable> oldSepPotential = separatorsPotentials.get(iSource, iDestination);
Collection<Variable> sourceNodes = nodeSetList.get(iSource).getNodes();
Collection<Variable> destNodes = nodeSetList.get(iDestination).getNodes();
// Separator = source variables minus destination variables; these get summed out.
Set<Variable> varsToMarginalizeOut = new TreeSet<Variable>(sourceNodes);
varsToMarginalizeOut.removeAll(destNodes);
Function<Variable> sourcePotential = cliquePotentials.get(iSource);
Function<Variable> newSepPotential = Functions.cache(Functions.marginalize(sourcePotential, varsToMarginalizeOut));
// TODO check this !!!
separatorsPotentials.put(iSource, iDestination, newSepPotential);
//separatorsPotentials.put(iSource, iDestination, multiply(constant(1/2D ), add(newSepPotential, separatorsPotentials.get(iSource, iDestination))));
Function<Variable> oldDestPotential = cliquePotentials.get(iDestination);
// First message on this edge (no old separator): multiply by the new separator directly.
Function<Variable> ratioSep = oldSepPotential == null ? newSepPotential : multiply(newSepPotential, invert(oldSepPotential));
Function<Variable> newDestPotential = oldDestPotential == null ? ratioSep : multiply(oldDestPotential, ratioSep);
//System.out.println(oldDestPotential + " * " + newSepPotential + " / " + oldSepPotential + " = " + newDestPotential);
cliquePotentials.put(iDestination, Functions.cache(newDestPotential));
}
private static final void collectEvidence(int iSource, int iCaller, boolean[] markedCliques, List<NodeSet<Variable>> nodeSetList, Map<Integer,Set<Integer>> cliquesNeighbours, Map<Integer, Function<Variable>> cliquePotentials, SeparatorsPotentials separatorsPotentials) throws FunctionException {
markedCliques[iSource] = true;
for (int iNeighbour : cliquesNeighbours.get(iSource)) {
if (!markedCliques[iNeighbour]) {
collectEvidence(iNeighbour, iSource, markedCliques, nodeSetList, cliquesNeighbours, cliquePotentials, separatorsPotentials);
}
}
if (iCaller >= 0) {
passMessage(iSource, iCaller, nodeSetList, cliquePotentials, separatorsPotentials);
}
}
private static final void distributeEvidence(int iSource, boolean[] markedCliques, List<NodeSet<Variable>> nodeSetList, Map<Integer,Set<Integer>> cliquesNeighbours, Map<Integer, Function<Variable>> cliquePotentials, SeparatorsPotentials separatorsPotentials) throws FunctionException {
markedCliques[iSource] = true;
for (int iNeighbour : cliquesNeighbours.get(iSource)) {
if (!markedCliques[iNeighbour]) {
passMessage(iSource, iNeighbour, nodeSetList, cliquePotentials, separatorsPotentials);
}
}
for (int iNeighbour : cliquesNeighbours.get(iSource)) {
if (!markedCliques[iNeighbour]) {
distributeEvidence(iNeighbour, markedCliques, nodeSetList, cliquesNeighbours, cliquePotentials, separatorsPotentials);
}
}
}
private static final void printCliquePotentials(String title, final List<NodeSet<Variable>> nodeSetList, Map<Integer, Function<Variable>> cliquePotentials) {
PrintStream out = System.out;
out.println(title+" : \n\t"+
StringUtils.implode(
new CollectionAdapter<Map.Entry<Integer,Function<Variable>>, String>(
cliquePotentials.entrySet(),
new DefaultAdapter<Map.Entry<Integer,Function<Variable>>, String>() {
public String adapt(Entry<Integer, Function<Variable>> value) {
return nodeSetList.get(value.getKey())+" = \n\t\t"+value.getValue();
}
}
),
"\n\t"
)
);
}
private static final void globalPropagation(int iSource, List<NodeSet<Variable>> nodeSetList, Map<Integer,Set<Integer>> cliquesNeighbours, Map<Integer, Function<Variable>> cliquePotentials, SeparatorsPotentials separatorsPotentials) throws FunctionException {
boolean[] markedCliques = new boolean[nodeSetList.size()];
//printCliquePotentials("Initial clique potentials", nodeSetList, cliquePotentials);
//System.out.println("Collecting evidence");
collectEvidence(iSource, -1, markedCliques, nodeSetList, cliquesNeighbours, cliquePotentials, separatorsPotentials);
//printCliquePotentials("Clique potentials after evidence collection", nodeSetList, cliquePotentials);
Arrays.fill(markedCliques, false);
//System.out.println("Distributing evidence");
distributeEvidence(iSource, markedCliques, nodeSetList, cliquesNeighbours, cliquePotentials, separatorsPotentials);
//printCliquePotentials("Clique potentials after evidence distribution", nodeSetList, cliquePotentials);
}
public static final Map<Variable, List<Function<Variable>>> junctionTreeInference(Graph<Variable> graph, Map<Variable, ? extends Function<Variable>> fusionedDefinitions) throws FunctionException {
Graph<NodeSet<Variable>> junctionTree = GraphUtils.createJunctionTree(graph);
Map<Integer, Function<Variable>> cliquePotentials = new TreeMap<Integer, Function<Variable>>();
SeparatorsPotentials separatorsPotentials = new SeparatorsPotentials();
// Cliques and separator have an uniform indexing which refers to nodeSetList
List<NodeSet<Variable>> nodeSetList = junctionTree.getNodeList();
List<Integer> cliqueIndexList = new ArrayList<Integer>(nodeSetList.size());
for (int iNodeSet = nodeSetList.size(); iNodeSet-- != 0;) {
NodeSet<Variable> nodeSet = nodeSetList.get(iNodeSet);
if (nodeSet instanceof Clique) {
cliqueIndexList.add(iNodeSet);
}
}
long startTime = System.currentTimeMillis();
System.out.print("Computing potentials formulae...");
List<Variable> variableList = graph.getNodeList();
Set<Variable>
assignedVariables = new HashSet<Variable>(),
unassignedVariables = new HashSet<Variable>(variableList);
// INITIALIZATION
for (int iClique : cliqueIndexList) {
Function<Variable> product = null;
Clique<Variable> clique = (Clique<Variable>)nodeSetList.get(iClique);
Collection<Variable> cliqueNodes = clique.getNodes();
for (Variable cliqueNode : cliqueNodes) {
if (!assignedVariables.contains(cliqueNode)) {
Function<Variable> cond = fusionedDefinitions.get(cliqueNode);
if (cliqueNodes.containsAll(cond.getArgumentNames())) {
//System.out.println("Variable " + cliqueNode + " = f" + cond.getArgumentNames() +" assigned to clique " + nodeSetList.get(iClique));
assignedVariables.add(cliqueNode);
unassignedVariables.remove(cliqueNode);
product = product == null ? cond : Functions.multiply(product, cond);
}
}
}
cliquePotentials.put(iClique, product == null ? Functions.constant(1) : product);
}
if (!unassignedVariables.isEmpty()) {
throw new RuntimeException("Failed to assign all variables conditional probabilities to a node ! (remaining "+unassignedVariables+")");
//new RuntimeException("Failed to assign all variables conditional probabilities to a node ! (remaining "+unassignedVariables+")").printStackTrace();
}
// Build connectivity of cliques + initialize the separators potentials
Map<Integer,Set<Integer>> cliquesNeighbours = new TreeMap<Integer, Set<Integer>>();
for (int iClique : cliqueIndexList) {
Set<Integer> neighbourCliques = new TreeSet<Integer>();
for (int iNeighbourSeparator : junctionTree.getLocalConnectivity().getNeighbours(iClique).toArray()) {
for (int iNeighbourClique : junctionTree.getLocalConnectivity().getNeighbours(iNeighbourSeparator).toArray()) {
if (iNeighbourClique != iClique) {
neighbourCliques.add(iNeighbourClique);
//separatorsPotentials.put(iClique, iNeighbourClique, Functions.constant(1));
}
}
}
cliquesNeighbours.put(iClique, neighbourCliques);
}
Integer startingClique = cliqueIndexList.get(cliqueIndexList.size() - 1);
//System.out.println("Starting global propagation by clique " + nodeSetList.get(startingClique));
globalPropagation(startingClique, nodeSetList, cliquesNeighbours, cliquePotentials, separatorsPotentials);
//globalPropagation(cliqueIndexList.get(0), nodeSetList, cliquesNeighbours, cliquePotentials, separatorsPotentials);
// Normalize each clique's potential
for (Map.Entry<Integer, Function<Variable>> e : cliquePotentials.entrySet())
- e.setValue(Functions.normalize(e.getValue(), 1));//, "{clique" + nodeSetList.get(e.getKey()) + " = f" + e.getValue().getArgumentNames()+"}"));
+ cliquePotentials.put(e.getKey(), Functions.normalize(e.getValue(), 1));//, "{clique" + nodeSetList.get(e.getKey()) + " = f" + e.getValue().getArgumentNames()+"}"));
// Get all the potentials by clique / separator and marginalize them for each variable. Store the result by variable
Map<Variable, List<Function<Variable>>> ret = new TreeMap<Variable, List<Function<Variable>>>();
for (Variable v : graph.getNodeList()) {
ret.put(v, new ArrayList<Function<Variable>>());
}
for (Map.Entry<Integer, Function<Variable>> entry : cliquePotentials.entrySet()) {
int iClique = entry.getKey();
Function<Variable> cliquePotential = entry.getValue();
Collection<Variable> cliqueNodes = ((Clique<Variable>)nodeSetList.get(iClique)).getNodes();
// Marginalize cliquePotential for each variable of the clique
for (Variable v : cliquePotential.getArgumentNames()) {
// Set<Variable> parameters = new TreeSet<Variable>(cliquePotential.getArgumentNames());
// parameters.remove(v);
// //System.out.println(new TabulatedFunction<Variable>(cliquePotential).toString(Arrays.asList(v)));
ret.get(v).add(//Functions.normalize(
//Functions.marginalize(
//Functions.normalize(
cliquePotential//,
//cliquePotential.getArgumentNames()
//),
//parameters)
//, 1, null)
);
}
}
for (Map.Entry<Variable, List<Function<Variable>>> e : ret.entrySet()) {
List<Function<Variable>> list = e.getValue();
Collections.sort(list, new Comparator<Function<Variable>>() {
@Override
public int compare(Function<Variable> o1, Function<Variable> o2) {
if (o1 == o2)
return 0;
int s1 = o1.getArgumentNames().size(), s2 = o2.getArgumentNames().size();
if (s1 != s2)
return s2 - s1;
int c = o1.getArgumentNames().toString().compareTo(o2.getArgumentNames().toString());
if (c != 0)
return c;
// TODO Auto-generated method stub
return 1;
}
});
for (int i = 0, len = list.size(); i < len; i++) {
Function<Variable> cliquePotential = list.get(i);
Set<Variable> parameters = new TreeSet<Variable>(cliquePotential.getArgumentNames());
parameters.remove(e.getKey());
list.set(i, Functions.marginalize(cliquePotential, parameters));
}
}
System.out.println(" " +(System.currentTimeMillis() - startTime) + "ms.");
return ret;
}
}
| true
| true
|
public static final Map<Variable, List<Function<Variable>>> junctionTreeInference(Graph<Variable> graph, Map<Variable, ? extends Function<Variable>> fusionedDefinitions) throws FunctionException {
Graph<NodeSet<Variable>> junctionTree = GraphUtils.createJunctionTree(graph);
Map<Integer, Function<Variable>> cliquePotentials = new TreeMap<Integer, Function<Variable>>();
SeparatorsPotentials separatorsPotentials = new SeparatorsPotentials();
// Cliques and separator have an uniform indexing which refers to nodeSetList
List<NodeSet<Variable>> nodeSetList = junctionTree.getNodeList();
List<Integer> cliqueIndexList = new ArrayList<Integer>(nodeSetList.size());
for (int iNodeSet = nodeSetList.size(); iNodeSet-- != 0;) {
NodeSet<Variable> nodeSet = nodeSetList.get(iNodeSet);
if (nodeSet instanceof Clique) {
cliqueIndexList.add(iNodeSet);
}
}
long startTime = System.currentTimeMillis();
System.out.print("Computing potentials formulae...");
List<Variable> variableList = graph.getNodeList();
Set<Variable>
assignedVariables = new HashSet<Variable>(),
unassignedVariables = new HashSet<Variable>(variableList);
// INITIALIZATION
for (int iClique : cliqueIndexList) {
Function<Variable> product = null;
Clique<Variable> clique = (Clique<Variable>)nodeSetList.get(iClique);
Collection<Variable> cliqueNodes = clique.getNodes();
for (Variable cliqueNode : cliqueNodes) {
if (!assignedVariables.contains(cliqueNode)) {
Function<Variable> cond = fusionedDefinitions.get(cliqueNode);
if (cliqueNodes.containsAll(cond.getArgumentNames())) {
//System.out.println("Variable " + cliqueNode + " = f" + cond.getArgumentNames() +" assigned to clique " + nodeSetList.get(iClique));
assignedVariables.add(cliqueNode);
unassignedVariables.remove(cliqueNode);
product = product == null ? cond : Functions.multiply(product, cond);
}
}
}
cliquePotentials.put(iClique, product == null ? Functions.constant(1) : product);
}
if (!unassignedVariables.isEmpty()) {
throw new RuntimeException("Failed to assign all variables conditional probabilities to a node ! (remaining "+unassignedVariables+")");
//new RuntimeException("Failed to assign all variables conditional probabilities to a node ! (remaining "+unassignedVariables+")").printStackTrace();
}
// Build connectivity of cliques + initialize the separators potentials
Map<Integer,Set<Integer>> cliquesNeighbours = new TreeMap<Integer, Set<Integer>>();
for (int iClique : cliqueIndexList) {
Set<Integer> neighbourCliques = new TreeSet<Integer>();
for (int iNeighbourSeparator : junctionTree.getLocalConnectivity().getNeighbours(iClique).toArray()) {
for (int iNeighbourClique : junctionTree.getLocalConnectivity().getNeighbours(iNeighbourSeparator).toArray()) {
if (iNeighbourClique != iClique) {
neighbourCliques.add(iNeighbourClique);
//separatorsPotentials.put(iClique, iNeighbourClique, Functions.constant(1));
}
}
}
cliquesNeighbours.put(iClique, neighbourCliques);
}
Integer startingClique = cliqueIndexList.get(cliqueIndexList.size() - 1);
//System.out.println("Starting global propagation by clique " + nodeSetList.get(startingClique));
globalPropagation(startingClique, nodeSetList, cliquesNeighbours, cliquePotentials, separatorsPotentials);
//globalPropagation(cliqueIndexList.get(0), nodeSetList, cliquesNeighbours, cliquePotentials, separatorsPotentials);
// Normalize each clique's potential
for (Map.Entry<Integer, Function<Variable>> e : cliquePotentials.entrySet())
e.setValue(Functions.normalize(e.getValue(), 1));//, "{clique" + nodeSetList.get(e.getKey()) + " = f" + e.getValue().getArgumentNames()+"}"));
// Get all the potentials by clique / separator and marginalize them for each variable. Store the result by variable
Map<Variable, List<Function<Variable>>> ret = new TreeMap<Variable, List<Function<Variable>>>();
for (Variable v : graph.getNodeList()) {
ret.put(v, new ArrayList<Function<Variable>>());
}
for (Map.Entry<Integer, Function<Variable>> entry : cliquePotentials.entrySet()) {
int iClique = entry.getKey();
Function<Variable> cliquePotential = entry.getValue();
Collection<Variable> cliqueNodes = ((Clique<Variable>)nodeSetList.get(iClique)).getNodes();
// Marginalize cliquePotential for each variable of the clique
for (Variable v : cliquePotential.getArgumentNames()) {
// Set<Variable> parameters = new TreeSet<Variable>(cliquePotential.getArgumentNames());
// parameters.remove(v);
// //System.out.println(new TabulatedFunction<Variable>(cliquePotential).toString(Arrays.asList(v)));
ret.get(v).add(//Functions.normalize(
//Functions.marginalize(
//Functions.normalize(
cliquePotential//,
//cliquePotential.getArgumentNames()
//),
//parameters)
//, 1, null)
);
}
}
for (Map.Entry<Variable, List<Function<Variable>>> e : ret.entrySet()) {
List<Function<Variable>> list = e.getValue();
Collections.sort(list, new Comparator<Function<Variable>>() {
@Override
public int compare(Function<Variable> o1, Function<Variable> o2) {
if (o1 == o2)
return 0;
int s1 = o1.getArgumentNames().size(), s2 = o2.getArgumentNames().size();
if (s1 != s2)
return s2 - s1;
int c = o1.getArgumentNames().toString().compareTo(o2.getArgumentNames().toString());
if (c != 0)
return c;
// TODO Auto-generated method stub
return 1;
}
});
for (int i = 0, len = list.size(); i < len; i++) {
Function<Variable> cliquePotential = list.get(i);
Set<Variable> parameters = new TreeSet<Variable>(cliquePotential.getArgumentNames());
parameters.remove(e.getKey());
list.set(i, Functions.marginalize(cliquePotential, parameters));
}
}
System.out.println(" " +(System.currentTimeMillis() - startTime) + "ms.");
return ret;
}
|
public static final Map<Variable, List<Function<Variable>>> junctionTreeInference(Graph<Variable> graph, Map<Variable, ? extends Function<Variable>> fusionedDefinitions) throws FunctionException {
Graph<NodeSet<Variable>> junctionTree = GraphUtils.createJunctionTree(graph);
Map<Integer, Function<Variable>> cliquePotentials = new TreeMap<Integer, Function<Variable>>();
SeparatorsPotentials separatorsPotentials = new SeparatorsPotentials();
// Cliques and separator have an uniform indexing which refers to nodeSetList
List<NodeSet<Variable>> nodeSetList = junctionTree.getNodeList();
List<Integer> cliqueIndexList = new ArrayList<Integer>(nodeSetList.size());
for (int iNodeSet = nodeSetList.size(); iNodeSet-- != 0;) {
NodeSet<Variable> nodeSet = nodeSetList.get(iNodeSet);
if (nodeSet instanceof Clique) {
cliqueIndexList.add(iNodeSet);
}
}
long startTime = System.currentTimeMillis();
System.out.print("Computing potentials formulae...");
List<Variable> variableList = graph.getNodeList();
Set<Variable>
assignedVariables = new HashSet<Variable>(),
unassignedVariables = new HashSet<Variable>(variableList);
// INITIALIZATION
for (int iClique : cliqueIndexList) {
Function<Variable> product = null;
Clique<Variable> clique = (Clique<Variable>)nodeSetList.get(iClique);
Collection<Variable> cliqueNodes = clique.getNodes();
for (Variable cliqueNode : cliqueNodes) {
if (!assignedVariables.contains(cliqueNode)) {
Function<Variable> cond = fusionedDefinitions.get(cliqueNode);
if (cliqueNodes.containsAll(cond.getArgumentNames())) {
//System.out.println("Variable " + cliqueNode + " = f" + cond.getArgumentNames() +" assigned to clique " + nodeSetList.get(iClique));
assignedVariables.add(cliqueNode);
unassignedVariables.remove(cliqueNode);
product = product == null ? cond : Functions.multiply(product, cond);
}
}
}
cliquePotentials.put(iClique, product == null ? Functions.constant(1) : product);
}
if (!unassignedVariables.isEmpty()) {
throw new RuntimeException("Failed to assign all variables conditional probabilities to a node ! (remaining "+unassignedVariables+")");
//new RuntimeException("Failed to assign all variables conditional probabilities to a node ! (remaining "+unassignedVariables+")").printStackTrace();
}
// Build connectivity of cliques + initialize the separators potentials
Map<Integer,Set<Integer>> cliquesNeighbours = new TreeMap<Integer, Set<Integer>>();
for (int iClique : cliqueIndexList) {
Set<Integer> neighbourCliques = new TreeSet<Integer>();
for (int iNeighbourSeparator : junctionTree.getLocalConnectivity().getNeighbours(iClique).toArray()) {
for (int iNeighbourClique : junctionTree.getLocalConnectivity().getNeighbours(iNeighbourSeparator).toArray()) {
if (iNeighbourClique != iClique) {
neighbourCliques.add(iNeighbourClique);
//separatorsPotentials.put(iClique, iNeighbourClique, Functions.constant(1));
}
}
}
cliquesNeighbours.put(iClique, neighbourCliques);
}
Integer startingClique = cliqueIndexList.get(cliqueIndexList.size() - 1);
//System.out.println("Starting global propagation by clique " + nodeSetList.get(startingClique));
globalPropagation(startingClique, nodeSetList, cliquesNeighbours, cliquePotentials, separatorsPotentials);
//globalPropagation(cliqueIndexList.get(0), nodeSetList, cliquesNeighbours, cliquePotentials, separatorsPotentials);
// Normalize each clique's potential
for (Map.Entry<Integer, Function<Variable>> e : cliquePotentials.entrySet())
cliquePotentials.put(e.getKey(), Functions.normalize(e.getValue(), 1));//, "{clique" + nodeSetList.get(e.getKey()) + " = f" + e.getValue().getArgumentNames()+"}"));
// Get all the potentials by clique / separator and marginalize them for each variable. Store the result by variable
Map<Variable, List<Function<Variable>>> ret = new TreeMap<Variable, List<Function<Variable>>>();
for (Variable v : graph.getNodeList()) {
ret.put(v, new ArrayList<Function<Variable>>());
}
for (Map.Entry<Integer, Function<Variable>> entry : cliquePotentials.entrySet()) {
int iClique = entry.getKey();
Function<Variable> cliquePotential = entry.getValue();
Collection<Variable> cliqueNodes = ((Clique<Variable>)nodeSetList.get(iClique)).getNodes();
// Marginalize cliquePotential for each variable of the clique
for (Variable v : cliquePotential.getArgumentNames()) {
// Set<Variable> parameters = new TreeSet<Variable>(cliquePotential.getArgumentNames());
// parameters.remove(v);
// //System.out.println(new TabulatedFunction<Variable>(cliquePotential).toString(Arrays.asList(v)));
ret.get(v).add(//Functions.normalize(
//Functions.marginalize(
//Functions.normalize(
cliquePotential//,
//cliquePotential.getArgumentNames()
//),
//parameters)
//, 1, null)
);
}
}
for (Map.Entry<Variable, List<Function<Variable>>> e : ret.entrySet()) {
List<Function<Variable>> list = e.getValue();
Collections.sort(list, new Comparator<Function<Variable>>() {
@Override
public int compare(Function<Variable> o1, Function<Variable> o2) {
if (o1 == o2)
return 0;
int s1 = o1.getArgumentNames().size(), s2 = o2.getArgumentNames().size();
if (s1 != s2)
return s2 - s1;
int c = o1.getArgumentNames().toString().compareTo(o2.getArgumentNames().toString());
if (c != 0)
return c;
// TODO Auto-generated method stub
return 1;
}
});
for (int i = 0, len = list.size(); i < len; i++) {
Function<Variable> cliquePotential = list.get(i);
Set<Variable> parameters = new TreeSet<Variable>(cliquePotential.getArgumentNames());
parameters.remove(e.getKey());
list.set(i, Functions.marginalize(cliquePotential, parameters));
}
}
System.out.println(" " +(System.currentTimeMillis() - startTime) + "ms.");
return ret;
}
|
diff --git a/src/main/java/com/neodem/componentConnector/io/DefaultFileConnector.java b/src/main/java/com/neodem/componentConnector/io/DefaultFileConnector.java
index 2716df8..d0b178d 100644
--- a/src/main/java/com/neodem/componentConnector/io/DefaultFileConnector.java
+++ b/src/main/java/com/neodem/componentConnector/io/DefaultFileConnector.java
@@ -1,193 +1,193 @@
package com.neodem.componentConnector.io;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import nu.xom.Builder;
import nu.xom.Document;
import nu.xom.Element;
import nu.xom.Elements;
import nu.xom.ParsingException;
import com.neodem.componentConnector.graphics.CrudeConsoleDisplay;
import com.neodem.componentConnector.graphics.Display;
import com.neodem.componentConnector.model.Connectable;
import com.neodem.componentConnector.model.Connection;
import com.neodem.componentConnector.model.Pin;
import com.neodem.componentConnector.model.component.Component;
import com.neodem.componentConnector.model.factory.ConnectableDefinition;
import com.neodem.componentConnector.model.factory.ConnectableFactory;
import com.neodem.componentConnector.model.sets.AutoAddComponentSet;
import com.neodem.componentConnector.model.sets.ComponentSet;
public class DefaultFileConnector implements FileConnector {
private ConnectableFactory factory;
public ComponentSet read(File componentsDef, File connectablesDef,
File connectionsDef) {
loadConnectableFactory(connectablesDef);
return loadSet(componentsDef, connectionsDef);
}
protected void loadConnectableFactory(File connectableDefs) {
Collection<ConnectableDefinition> defs = loadConnectableDefs(connectableDefs);
factory = new ConnectableFactory(defs);
}
protected ComponentSet loadSet(File componentsDef, File connectionsDef) {
ComponentSet set = null;
try {
// for collecting all connectables
Map<String, Connectable> components = new HashMap<String, Connectable>();
// open the components.xml file
Builder builder = new Builder();
Document doc = builder.build(componentsDef);
Element componentsRoot = doc.getRootElement();
Element componentParent = componentsRoot.getFirstChildElement("components");
int rows = Integer.parseInt(componentParent.getAttributeValue("rows"));
int cols = Integer.parseInt(componentParent.getAttributeValue("cols"));
boolean autoLocate = Boolean.parseBoolean(componentParent.getAttributeValue("autoLocate"));
// add components
Elements componentElements = componentParent.getChildElements();
if(autoLocate) {
set = new AutoAddComponentSet(cols, rows);
for (int i = 0; i < componentElements.size(); i++) {
Element componentElement = componentElements.get(i);
String type = componentElement.getAttributeValue("type");
String name = componentElement.getAttributeValue("name");
Component component = (Component) factory.make(type, name);
if (component != null) {
((AutoAddComponentSet) set).addComponentAtRandomLocation(component);
components.put(name, component);
}
}
} else {
set = new ComponentSet(cols, rows);
for (int i = 0; i < componentElements.size(); i++) {
Element componentElement = componentElements.get(i);
String type = componentElement.getAttributeValue("type");
String name = componentElement.getAttributeValue("name");
int row = Integer.parseInt(componentElement.getAttributeValue("row"));
int col = Integer.parseInt(componentElement.getAttributeValue("col"));
boolean inverted = Boolean.parseBoolean(componentElement.getAttributeValue("inv"));
Component component = (Component) factory.make(type, name);
if (component != null) {
component.setxLoc(col);
component.setyLoc(row);
component.setInverted(inverted);
set.addComponent(component);
components.put(name, component);
}
}
}
// add connectables
Element connectableParent = componentsRoot.getFirstChildElement("connectables");
Elements connectablesElements = connectableParent.getChildElements();
for (int i = 0; i < connectablesElements.size(); i++) {
Element componentElement = connectablesElements.get(i);
String type = componentElement.getAttributeValue("type");
String name = componentElement.getAttributeValue("name");
Connectable con = factory.make(type, name);
components.put(name, con);
}
doc = builder.build(connectionsDef);
Element connectionsRoot = doc.getRootElement();
// add connections
Elements connections = connectionsRoot.getChildElements();
for (int i = 0; i < connections.size(); i++) {
Element c = connections.get(i);
String from = c.getAttributeValue("from");
String to = c.getAttributeValue("to");
String fromPinLabel = c.getAttributeValue("fromPin");
String toPinLabel = c.getAttributeValue("toPin");
Connectable fromComp = components.get(from);
Connectable toComp = components.get(to);
Collection<Pin> fromPins = fromComp.getPins(fromPinLabel);
Collection<Pin> toPins = toComp.getPins(toPinLabel);
Connection con = new Connection(fromComp, fromPins, toComp, toPins);
set.addConnection(con);
}
} catch (ParsingException ex) {
- System.err.println("Cafe con Leche is malformed today. How embarrassing!");
+ System.err.println("malformed XML file : " + ex.getMessage());
} catch (IOException ex) {
- System.err.println("Could not connect to Cafe con Leche. The site may be down.");
+ System.err.println("io error : " + ex.getMessage());
}
return set;
}
private Collection<ConnectableDefinition> loadConnectableDefs(File connectableDefs) {
Collection<ConnectableDefinition> defs = new HashSet<ConnectableDefinition>();
try {
Builder parser = new Builder();
Document doc = parser.build(connectableDefs);
Element root = doc.getRootElement();
Elements definitions = root.getChildElements();
for (int i = 0; i < definitions.size(); i++) {
Element definition = definitions.get(i);
String id = definition.getAttributeValue("id");
String pinCount = definition.getAttributeValue("pins");
String type = definition.getAttributeValue("type");
ConnectableDefinition d = new ConnectableDefinition(id, type, Integer.parseInt(pinCount));
Elements pins = definition.getChildElements();
for (int j = 0; j < pins.size(); j++) {
Element pinElement = pins.get(j);
String pinNumber = pinElement.getAttributeValue("number");
String pinName = pinElement.getAttributeValue("name");
d.addPin(Integer.parseInt(pinNumber), pinName);
}
defs.add(d);
}
} catch (ParsingException ex) {
System.err.println("Cafe con Leche is malformed today. How embarrassing!");
} catch (IOException ex) {
System.err.println("Could not connect to Cafe con Leche. The site may be down.");
}
return defs;
}
public void writeToFile(File file, ComponentSet set) {
Display d = new CrudeConsoleDisplay();
BufferedWriter out = null;
try {
out = new BufferedWriter(new FileWriter(file));
out.write(d.asString(set));
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
out.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
| false
| true
|
protected ComponentSet loadSet(File componentsDef, File connectionsDef) {
ComponentSet set = null;
try {
// for collecting all connectables
Map<String, Connectable> components = new HashMap<String, Connectable>();
// open the components.xml file
Builder builder = new Builder();
Document doc = builder.build(componentsDef);
Element componentsRoot = doc.getRootElement();
Element componentParent = componentsRoot.getFirstChildElement("components");
int rows = Integer.parseInt(componentParent.getAttributeValue("rows"));
int cols = Integer.parseInt(componentParent.getAttributeValue("cols"));
boolean autoLocate = Boolean.parseBoolean(componentParent.getAttributeValue("autoLocate"));
// add components
Elements componentElements = componentParent.getChildElements();
if(autoLocate) {
set = new AutoAddComponentSet(cols, rows);
for (int i = 0; i < componentElements.size(); i++) {
Element componentElement = componentElements.get(i);
String type = componentElement.getAttributeValue("type");
String name = componentElement.getAttributeValue("name");
Component component = (Component) factory.make(type, name);
if (component != null) {
((AutoAddComponentSet) set).addComponentAtRandomLocation(component);
components.put(name, component);
}
}
} else {
set = new ComponentSet(cols, rows);
for (int i = 0; i < componentElements.size(); i++) {
Element componentElement = componentElements.get(i);
String type = componentElement.getAttributeValue("type");
String name = componentElement.getAttributeValue("name");
int row = Integer.parseInt(componentElement.getAttributeValue("row"));
int col = Integer.parseInt(componentElement.getAttributeValue("col"));
boolean inverted = Boolean.parseBoolean(componentElement.getAttributeValue("inv"));
Component component = (Component) factory.make(type, name);
if (component != null) {
component.setxLoc(col);
component.setyLoc(row);
component.setInverted(inverted);
set.addComponent(component);
components.put(name, component);
}
}
}
// add connectables
Element connectableParent = componentsRoot.getFirstChildElement("connectables");
Elements connectablesElements = connectableParent.getChildElements();
for (int i = 0; i < connectablesElements.size(); i++) {
Element componentElement = connectablesElements.get(i);
String type = componentElement.getAttributeValue("type");
String name = componentElement.getAttributeValue("name");
Connectable con = factory.make(type, name);
components.put(name, con);
}
doc = builder.build(connectionsDef);
Element connectionsRoot = doc.getRootElement();
// add connections
Elements connections = connectionsRoot.getChildElements();
for (int i = 0; i < connections.size(); i++) {
Element c = connections.get(i);
String from = c.getAttributeValue("from");
String to = c.getAttributeValue("to");
String fromPinLabel = c.getAttributeValue("fromPin");
String toPinLabel = c.getAttributeValue("toPin");
Connectable fromComp = components.get(from);
Connectable toComp = components.get(to);
Collection<Pin> fromPins = fromComp.getPins(fromPinLabel);
Collection<Pin> toPins = toComp.getPins(toPinLabel);
Connection con = new Connection(fromComp, fromPins, toComp, toPins);
set.addConnection(con);
}
} catch (ParsingException ex) {
System.err.println("Cafe con Leche is malformed today. How embarrassing!");
} catch (IOException ex) {
System.err.println("Could not connect to Cafe con Leche. The site may be down.");
}
return set;
}
|
protected ComponentSet loadSet(File componentsDef, File connectionsDef) {
ComponentSet set = null;
try {
// for collecting all connectables
Map<String, Connectable> components = new HashMap<String, Connectable>();
// open the components.xml file
Builder builder = new Builder();
Document doc = builder.build(componentsDef);
Element componentsRoot = doc.getRootElement();
Element componentParent = componentsRoot.getFirstChildElement("components");
int rows = Integer.parseInt(componentParent.getAttributeValue("rows"));
int cols = Integer.parseInt(componentParent.getAttributeValue("cols"));
boolean autoLocate = Boolean.parseBoolean(componentParent.getAttributeValue("autoLocate"));
// add components
Elements componentElements = componentParent.getChildElements();
if(autoLocate) {
set = new AutoAddComponentSet(cols, rows);
for (int i = 0; i < componentElements.size(); i++) {
Element componentElement = componentElements.get(i);
String type = componentElement.getAttributeValue("type");
String name = componentElement.getAttributeValue("name");
Component component = (Component) factory.make(type, name);
if (component != null) {
((AutoAddComponentSet) set).addComponentAtRandomLocation(component);
components.put(name, component);
}
}
} else {
set = new ComponentSet(cols, rows);
for (int i = 0; i < componentElements.size(); i++) {
Element componentElement = componentElements.get(i);
String type = componentElement.getAttributeValue("type");
String name = componentElement.getAttributeValue("name");
int row = Integer.parseInt(componentElement.getAttributeValue("row"));
int col = Integer.parseInt(componentElement.getAttributeValue("col"));
boolean inverted = Boolean.parseBoolean(componentElement.getAttributeValue("inv"));
Component component = (Component) factory.make(type, name);
if (component != null) {
component.setxLoc(col);
component.setyLoc(row);
component.setInverted(inverted);
set.addComponent(component);
components.put(name, component);
}
}
}
// add connectables
Element connectableParent = componentsRoot.getFirstChildElement("connectables");
Elements connectablesElements = connectableParent.getChildElements();
for (int i = 0; i < connectablesElements.size(); i++) {
Element componentElement = connectablesElements.get(i);
String type = componentElement.getAttributeValue("type");
String name = componentElement.getAttributeValue("name");
Connectable con = factory.make(type, name);
components.put(name, con);
}
doc = builder.build(connectionsDef);
Element connectionsRoot = doc.getRootElement();
// add connections
Elements connections = connectionsRoot.getChildElements();
for (int i = 0; i < connections.size(); i++) {
Element c = connections.get(i);
String from = c.getAttributeValue("from");
String to = c.getAttributeValue("to");
String fromPinLabel = c.getAttributeValue("fromPin");
String toPinLabel = c.getAttributeValue("toPin");
Connectable fromComp = components.get(from);
Connectable toComp = components.get(to);
Collection<Pin> fromPins = fromComp.getPins(fromPinLabel);
Collection<Pin> toPins = toComp.getPins(toPinLabel);
Connection con = new Connection(fromComp, fromPins, toComp, toPins);
set.addConnection(con);
}
} catch (ParsingException ex) {
System.err.println("malformed XML file : " + ex.getMessage());
} catch (IOException ex) {
System.err.println("io error : " + ex.getMessage());
}
return set;
}
|
diff --git a/samples/demos/src/com/actionbarsherlock/sample/demos/ListNavigation.java b/samples/demos/src/com/actionbarsherlock/sample/demos/ListNavigation.java
index d0befa0d..493b0c8e 100644
--- a/samples/demos/src/com/actionbarsherlock/sample/demos/ListNavigation.java
+++ b/samples/demos/src/com/actionbarsherlock/sample/demos/ListNavigation.java
@@ -1,29 +1,31 @@
package com.actionbarsherlock.sample.demos;
import android.os.Bundle;
import android.widget.ArrayAdapter;
import android.widget.TextView;
import com.actionbarsherlock.app.ActionBar;
import com.actionbarsherlock.app.SherlockActivity;
public class ListNavigation extends SherlockActivity implements ActionBar.OnNavigationListener {
@Override
public void onCreate(Bundle savedInstanceState) {
setTheme(SampleList.THEME); //Used for theme switching in samples
super.onCreate(savedInstanceState);
setContentView(R.layout.text);
getSupportActionBar().setNavigationMode(ActionBar.NAVIGATION_MODE_LIST);
- ArrayAdapter<CharSequence> list = ArrayAdapter.createFromResource(this, R.array.locations, android.R.layout.simple_dropdown_item_1line);
+ //NOTE: It is very important that you use 'sherlock_spinner_item' here
+ // and NOT 'simple_spinner_item' or you will see text color problems
+ ArrayAdapter<CharSequence> list = ArrayAdapter.createFromResource(this, R.array.locations, R.layout.sherlock_spinner_item);
list.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
getSupportActionBar().setListNavigationCallbacks(list, this);
}
@Override
public boolean onNavigationItemSelected(int itemPosition, long itemId) {
((TextView)findViewById(R.id.text)).setText("Selected: " + itemPosition);
return true;
}
}
| true
| true
|
public void onCreate(Bundle savedInstanceState) {
setTheme(SampleList.THEME); //Used for theme switching in samples
super.onCreate(savedInstanceState);
setContentView(R.layout.text);
getSupportActionBar().setNavigationMode(ActionBar.NAVIGATION_MODE_LIST);
ArrayAdapter<CharSequence> list = ArrayAdapter.createFromResource(this, R.array.locations, android.R.layout.simple_dropdown_item_1line);
list.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
getSupportActionBar().setListNavigationCallbacks(list, this);
}
|
public void onCreate(Bundle savedInstanceState) {
setTheme(SampleList.THEME); //Used for theme switching in samples
super.onCreate(savedInstanceState);
setContentView(R.layout.text);
getSupportActionBar().setNavigationMode(ActionBar.NAVIGATION_MODE_LIST);
//NOTE: It is very important that you use 'sherlock_spinner_item' here
// and NOT 'simple_spinner_item' or you will see text color problems
ArrayAdapter<CharSequence> list = ArrayAdapter.createFromResource(this, R.array.locations, R.layout.sherlock_spinner_item);
list.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
getSupportActionBar().setListNavigationCallbacks(list, this);
}
|
diff --git a/java/marytts/tools/voiceimport/HMMVoiceDataPreparation.java b/java/marytts/tools/voiceimport/HMMVoiceDataPreparation.java
index f16662dd7..93d52682f 100644
--- a/java/marytts/tools/voiceimport/HMMVoiceDataPreparation.java
+++ b/java/marytts/tools/voiceimport/HMMVoiceDataPreparation.java
@@ -1,407 +1,413 @@
/* ----------------------------------------------------------------- */
/* The HMM-Based Speech Synthesis Engine "hts_engine API" */
/* developed by HTS Working Group */
/* http://hts-engine.sourceforge.net/ */
/* ----------------------------------------------------------------- */
/* */
/* Copyright (c) 2001-2010 Nagoya Institute of Technology */
/* Department of Computer Science */
/* */
/* 2001-2008 Tokyo Institute of Technology */
/* Interdisciplinary Graduate School of */
/* Science and Engineering */
/* */
/* All rights reserved. */
/* */
/* Redistribution and use in source and binary forms, with or */
/* without modification, are permitted provided that the following */
/* conditions are met: */
/* */
/* - Redistributions of source code must retain the above copyright */
/* notice, this list of conditions and the following disclaimer. */
/* - Redistributions in binary form must reproduce the above */
/* copyright notice, this list of conditions and the following */
/* disclaimer in the documentation and/or other materials provided */
/* with the distribution. */
/* - Neither the name of the HTS working group nor the names of its */
/* contributors may be used to endorse or promote products derived */
/* from this software without specific prior written permission. */
/* */
/* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND */
/* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, */
/* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF */
/* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE */
/* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS */
/* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, */
/* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED */
/* TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, */
/* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON */
/* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, */
/* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY */
/* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE */
/* POSSIBILITY OF SUCH DAMAGE. */
/* ----------------------------------------------------------------- */
/**
* Copyright 2011 DFKI GmbH.
* All Rights Reserved. Use is subject to license terms.
*
* This file is part of MARY TTS.
*
* MARY TTS is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package marytts.tools.voiceimport;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.SortedMap;
import java.util.TreeMap;
import marytts.util.io.FileUtils;
/**
* This program was modified from previous version to:
* 1. copy $MARY_BASE/lib/external/hts directory to the voice building directory
* 2. check again that all the external necessary programs are installed.
* 3. check as before that wav and text directories exist and make conversions:
* voiceDir/wav -> voiceDir/hts/data/raw
* userProvidedDir/utts (festival format) -> voiceDir/text (one file per transcription)
* userProvidedDir/raw move to voiceDir/hts/data/raw
*
* @author marcela
*
*/
public class HMMVoiceDataPreparation extends VoiceImportComponent{
private DatabaseLayout db;
private String name = "HMMVoiceDataPreparation";
public final String ADAPTSCRIPTS = name + ".adaptScripts";
public final String USERRAWDIR = name + ".userRawDirectory";
public final String USERUTTDIR = name + ".userUttDirectory";
private String marybase;
private String voiceDir;
private String soxPath;
private String sep;
private String dataDir;
private String scriptsDir;
public String getName(){
return name;
}
/**
* Get the map of properties2values
* containing the default values
* @return map of props2values
*/
public SortedMap<String,String> getDefaultProps(DatabaseLayout db){
this.db = db;
if (props == null){
props = new TreeMap<String,String>();
props.put(USERRAWDIR, "");
props.put(USERUTTDIR, "");
props.put(ADAPTSCRIPTS, "false");
}
return props;
}
protected void setupHelp(){
props2Help = new TreeMap<String,String>();
props2Help.put(USERRAWDIR, "raw files directory, user provided directory (default empty)");
props2Help.put(USERUTTDIR, "utterance directory (transcriptions in festival format), user provided directory (default empty)");
props2Help.put(ADAPTSCRIPTS, "ADAPTSCRIPTS=false: speaker dependent scripts, ADAPTSCRIPTS=true: speaker adaptation/adaptive scripts. ");
}
/**
* Do the computations required by this component.
*
* @return true on success, false on failure
*/
public boolean compute() throws Exception{
boolean raw = false;
boolean text = false;
boolean wav = false;
marybase = db.getProp(db.MARYBASE);
voiceDir = db.getProp(db.ROOTDIR);
soxPath = db.getExternal(db.SOXPATH);
sep = System.getProperty("file.separator");
dataDir = voiceDir + "hts" + sep + "data" + sep;
scriptsDir = dataDir + "scripts" + sep;
// For both speaker indep. or adapt scripts the programs are the same
// check again that all the external necessary programs are installed.
System.out.println("\nHMMVoiceDataPreparation:\nChecking paths of external programs");
if( !checkExternalPaths() )
return false;
if(getProp(ADAPTSCRIPTS).contentEquals("false")) {
// 1. copy from $MARY_TTS/lib/external/hts directory in the voice building directory
String sourceFolder = marybase + sep + "lib" + sep + "external" + sep + "hts";
String htsFolder = voiceDir + sep + "hts";
FileUtils.copyFolderRecursive(sourceFolder, htsFolder, false);
// 2. check as before that wav, raw and text directories exist and are in the correct place
System.out.println("\nChecking wav/raw and text directories and files for running HTS speaker independent training scripts...");
// default locations of directories:
String wavDirName = voiceDir + "wav";
String textDirName = voiceDir + "text";
String rawDirName = dataDir + "raw";
+ String uttsDirName = dataDir + "utts";
// 2.1 check raw and wav files:
String userRawDirName = getProp(USERRAWDIR);
if( existWithFiles(rawDirName) ) {
raw = true;
// the raw files should be the same as in wav file, if wav file empty convert from raw --> wav
if( !existWithFiles(wavDirName))
convertRaw2Wav(rawDirName, wavDirName);
} else {
// check if the user has provided a raw directory
if( !userRawDirName.equals("") ) {
File userRawDir = new File(userRawDirName);
// check if user provided raw dir contains files
if( existWithFiles(userRawDirName) ) {
// copy the user provided raw directory to hts/data/raw/
System.out.println("Copying files from: " + userRawDirName + " to: " + rawDirName);
FileUtils.copyFolder(userRawDirName, rawDirName);
// the raw files should be the same as in wav file, if wav file empty convert from raw --> wav
if( !existWithFiles(wavDirName))
convertRaw2Wav(rawDirName, wavDirName);
raw = true;
} else
System.out.println("User provided raw directory: " + userRawDirName + " does not exist or does not contain files\n");
}
// if we still do not have raw files...
// then there must be a wav directory, check that it contains files, if so convert wav --> raw
if(!raw) {
System.out.println("Checking if " + wavDirName + " contains files");
if( existWithFiles(wavDirName) ){
convertWav2Raw(wavDirName, rawDirName);
raw = true;
} else {
System.out.println("There are no wav files in " + wavDirName);
}
}
}
// 2.2 check text files:
if( existWithFiles(textDirName) ) {
text = true;
+ } else if( existWithFiles(uttsDirName) ) {
+ convertUtt2Text(uttsDirName, textDirName);
+ text = true;
} else {
// check if the user has provided a utterance directory
String userUttDirName = getProp(USERUTTDIR);
if( !userUttDirName.equals("") ) {
// check if user provided utt dir contains files
if( existWithFiles(userUttDirName) ) {
// convert utt --> text (transcriptions festival format --> MARY format)
convertUtt2Text(userUttDirName, textDirName);
text = true;
} else
System.out.println("User provided utterance directory: " + userUttDirName + " does not exist or does not contain files\n");
- } else
- System.out.println("\nThere are no text files in " + textDirName);
+ } else {
+ System.out.println("\nThere are no text files in " + textDirName);
+ text = false;
+ }
}
if( raw && text ){
System.out.println("\nHMMVoiceDataPreparation finished:\n" +
"HTS speaker independent scripts copied in current voice building directory --> hts\n" +
"wav/raw and text directories in place.");
return true;
}
else
return false;
} else { // ADAPTSCRIPTS == true
// Here it is checked that the raw files are in data/raw, wav, phonelab and phonefeatures must be
// provided by the user...
// 1. copy from $MARY_TTS/lib/external/hts-adapt directory in the voice building directory
String sourceFolder = marybase + sep + "lib" + sep + "external" + sep + "hts-adapt";
String htsFolder = voiceDir + sep + "hts";
FileUtils.copyFolderRecursive(sourceFolder, htsFolder, false);
// 2. check as before that wav, raw and text directories exist and are in the correct place
//System.out.println("\nChecking raw directory for running HTS adaptive training scripts...");
File dirSpeakersRaw = new File(dataDir + "/raw");
String[] speakers;
if(dirSpeakersRaw.exists() && dirSpeakersRaw.list().length > 0){
speakers = dirSpeakersRaw.list();
for(int i=0; i<speakers.length; i++){
File dirSpeakerRaw = new File(dataDir + "/raw/" + speakers[i]);
if(dirSpeakerRaw.exists() && dirSpeakerRaw.list().length > 0 ){
raw = true;
} else {
System.out.println("Error: directory " + voiceDir + "/raw/" + speakers[i] + " does not contain files." );
raw = false;
break;
}
}
} else {
System.out.println("Error: directory " + voiceDir + "/raw does not contain files." );
raw = false;
}
if( raw){
System.out.println("\nHMMVoiceDataPreparation finished:\n" +
"HTS adapt scripts copied in current voice building directory --> hts\n" +
"raw directory in place.");
return true;
}
else
return false;
}
}
/**
* Check the paths of all the necessary external programs
* @return true if all the paths are defined
*/
private boolean checkExternalPaths() throws Exception{
boolean result = true;
if ( db.getExternal(db.AWKPATH) == null ){
System.out.println(" *Missing path for awk");
result = false;
}
if (db.getExternal(db.PERLPATH) == null){
System.out.println(" *Missing path for perl");
result = false;
}
if (db.getExternal(db.BCPATH) == null){
System.out.println(" *Missing path for bc");
result = false;
}
if (db.getExternal(db.TCLPATH) == null){
System.out.println(" *Missing path for tclsh");
result = false;
}
if (db.getExternal(db.SOXPATH) == null){
System.out.println(" *Missing path for sox");
result = false;
}
if(db.getExternal(db.HTSPATH) == null){
System.out.println(" *Missing path for hts/htk");
result = false;
}
if(db.getExternal(db.HTSENGINEPATH) == null){
System.out.println(" *Missing path for hts_engine");
result = false;
}
if(db.getExternal(db.SPTKPATH) == null){
System.out.println(" *Missing path for sptk");
result = false;
}
if(db.getExternal(db.EHMMPATH) == null){
System.out.println(" *Missing path for ehmm");
result = false;
}
if(!result)
System.out.println("Please run MARYBASE/lib/external/check_install_external_programs.sh and check/install the missing programs");
else
System.out.println("Paths for all external programs are defined.");
return result;
}
/**
* Checks if the directory exist and has files
* @param dir
* @return
*/
private boolean existWithFiles(String dirName) {
File dir = new File(dirName);
if( dir.exists() && dir.list().length>0 )
return true;
else
return false;
}
private void convertRaw2Wav(String rawDirName, String wavDirName) {
String Fs = db.getProperty(db.SAMPLINGRATE);
String cmdLine;
String raw2wavCmd = scriptsDir + "raw2wav.sh";
System.out.println("Converting raw files to wav from: " + rawDirName + " to: " + wavDirName);
File wavDir = new File(wavDirName);
if(!wavDir.exists())
wavDir.mkdir();
cmdLine = "chmod +x " + raw2wavCmd;
General.launchProc(cmdLine, "raw2wav", voiceDir);
cmdLine = raw2wavCmd + " " + soxPath + " " + rawDirName + " " + wavDirName + " " + Fs;
General.launchProc(cmdLine, "raw2wav", voiceDir);
}
private void convertWav2Raw(String wavDirName, String rawDirName) {
String cmdLine;
String wav2rawCmd = scriptsDir + "wav2raw.sh";
System.out.println("Converting wav files to raw from: " + wavDirName + " to: " + rawDirName);
File rawDir = new File(rawDirName);
if(!rawDir.exists())
rawDir.mkdir();
cmdLine = "chmod +x " + wav2rawCmd;
General.launchProc(cmdLine, "wav2raw", voiceDir);
cmdLine = wav2rawCmd + " " + soxPath + " " + wavDirName + " " + rawDirName ;
General.launchProc(cmdLine, "wav2raw", voiceDir);
}
private void convertUtt2Text(String userUttDirName, String textDirName) {
String cmdLine;
String utt2transCmd = scriptsDir + "utt2trans.sh"; // festival to mary format
System.out.println("\nConverting transcription files (festival format) to text from: " + userUttDirName + " to: " + textDirName);
File textDir = new File(textDirName);
if(!textDir.exists())
textDir.mkdir();
cmdLine = "chmod +x " + utt2transCmd;
General.launchProc(cmdLine, "utt2trans", voiceDir);
cmdLine = utt2transCmd + " " + userUttDirName + " " + textDirName;
General.launchProc(cmdLine, "utt2trans", voiceDir);
}
/**
* Provide the progress of computation, in percent, or -1 if
* that feature is not implemented.
* @return -1 if not implemented, or an integer between 0 and 100.
*/
public int getProgress(){
return -1;
}
}
| false
| true
|
public boolean compute() throws Exception{
boolean raw = false;
boolean text = false;
boolean wav = false;
marybase = db.getProp(db.MARYBASE);
voiceDir = db.getProp(db.ROOTDIR);
soxPath = db.getExternal(db.SOXPATH);
sep = System.getProperty("file.separator");
dataDir = voiceDir + "hts" + sep + "data" + sep;
scriptsDir = dataDir + "scripts" + sep;
// For both speaker indep. or adapt scripts the programs are the same
// check again that all the external necessary programs are installed.
System.out.println("\nHMMVoiceDataPreparation:\nChecking paths of external programs");
if( !checkExternalPaths() )
return false;
if(getProp(ADAPTSCRIPTS).contentEquals("false")) {
// 1. copy from $MARY_TTS/lib/external/hts directory in the voice building directory
String sourceFolder = marybase + sep + "lib" + sep + "external" + sep + "hts";
String htsFolder = voiceDir + sep + "hts";
FileUtils.copyFolderRecursive(sourceFolder, htsFolder, false);
// 2. check as before that wav, raw and text directories exist and are in the correct place
System.out.println("\nChecking wav/raw and text directories and files for running HTS speaker independent training scripts...");
// default locations of directories:
String wavDirName = voiceDir + "wav";
String textDirName = voiceDir + "text";
String rawDirName = dataDir + "raw";
// 2.1 check raw and wav files:
String userRawDirName = getProp(USERRAWDIR);
if( existWithFiles(rawDirName) ) {
raw = true;
// the raw files should be the same as in wav file, if wav file empty convert from raw --> wav
if( !existWithFiles(wavDirName))
convertRaw2Wav(rawDirName, wavDirName);
} else {
// check if the user has provided a raw directory
if( !userRawDirName.equals("") ) {
File userRawDir = new File(userRawDirName);
// check if user provided raw dir contains files
if( existWithFiles(userRawDirName) ) {
// copy the user provided raw directory to hts/data/raw/
System.out.println("Copying files from: " + userRawDirName + " to: " + rawDirName);
FileUtils.copyFolder(userRawDirName, rawDirName);
// the raw files should be the same as in wav file, if wav file empty convert from raw --> wav
if( !existWithFiles(wavDirName))
convertRaw2Wav(rawDirName, wavDirName);
raw = true;
} else
System.out.println("User provided raw directory: " + userRawDirName + " does not exist or does not contain files\n");
}
// if we still do not have raw files...
// then there must be a wav directory, check that it contains files, if so convert wav --> raw
if(!raw) {
System.out.println("Checking if " + wavDirName + " contains files");
if( existWithFiles(wavDirName) ){
convertWav2Raw(wavDirName, rawDirName);
raw = true;
} else {
System.out.println("There are no wav files in " + wavDirName);
}
}
}
// 2.2 check text files:
if( existWithFiles(textDirName) ) {
text = true;
} else {
// check if the user has provided a utterance directory
String userUttDirName = getProp(USERUTTDIR);
if( !userUttDirName.equals("") ) {
// check if user provided utt dir contains files
if( existWithFiles(userUttDirName) ) {
// convert utt --> text (transcriptions festival format --> MARY format)
convertUtt2Text(userUttDirName, textDirName);
text = true;
} else
System.out.println("User provided utterance directory: " + userUttDirName + " does not exist or does not contain files\n");
} else
System.out.println("\nThere are no text files in " + textDirName);
}
if( raw && text ){
System.out.println("\nHMMVoiceDataPreparation finished:\n" +
"HTS speaker independent scripts copied in current voice building directory --> hts\n" +
"wav/raw and text directories in place.");
return true;
}
else
return false;
} else { // ADAPTSCRIPTS == true
// Here it is checked that the raw files are in data/raw, wav, phonelab and phonefeatures must be
// provided by the user...
// 1. copy from $MARY_TTS/lib/external/hts-adapt directory in the voice building directory
String sourceFolder = marybase + sep + "lib" + sep + "external" + sep + "hts-adapt";
String htsFolder = voiceDir + sep + "hts";
FileUtils.copyFolderRecursive(sourceFolder, htsFolder, false);
// 2. check as before that wav, raw and text directories exist and are in the correct place
//System.out.println("\nChecking raw directory for running HTS adaptive training scripts...");
File dirSpeakersRaw = new File(dataDir + "/raw");
String[] speakers;
if(dirSpeakersRaw.exists() && dirSpeakersRaw.list().length > 0){
speakers = dirSpeakersRaw.list();
for(int i=0; i<speakers.length; i++){
File dirSpeakerRaw = new File(dataDir + "/raw/" + speakers[i]);
if(dirSpeakerRaw.exists() && dirSpeakerRaw.list().length > 0 ){
raw = true;
} else {
System.out.println("Error: directory " + voiceDir + "/raw/" + speakers[i] + " does not contain files." );
raw = false;
break;
}
}
} else {
System.out.println("Error: directory " + voiceDir + "/raw does not contain files." );
raw = false;
}
if( raw){
System.out.println("\nHMMVoiceDataPreparation finished:\n" +
"HTS adapt scripts copied in current voice building directory --> hts\n" +
"raw directory in place.");
return true;
}
else
return false;
}
}
|
public boolean compute() throws Exception{
boolean raw = false;
boolean text = false;
boolean wav = false;
marybase = db.getProp(db.MARYBASE);
voiceDir = db.getProp(db.ROOTDIR);
soxPath = db.getExternal(db.SOXPATH);
sep = System.getProperty("file.separator");
dataDir = voiceDir + "hts" + sep + "data" + sep;
scriptsDir = dataDir + "scripts" + sep;
// For both speaker indep. or adapt scripts the programs are the same
// check again that all the external necessary programs are installed.
System.out.println("\nHMMVoiceDataPreparation:\nChecking paths of external programs");
if( !checkExternalPaths() )
return false;
if(getProp(ADAPTSCRIPTS).contentEquals("false")) {
// 1. copy from $MARY_TTS/lib/external/hts directory in the voice building directory
String sourceFolder = marybase + sep + "lib" + sep + "external" + sep + "hts";
String htsFolder = voiceDir + sep + "hts";
FileUtils.copyFolderRecursive(sourceFolder, htsFolder, false);
// 2. check as before that wav, raw and text directories exist and are in the correct place
System.out.println("\nChecking wav/raw and text directories and files for running HTS speaker independent training scripts...");
// default locations of directories:
String wavDirName = voiceDir + "wav";
String textDirName = voiceDir + "text";
String rawDirName = dataDir + "raw";
String uttsDirName = dataDir + "utts";
// 2.1 check raw and wav files:
String userRawDirName = getProp(USERRAWDIR);
if( existWithFiles(rawDirName) ) {
raw = true;
// the raw files should be the same as in wav file, if wav file empty convert from raw --> wav
if( !existWithFiles(wavDirName))
convertRaw2Wav(rawDirName, wavDirName);
} else {
// check if the user has provided a raw directory
if( !userRawDirName.equals("") ) {
File userRawDir = new File(userRawDirName);
// check if user provided raw dir contains files
if( existWithFiles(userRawDirName) ) {
// copy the user provided raw directory to hts/data/raw/
System.out.println("Copying files from: " + userRawDirName + " to: " + rawDirName);
FileUtils.copyFolder(userRawDirName, rawDirName);
// the raw files should be the same as in wav file, if wav file empty convert from raw --> wav
if( !existWithFiles(wavDirName))
convertRaw2Wav(rawDirName, wavDirName);
raw = true;
} else
System.out.println("User provided raw directory: " + userRawDirName + " does not exist or does not contain files\n");
}
// if we still do not have raw files...
// then there must be a wav directory, check that it contains files, if so convert wav --> raw
if(!raw) {
System.out.println("Checking if " + wavDirName + " contains files");
if( existWithFiles(wavDirName) ){
convertWav2Raw(wavDirName, rawDirName);
raw = true;
} else {
System.out.println("There are no wav files in " + wavDirName);
}
}
}
// 2.2 check text files:
if( existWithFiles(textDirName) ) {
text = true;
} else if( existWithFiles(uttsDirName) ) {
convertUtt2Text(uttsDirName, textDirName);
text = true;
} else {
// check if the user has provided a utterance directory
String userUttDirName = getProp(USERUTTDIR);
if( !userUttDirName.equals("") ) {
// check if user provided utt dir contains files
if( existWithFiles(userUttDirName) ) {
// convert utt --> text (transcriptions festival format --> MARY format)
convertUtt2Text(userUttDirName, textDirName);
text = true;
} else
System.out.println("User provided utterance directory: " + userUttDirName + " does not exist or does not contain files\n");
} else {
System.out.println("\nThere are no text files in " + textDirName);
text = false;
}
}
if( raw && text ){
System.out.println("\nHMMVoiceDataPreparation finished:\n" +
"HTS speaker independent scripts copied in current voice building directory --> hts\n" +
"wav/raw and text directories in place.");
return true;
}
else
return false;
} else { // ADAPTSCRIPTS == true
// Here it is checked that the raw files are in data/raw, wav, phonelab and phonefeatures must be
// provided by the user...
// 1. copy from $MARY_TTS/lib/external/hts-adapt directory in the voice building directory
String sourceFolder = marybase + sep + "lib" + sep + "external" + sep + "hts-adapt";
String htsFolder = voiceDir + sep + "hts";
FileUtils.copyFolderRecursive(sourceFolder, htsFolder, false);
// 2. check as before that wav, raw and text directories exist and are in the correct place
//System.out.println("\nChecking raw directory for running HTS adaptive training scripts...");
File dirSpeakersRaw = new File(dataDir + "/raw");
String[] speakers;
if(dirSpeakersRaw.exists() && dirSpeakersRaw.list().length > 0){
speakers = dirSpeakersRaw.list();
for(int i=0; i<speakers.length; i++){
File dirSpeakerRaw = new File(dataDir + "/raw/" + speakers[i]);
if(dirSpeakerRaw.exists() && dirSpeakerRaw.list().length > 0 ){
raw = true;
} else {
System.out.println("Error: directory " + voiceDir + "/raw/" + speakers[i] + " does not contain files." );
raw = false;
break;
}
}
} else {
System.out.println("Error: directory " + voiceDir + "/raw does not contain files." );
raw = false;
}
if( raw){
System.out.println("\nHMMVoiceDataPreparation finished:\n" +
"HTS adapt scripts copied in current voice building directory --> hts\n" +
"raw directory in place.");
return true;
}
else
return false;
}
}
|
diff --git a/parser/src/main/java/parser/flatzinc/FZNLayout.java b/parser/src/main/java/parser/flatzinc/FZNLayout.java
index fa130fa6a..56ee6f207 100644
--- a/parser/src/main/java/parser/flatzinc/FZNLayout.java
+++ b/parser/src/main/java/parser/flatzinc/FZNLayout.java
@@ -1,175 +1,175 @@
/**
* Copyright (c) 1999-2011, Ecole des Mines de Nantes
* All rights reserved.
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the Ecole des Mines de Nantes nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package parser.flatzinc;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import parser.flatzinc.ast.expression.EArray;
import parser.flatzinc.ast.expression.ESetBounds;
import parser.flatzinc.ast.expression.ESetList;
import parser.flatzinc.ast.expression.Expression;
import solver.objective.NoObjectiveManager;
import solver.search.loop.AbstractSearchLoop;
import solver.search.loop.monitors.ISearchMonitor;
import solver.search.loop.monitors.VoidSearchMonitor;
import solver.variables.IntVar;
import java.util.ArrayList;
import java.util.List;
/**
* <br/>
*
* @author Charles Prud'homme
* @since 27/01/11
*/
public final class FZNLayout extends VoidSearchMonitor implements ISearchMonitor {
protected static final Logger LOGGER = LoggerFactory.getLogger("fzn");
List<String> output_names;
List<IntVar> output_vars;
List<String> output_arrays_names;
List<IntVar[]> output_arrays_vars;
StringBuilder stringBuilder = new StringBuilder();
AbstractSearchLoop searchLoop;
public FZNLayout() {
super();
output_vars = new ArrayList<IntVar>();
output_names = new ArrayList<String>();
output_arrays_names = new ArrayList<String>();
output_arrays_vars = new ArrayList<IntVar[]>();
}
@Override
public void onSolution() {
if (LOGGER.isInfoEnabled()) {
for (int i = 0; i < output_vars.size(); i++) {
LOGGER.info("{} = {};", output_names.get(i), output_vars.get(i).getValue());
}
for (int i = 0; i < output_arrays_vars.size(); i++) {
String name = output_arrays_names.get(i);
IntVar[] ivars = output_arrays_vars.get(i);
stringBuilder.append(ivars[0].getValue());
for (int j = 1; j < ivars.length; j++) {
stringBuilder.append(", ").append(ivars[j].getValue());
}
LOGGER.info(name, stringBuilder.toString());
stringBuilder.setLength(0);
}
LOGGER.info("----------");
}
}
@Override
public void beforeClose () {
if (LOGGER.isInfoEnabled()) {
if (searchLoop.getMeasures().getSolutionCount() == 0) {
if (searchLoop.getLimitsBox().isReached()) {
LOGGER.info("=====UNKNOWN=====");
} else {
LOGGER.info("=====UNSATISFIABLE=====");
}
} else {
if (searchLoop.getLimitsBox().isReached()
&& !(searchLoop.getObjectivemanager() instanceof NoObjectiveManager)) {
- LOGGER.info("=====UNBOUDNED=====");
+ LOGGER.info("=====UNBOUNDED=====");
} else {
LOGGER.info("==========");
}
}
if (LOGGER.isInfoEnabled()) {
LOGGER.info("% - Search statistics");
LOGGER.info("% \t Solutions : {}", searchLoop.getMeasures().getSolutionCount());
LOGGER.info("% \t Building time : {}ms", searchLoop.getMeasures().getReadingTimeCount());
LOGGER.info("% \t Initial propagation : {}ms", searchLoop.getMeasures().getInitialPropagationTimeCount());
LOGGER.info("% \t Resolution : {}ms", searchLoop.getMeasures().getTimeCount());
LOGGER.info("% \t Nodes : {}", searchLoop.getMeasures().getNodeCount());
LOGGER.info("% \t Backtracks : {}", searchLoop.getMeasures().getBackTrackCount());
LOGGER.info("% \t Fails : {}", searchLoop.getMeasures().getFailCount());
LOGGER.info("% \t Restarts : {}", searchLoop.getMeasures().getRestartCount());
LOGGER.info("% \t Memory : {}", searchLoop.getMeasures().getUsedMemory());
LOGGER.info("% \t Variables : {}", searchLoop.getSolver().getVars().length);
LOGGER.info("% \t Constraints : {}", searchLoop.getSolver().getCstrs().length);
LOGGER.info("% \t Checks : {} + {}", searchLoop.getMeasures().getEventsCount(),
searchLoop.getMeasures().getPropagationsCount());
}
}
}
public void addOutputVar(String name, IntVar variable) {
output_names.add(name);
output_vars.add(variable);
}
public void addOutputArrays(String name, IntVar[] variables, List<Expression> indices) {
EArray array = (EArray) indices.get(0);
stringBuilder.append(name).append(" = array").append(array.what.size()).append("d(");
build(stringBuilder, array.getWhat_i(0));
for (int i = 1; i < array.what.size(); i++) {
stringBuilder.append(',');
build(stringBuilder, array.getWhat_i(i));
}
stringBuilder.append(",[{}]);");
output_arrays_names.add(stringBuilder.toString());
output_arrays_vars.add(variables.clone());
stringBuilder.setLength(0);
}
private int[] build(StringBuilder st, Expression exp) {
switch (exp.getTypeOf()) {
case INT:
int idx = exp.intValue();
st.append(idx);
return new int[]{idx};
case SET_B:
ESetBounds esb = (ESetBounds) exp;
st.append(esb.toString());
return esb.enumVal();
case SET_L:
ESetList esl = (ESetList) exp;
st.append(esl.toString());
return esl.enumVal();
default:
LOGGER.warn("output_array:: Unknow index {}", exp.getTypeOf());
return new int[0];
}
}
public void setSearchLoop(AbstractSearchLoop searchLoop) {
searchLoop.plugSearchMonitor(this);
this.searchLoop = searchLoop;
}
}
| true
| true
|
public void beforeClose () {
if (LOGGER.isInfoEnabled()) {
if (searchLoop.getMeasures().getSolutionCount() == 0) {
if (searchLoop.getLimitsBox().isReached()) {
LOGGER.info("=====UNKNOWN=====");
} else {
LOGGER.info("=====UNSATISFIABLE=====");
}
} else {
if (searchLoop.getLimitsBox().isReached()
&& !(searchLoop.getObjectivemanager() instanceof NoObjectiveManager)) {
LOGGER.info("=====UNBOUDNED=====");
} else {
LOGGER.info("==========");
}
}
if (LOGGER.isInfoEnabled()) {
LOGGER.info("% - Search statistics");
LOGGER.info("% \t Solutions : {}", searchLoop.getMeasures().getSolutionCount());
LOGGER.info("% \t Building time : {}ms", searchLoop.getMeasures().getReadingTimeCount());
LOGGER.info("% \t Initial propagation : {}ms", searchLoop.getMeasures().getInitialPropagationTimeCount());
LOGGER.info("% \t Resolution : {}ms", searchLoop.getMeasures().getTimeCount());
LOGGER.info("% \t Nodes : {}", searchLoop.getMeasures().getNodeCount());
LOGGER.info("% \t Backtracks : {}", searchLoop.getMeasures().getBackTrackCount());
LOGGER.info("% \t Fails : {}", searchLoop.getMeasures().getFailCount());
LOGGER.info("% \t Restarts : {}", searchLoop.getMeasures().getRestartCount());
LOGGER.info("% \t Memory : {}", searchLoop.getMeasures().getUsedMemory());
LOGGER.info("% \t Variables : {}", searchLoop.getSolver().getVars().length);
LOGGER.info("% \t Constraints : {}", searchLoop.getSolver().getCstrs().length);
LOGGER.info("% \t Checks : {} + {}", searchLoop.getMeasures().getEventsCount(),
searchLoop.getMeasures().getPropagationsCount());
}
}
}
|
public void beforeClose () {
if (LOGGER.isInfoEnabled()) {
if (searchLoop.getMeasures().getSolutionCount() == 0) {
if (searchLoop.getLimitsBox().isReached()) {
LOGGER.info("=====UNKNOWN=====");
} else {
LOGGER.info("=====UNSATISFIABLE=====");
}
} else {
if (searchLoop.getLimitsBox().isReached()
&& !(searchLoop.getObjectivemanager() instanceof NoObjectiveManager)) {
LOGGER.info("=====UNBOUNDED=====");
} else {
LOGGER.info("==========");
}
}
if (LOGGER.isInfoEnabled()) {
LOGGER.info("% - Search statistics");
LOGGER.info("% \t Solutions : {}", searchLoop.getMeasures().getSolutionCount());
LOGGER.info("% \t Building time : {}ms", searchLoop.getMeasures().getReadingTimeCount());
LOGGER.info("% \t Initial propagation : {}ms", searchLoop.getMeasures().getInitialPropagationTimeCount());
LOGGER.info("% \t Resolution : {}ms", searchLoop.getMeasures().getTimeCount());
LOGGER.info("% \t Nodes : {}", searchLoop.getMeasures().getNodeCount());
LOGGER.info("% \t Backtracks : {}", searchLoop.getMeasures().getBackTrackCount());
LOGGER.info("% \t Fails : {}", searchLoop.getMeasures().getFailCount());
LOGGER.info("% \t Restarts : {}", searchLoop.getMeasures().getRestartCount());
LOGGER.info("% \t Memory : {}", searchLoop.getMeasures().getUsedMemory());
LOGGER.info("% \t Variables : {}", searchLoop.getSolver().getVars().length);
LOGGER.info("% \t Constraints : {}", searchLoop.getSolver().getCstrs().length);
LOGGER.info("% \t Checks : {} + {}", searchLoop.getMeasures().getEventsCount(),
searchLoop.getMeasures().getPropagationsCount());
}
}
}
|
diff --git a/app/src/com/halcyonwaves/apps/meinemediathek/fragments/MovieSearchFragment.java b/app/src/com/halcyonwaves/apps/meinemediathek/fragments/MovieSearchFragment.java
index 2b2eaba..e072497 100644
--- a/app/src/com/halcyonwaves/apps/meinemediathek/fragments/MovieSearchFragment.java
+++ b/app/src/com/halcyonwaves/apps/meinemediathek/fragments/MovieSearchFragment.java
@@ -1,107 +1,109 @@
package com.halcyonwaves.apps.meinemediathek.fragments;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import android.app.AlertDialog;
import android.app.Fragment;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.net.Uri;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.EditText;
import com.halcyonwaves.apps.meinemediathek.ChangeLogDialog;
import com.halcyonwaves.apps.meinemediathek.Consts;
import com.halcyonwaves.apps.meinemediathek.R;
import com.halcyonwaves.apps.meinemediathek.activities.SearchResultsActivity;
public class MovieSearchFragment extends Fragment {
private final static String TAG = "MovieSearchFragment";
private Button btnSearch = null;
private EditText etTitleToSearchFor = null;
@Override
public View onCreateView( final LayoutInflater inflater, final ViewGroup container, final Bundle savedInstanceState ) {
// get the basic view layout from the application resources
final View v = inflater.inflate( R.layout.fragment_moviesearch, container );
// get the handles to the controls we have to access
this.btnSearch = (Button) v.findViewById( R.id.btn_search );
this.etTitleToSearchFor = (EditText) v.findViewById( R.id.et_searchfortitle );
// set the behavior for the search button
this.btnSearch.setOnClickListener( new OnClickListener() {
@Override
public void onClick( final View v ) {
final Intent intent = new Intent( MovieSearchFragment.this.getActivity(), SearchResultsActivity.class );
intent.putExtra( "searchFor", MovieSearchFragment.this.etTitleToSearchFor.getText().toString() );
MovieSearchFragment.this.startActivity( intent );
}
} );
// get the preferences of the application
final SharedPreferences appPreferences = PreferenceManager.getDefaultSharedPreferences( this.getActivity().getApplicationContext() );
// if this is the first time the user uses this application , he or she has to agree that he or she
// will not do anything harmful
if( !appPreferences.getBoolean( Consts.PREFERENCE_LICENSE_ACCEPTED, false ) ) {
// prepare a dialog asking the user he or she really wants to do the download on a mobile connection
final AlertDialog.Builder builder = new AlertDialog.Builder( MovieSearchFragment.this.getActivity() );
builder.setMessage( R.string.dlg_msg_license ).setTitle( R.string.dlg_title_license ).setPositiveButton( R.string.btn_agree, new DialogInterface.OnClickListener() {
@Override
public void onClick( final DialogInterface dialog, final int id ) {
// the user accepted the license, so store this in the application settings and proceed
Editor prefEditor = appPreferences.edit();
prefEditor.putBoolean( Consts.PREFERENCE_LICENSE_ACCEPTED, true );
prefEditor.putString( Consts.PREFERENCE_LICENSE_AGREEMENT_TIME, SimpleDateFormat.getDateInstance( SimpleDateFormat.FULL, Locale.US ).format( new Date() ) );
prefEditor.commit();
prefEditor = null;
}
} ).setNegativeButton( R.string.btn_disagree, new DialogInterface.OnClickListener() {
@Override
public void onClick( final DialogInterface dialog, final int id ) {
// if the user disagreed, we have to show him the play store for uninstalling the application
try {
final Intent intent = new Intent( Intent.ACTION_VIEW );
intent.setData( Uri.parse( "market://details?id=com.halcyonwaves.apps.meinemediathek" ) );
MovieSearchFragment.this.startActivity( intent );
- MovieSearchFragment.this.getActivity().finish();
} catch( final Exception e ) {
Log.e( MovieSearchFragment.TAG, "Failed to open the Google Play store to rate the application!" );
}
+ MovieSearchFragment.this.getActivity().finish();
}
} ).setCancelable( false );
// show the dialog to the user
final AlertDialog askUserDialog = builder.create();
askUserDialog.show();
}
- //
- final ChangeLogDialog changelogDlg = new ChangeLogDialog( this.getActivity() );
- changelogDlg.show();
+ // just show the changelog if its not the first start, otherwise it wont be interesting for the user
+ else {
+ final ChangeLogDialog changelogDlg = new ChangeLogDialog( this.getActivity() );
+ changelogDlg.show();
+ }
// return the created view for the fragment
return v;
}
}
| false
| true
|
public View onCreateView( final LayoutInflater inflater, final ViewGroup container, final Bundle savedInstanceState ) {
// get the basic view layout from the application resources
final View v = inflater.inflate( R.layout.fragment_moviesearch, container );
// get the handles to the controls we have to access
this.btnSearch = (Button) v.findViewById( R.id.btn_search );
this.etTitleToSearchFor = (EditText) v.findViewById( R.id.et_searchfortitle );
// set the behavior for the search button
this.btnSearch.setOnClickListener( new OnClickListener() {
@Override
public void onClick( final View v ) {
final Intent intent = new Intent( MovieSearchFragment.this.getActivity(), SearchResultsActivity.class );
intent.putExtra( "searchFor", MovieSearchFragment.this.etTitleToSearchFor.getText().toString() );
MovieSearchFragment.this.startActivity( intent );
}
} );
// get the preferences of the application
final SharedPreferences appPreferences = PreferenceManager.getDefaultSharedPreferences( this.getActivity().getApplicationContext() );
// if this is the first time the user uses this application , he or she has to agree that he or she
// will not do anything harmful
if( !appPreferences.getBoolean( Consts.PREFERENCE_LICENSE_ACCEPTED, false ) ) {
// prepare a dialog asking the user he or she really wants to do the download on a mobile connection
final AlertDialog.Builder builder = new AlertDialog.Builder( MovieSearchFragment.this.getActivity() );
builder.setMessage( R.string.dlg_msg_license ).setTitle( R.string.dlg_title_license ).setPositiveButton( R.string.btn_agree, new DialogInterface.OnClickListener() {
@Override
public void onClick( final DialogInterface dialog, final int id ) {
// the user accepted the license, so store this in the application settings and proceed
Editor prefEditor = appPreferences.edit();
prefEditor.putBoolean( Consts.PREFERENCE_LICENSE_ACCEPTED, true );
prefEditor.putString( Consts.PREFERENCE_LICENSE_AGREEMENT_TIME, SimpleDateFormat.getDateInstance( SimpleDateFormat.FULL, Locale.US ).format( new Date() ) );
prefEditor.commit();
prefEditor = null;
}
} ).setNegativeButton( R.string.btn_disagree, new DialogInterface.OnClickListener() {
@Override
public void onClick( final DialogInterface dialog, final int id ) {
// if the user disagreed, we have to show him the play store for uninstalling the application
try {
final Intent intent = new Intent( Intent.ACTION_VIEW );
intent.setData( Uri.parse( "market://details?id=com.halcyonwaves.apps.meinemediathek" ) );
MovieSearchFragment.this.startActivity( intent );
MovieSearchFragment.this.getActivity().finish();
} catch( final Exception e ) {
Log.e( MovieSearchFragment.TAG, "Failed to open the Google Play store to rate the application!" );
}
}
} ).setCancelable( false );
// show the dialog to the user
final AlertDialog askUserDialog = builder.create();
askUserDialog.show();
}
//
final ChangeLogDialog changelogDlg = new ChangeLogDialog( this.getActivity() );
changelogDlg.show();
// return the created view for the fragment
return v;
}
|
public View onCreateView( final LayoutInflater inflater, final ViewGroup container, final Bundle savedInstanceState ) {
// get the basic view layout from the application resources
final View v = inflater.inflate( R.layout.fragment_moviesearch, container );
// get the handles to the controls we have to access
this.btnSearch = (Button) v.findViewById( R.id.btn_search );
this.etTitleToSearchFor = (EditText) v.findViewById( R.id.et_searchfortitle );
// set the behavior for the search button
this.btnSearch.setOnClickListener( new OnClickListener() {
@Override
public void onClick( final View v ) {
final Intent intent = new Intent( MovieSearchFragment.this.getActivity(), SearchResultsActivity.class );
intent.putExtra( "searchFor", MovieSearchFragment.this.etTitleToSearchFor.getText().toString() );
MovieSearchFragment.this.startActivity( intent );
}
} );
// get the preferences of the application
final SharedPreferences appPreferences = PreferenceManager.getDefaultSharedPreferences( this.getActivity().getApplicationContext() );
// if this is the first time the user uses this application , he or she has to agree that he or she
// will not do anything harmful
if( !appPreferences.getBoolean( Consts.PREFERENCE_LICENSE_ACCEPTED, false ) ) {
// prepare a dialog asking the user he or she really wants to do the download on a mobile connection
final AlertDialog.Builder builder = new AlertDialog.Builder( MovieSearchFragment.this.getActivity() );
builder.setMessage( R.string.dlg_msg_license ).setTitle( R.string.dlg_title_license ).setPositiveButton( R.string.btn_agree, new DialogInterface.OnClickListener() {
@Override
public void onClick( final DialogInterface dialog, final int id ) {
// the user accepted the license, so store this in the application settings and proceed
Editor prefEditor = appPreferences.edit();
prefEditor.putBoolean( Consts.PREFERENCE_LICENSE_ACCEPTED, true );
prefEditor.putString( Consts.PREFERENCE_LICENSE_AGREEMENT_TIME, SimpleDateFormat.getDateInstance( SimpleDateFormat.FULL, Locale.US ).format( new Date() ) );
prefEditor.commit();
prefEditor = null;
}
} ).setNegativeButton( R.string.btn_disagree, new DialogInterface.OnClickListener() {
@Override
public void onClick( final DialogInterface dialog, final int id ) {
// if the user disagreed, we have to show him the play store for uninstalling the application
try {
final Intent intent = new Intent( Intent.ACTION_VIEW );
intent.setData( Uri.parse( "market://details?id=com.halcyonwaves.apps.meinemediathek" ) );
MovieSearchFragment.this.startActivity( intent );
} catch( final Exception e ) {
Log.e( MovieSearchFragment.TAG, "Failed to open the Google Play store to rate the application!" );
}
MovieSearchFragment.this.getActivity().finish();
}
} ).setCancelable( false );
// show the dialog to the user
final AlertDialog askUserDialog = builder.create();
askUserDialog.show();
}
// just show the changelog if its not the first start, otherwise it wont be interesting for the user
else {
final ChangeLogDialog changelogDlg = new ChangeLogDialog( this.getActivity() );
changelogDlg.show();
}
// return the created view for the fragment
return v;
}
|
diff --git a/src/navigation/Navigator.java b/src/navigation/Navigator.java
index fa5429a..f867606 100644
--- a/src/navigation/Navigator.java
+++ b/src/navigation/Navigator.java
@@ -1,291 +1,291 @@
package navigation;
/**
* Sets a path on the wavefront grid and navigates tile per tile to destination
*
* @author Team 13
*
*/
import lejos.nxt.UltrasonicSensor;
import lejos.util.Delay;
import odometry.Odometer;
import navigation.Pilot;
import navigation.Map;
public class Navigator extends Pilot{
private Map myMap;
UltrasonicSensor USSensor;
public Navigator(Odometer odo, Map map, UltrasonicSensor USSensor){
super(odo);
this.myMap = map;
this.USSensor = USSensor;
}
public void navigateTo(double destX, double destY){
int [][] grid = setPath(destX, destY); // create the wavefront grid
navigatePath(grid); //navigate the wavefront grid until goal is reached
}
public int[][] setPath(double destX, double destY){
// copy grid to a temporary grid for traveling purposes
int [][] grid = new int[10][10];
for(int i=0;i<myMap.getGrid().length;i++){
for(int j=0;j<myMap.getGrid()[i].length;j++){
grid[i][j] = myMap.getGrid()[i][j];
}
}
// retrieve the positions in terms of grid coordinates
int currentI = myMap.currentI();
int currentJ = myMap.currentJ();
int destJ = myMap.destJ(destX);
int destI = myMap.destI(destY);
// set the goal on the grid map
grid[destI][destJ]=2;
// set the path on the grid map
boolean foundWave = true;
int currentWave = 2; //Looking for goal first
while(foundWave == true){
foundWave = false;
for(int y=0; y<grid.length; y++){
for(int x=0;x<grid[y].length; x++){
if(grid[x][y] == currentWave){
foundWave = true;
int goal_x = x;
int goal_y = y;
//This code checks the NORTH direction
if(goal_x > 0 && grid[goal_x-1][goal_y] == 0){ //This code checks the array bounds heading NORTH
grid[goal_x-1][goal_y] = currentWave + 1;
}
//This code checks the SOUTH direction
if(goal_x < (10 - 1) && grid[goal_x+1][goal_y] == 0){ //This code checks the array bounds heading SOUTH
grid[goal_x+1][goal_y] = currentWave + 1;
}
//This code checks the WEST direction
if(goal_y > 0 && grid[goal_x][goal_y-1] == 0){//This code checks the array bounds heading WEST
grid[goal_x][goal_y-1] = currentWave + 1;
}
//This code checks the EAST direction
if(goal_y < (10 - 1) && grid[goal_x][goal_y+1] == 0){//This code checks the array bounds heading EAST
grid[goal_x][goal_y+1] = currentWave + 1;
}
}
}
}
currentWave++;
}
// set the robot on the grid map
grid[currentI][currentJ]=99;
return grid;
}
public void navigatePath(int [][] grid){
int robot_I = 0;
int robot_J = 0;
// First - find robot location by grid
for(int i=0; i < grid.length; i++){
for(int j=0; j < grid[i].length; j++){
if(grid[i][j] == 99){
robot_I = i;
robot_J = j;
}
}
}
// Second - Found robot location, start deciding next block and continue on until goal is reached
int current_I = robot_I;
int current_J = robot_J;
int current_low = 99;
double destX;
double destY;
double direction = 0.0;
boolean obstacleDetected = false;
while(current_low > 2){
current_low = 99; //Every time, reset to highest number (robot)
int Next_I = 0;
int Next_J = 0;
// Check Array Bounds North
// Is current space occupied?
if(current_I > 0 && grid[current_I-1][current_J] < current_low && grid[current_I-1][current_J] != 1){
current_low = grid[current_I-1][current_J]; //Set next number
Next_I = current_I-1; //Set Next Direction as North
Next_J = current_J;
direction = 90.0;
}
// Check Array Bounds South
// Is current space occupied?
if(current_I < (10 - 1) && grid[current_I+1][current_J] < current_low && grid[current_I+1][current_J] != 1){
current_low = grid[current_I+1][current_J]; //Set next number
Next_I = current_I+1; //Set Next Direction as South
Next_J = current_J;
direction = 270.0;
}
// Check Array Bounds West
// Is current space occupied?
if(current_J > 0 && grid[current_I][current_J-1] < current_low && grid[current_I][current_J-1] != 1){
current_low = grid[current_I][current_J-1]; //Set next number
Next_I = current_I; //Set Next Direction as west
Next_J = current_J-1;
direction = 180.0;
}
// Check Array Bounds East
// Is current space occupied?
if(current_J < (10 - 1) && grid[current_I][current_J+1] < current_low && grid[current_I][current_J+1] != 1){
current_low = grid[current_I][current_J+1]; //Set next number
Next_I = current_I; //Set Next Direction as East
Next_J = current_J+1;
direction = 0.00;
}
// Okay - We know the number we're heading for, the direction and the coordinates.
destX = destX(Next_J);
destY = destY(Next_I);
// turn to direction of next tile
- turnTo(direction, true);
+ turnTo(direction, false);
try { Thread.sleep(500); } catch (InterruptedException e) {}
// check if obstacle is detected
int o1 = getFilteredData();
Delay.msDelay(50);
int o2 = getFilteredData();
Delay.msDelay(50);
int o3 = getFilteredData();
Delay.msDelay(50);
int o4 = getFilteredData();
Delay.msDelay(50);
int o5 = getFilteredData();
if(o1<35 && o2<35 && o3<35 && o4<35 && o5<35 && grid[Next_I][Next_J] != 1){
// stop the following travelTo method
obstacleDetected = true;
// set the obstacle on the Map grid
myMap.getGrid()[Next_I][Next_J] = Map.OBSTACLE;
// find destination location by grid
int finalJ = 0;
int finalI = 0;
for(int i=0; i < grid.length; i++){
for(int j=0; j < grid[i].length; j++){
if(grid[i][j] == 2){
finalI = i;
finalJ = j;
}
}
}
// recursion
navigateTo(myMap.destX(finalJ),myMap.destY(finalI));
// terminate this loop
current_low = 2;
}
// if no obstacle move to next block
if(!obstacleDetected){
- travelTo(destX, destY); // travel to next tile
+ travelTo2(destX, destY); // travel to next tile
current_I = Next_I; // update new I position for loop
current_J = Next_J; // update new J position for loop
}
}
}
private int getFilteredData() {
int distance;
int filterControl = 0;
int FILTER_OUT = 20;
// do a ping
USSensor.ping();
// wait for the ping to complete
try { Thread.sleep(100); } catch (InterruptedException e) {}
// there will be a delay here
distance = USSensor.getDistance();
//Rudimentary filter from wall following lab
if (distance == 255 && filterControl < FILTER_OUT) {
// bad value, do not set the distance variable, however do increment the filter value
filterControl ++;
} else if (distance == 255){
// true 255, therefore set distance to 255
} else {
// distance went below 255, therefore reset everything.
filterControl = 0;
}
return distance;
}
public static double destX(int destJ){
double[] coordsX = new double[10];
double[] coordsY = new double[10];
double tileWidth = 30.0;
double sumX = tileWidth/2;
double sumY = tileWidth/2 + (tileWidth)*9;
// build x coordinates of map
for(int i=0;i<coordsX.length;i++){
coordsX[i] = sumX;
sumX = sumX + tileWidth;
}
//build y coordinates of map
for(int j=0;j<coordsY.length;j++){
coordsY[j] = sumY;
sumY = sumY - tileWidth;
}
return coordsX[destJ];
}
public static double destY(int destI){
double[] coordsX = new double[10];
double[] coordsY = new double[10];
double tileWidth = 30.0;
double sumX = tileWidth/2;
double sumY = tileWidth/2 + (tileWidth)*9;
// build x coordinates of map
for(int i=0;i<coordsX.length;i++){
coordsX[i] = sumX;
sumX = sumX + tileWidth;
}
//build y coordinates of map
for(int j=0;j<coordsY.length;j++){
coordsY[j] = sumY;
sumY = sumY - tileWidth;
}
return coordsY[destI];
}
}
| false
| true
|
public void navigatePath(int [][] grid){
int robot_I = 0;
int robot_J = 0;
// First - find robot location by grid
for(int i=0; i < grid.length; i++){
for(int j=0; j < grid[i].length; j++){
if(grid[i][j] == 99){
robot_I = i;
robot_J = j;
}
}
}
// Second - Found robot location, start deciding next block and continue on until goal is reached
int current_I = robot_I;
int current_J = robot_J;
int current_low = 99;
double destX;
double destY;
double direction = 0.0;
boolean obstacleDetected = false;
while(current_low > 2){
current_low = 99; //Every time, reset to highest number (robot)
int Next_I = 0;
int Next_J = 0;
// Check Array Bounds North
// Is current space occupied?
if(current_I > 0 && grid[current_I-1][current_J] < current_low && grid[current_I-1][current_J] != 1){
current_low = grid[current_I-1][current_J]; //Set next number
Next_I = current_I-1; //Set Next Direction as North
Next_J = current_J;
direction = 90.0;
}
// Check Array Bounds South
// Is current space occupied?
if(current_I < (10 - 1) && grid[current_I+1][current_J] < current_low && grid[current_I+1][current_J] != 1){
current_low = grid[current_I+1][current_J]; //Set next number
Next_I = current_I+1; //Set Next Direction as South
Next_J = current_J;
direction = 270.0;
}
// Check Array Bounds West
// Is current space occupied?
if(current_J > 0 && grid[current_I][current_J-1] < current_low && grid[current_I][current_J-1] != 1){
current_low = grid[current_I][current_J-1]; //Set next number
Next_I = current_I; //Set Next Direction as west
Next_J = current_J-1;
direction = 180.0;
}
// Check Array Bounds East
// Is current space occupied?
if(current_J < (10 - 1) && grid[current_I][current_J+1] < current_low && grid[current_I][current_J+1] != 1){
current_low = grid[current_I][current_J+1]; //Set next number
Next_I = current_I; //Set Next Direction as East
Next_J = current_J+1;
direction = 0.00;
}
// Okay - We know the number we're heading for, the direction and the coordinates.
destX = destX(Next_J);
destY = destY(Next_I);
// turn to direction of next tile
turnTo(direction, true);
try { Thread.sleep(500); } catch (InterruptedException e) {}
// check if obstacle is detected
int o1 = getFilteredData();
Delay.msDelay(50);
int o2 = getFilteredData();
Delay.msDelay(50);
int o3 = getFilteredData();
Delay.msDelay(50);
int o4 = getFilteredData();
Delay.msDelay(50);
int o5 = getFilteredData();
if(o1<35 && o2<35 && o3<35 && o4<35 && o5<35 && grid[Next_I][Next_J] != 1){
// stop the following travelTo method
obstacleDetected = true;
// set the obstacle on the Map grid
myMap.getGrid()[Next_I][Next_J] = Map.OBSTACLE;
// find destination location by grid
int finalJ = 0;
int finalI = 0;
for(int i=0; i < grid.length; i++){
for(int j=0; j < grid[i].length; j++){
if(grid[i][j] == 2){
finalI = i;
finalJ = j;
}
}
}
// recursion
navigateTo(myMap.destX(finalJ),myMap.destY(finalI));
// terminate this loop
current_low = 2;
}
// if no obstacle move to next block
if(!obstacleDetected){
travelTo(destX, destY); // travel to next tile
current_I = Next_I; // update new I position for loop
current_J = Next_J; // update new J position for loop
}
}
}
|
public void navigatePath(int [][] grid){
int robot_I = 0;
int robot_J = 0;
// First - find robot location by grid
for(int i=0; i < grid.length; i++){
for(int j=0; j < grid[i].length; j++){
if(grid[i][j] == 99){
robot_I = i;
robot_J = j;
}
}
}
// Second - Found robot location, start deciding next block and continue on until goal is reached
int current_I = robot_I;
int current_J = robot_J;
int current_low = 99;
double destX;
double destY;
double direction = 0.0;
boolean obstacleDetected = false;
while(current_low > 2){
current_low = 99; //Every time, reset to highest number (robot)
int Next_I = 0;
int Next_J = 0;
// Check Array Bounds North
// Is current space occupied?
if(current_I > 0 && grid[current_I-1][current_J] < current_low && grid[current_I-1][current_J] != 1){
current_low = grid[current_I-1][current_J]; //Set next number
Next_I = current_I-1; //Set Next Direction as North
Next_J = current_J;
direction = 90.0;
}
// Check Array Bounds South
// Is current space occupied?
if(current_I < (10 - 1) && grid[current_I+1][current_J] < current_low && grid[current_I+1][current_J] != 1){
current_low = grid[current_I+1][current_J]; //Set next number
Next_I = current_I+1; //Set Next Direction as South
Next_J = current_J;
direction = 270.0;
}
// Check Array Bounds West
// Is current space occupied?
if(current_J > 0 && grid[current_I][current_J-1] < current_low && grid[current_I][current_J-1] != 1){
current_low = grid[current_I][current_J-1]; //Set next number
Next_I = current_I; //Set Next Direction as west
Next_J = current_J-1;
direction = 180.0;
}
// Check Array Bounds East
// Is current space occupied?
if(current_J < (10 - 1) && grid[current_I][current_J+1] < current_low && grid[current_I][current_J+1] != 1){
current_low = grid[current_I][current_J+1]; //Set next number
Next_I = current_I; //Set Next Direction as East
Next_J = current_J+1;
direction = 0.00;
}
// Okay - We know the number we're heading for, the direction and the coordinates.
destX = destX(Next_J);
destY = destY(Next_I);
// turn to direction of next tile
turnTo(direction, false);
try { Thread.sleep(500); } catch (InterruptedException e) {}
// check if obstacle is detected
int o1 = getFilteredData();
Delay.msDelay(50);
int o2 = getFilteredData();
Delay.msDelay(50);
int o3 = getFilteredData();
Delay.msDelay(50);
int o4 = getFilteredData();
Delay.msDelay(50);
int o5 = getFilteredData();
if(o1<35 && o2<35 && o3<35 && o4<35 && o5<35 && grid[Next_I][Next_J] != 1){
// stop the following travelTo method
obstacleDetected = true;
// set the obstacle on the Map grid
myMap.getGrid()[Next_I][Next_J] = Map.OBSTACLE;
// find destination location by grid
int finalJ = 0;
int finalI = 0;
for(int i=0; i < grid.length; i++){
for(int j=0; j < grid[i].length; j++){
if(grid[i][j] == 2){
finalI = i;
finalJ = j;
}
}
}
// recursion
navigateTo(myMap.destX(finalJ),myMap.destY(finalI));
// terminate this loop
current_low = 2;
}
// if no obstacle move to next block
if(!obstacleDetected){
travelTo2(destX, destY); // travel to next tile
current_I = Next_I; // update new I position for loop
current_J = Next_J; // update new J position for loop
}
}
}
|
diff --git a/Statics/src/edu/gatech/statics/modes/fbd/FBDChecker.java b/Statics/src/edu/gatech/statics/modes/fbd/FBDChecker.java
index c6f34865..346ec9bf 100644
--- a/Statics/src/edu/gatech/statics/modes/fbd/FBDChecker.java
+++ b/Statics/src/edu/gatech/statics/modes/fbd/FBDChecker.java
@@ -1,943 +1,943 @@
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package edu.gatech.statics.modes.fbd;
import edu.gatech.statics.application.StaticsApplication;
import edu.gatech.statics.exercise.Diagram;
import edu.gatech.statics.exercise.Exercise;
import edu.gatech.statics.math.AnchoredVector;
import edu.gatech.statics.math.Unit;
import edu.gatech.statics.math.Vector;
import edu.gatech.statics.math.Vector3bd;
import edu.gatech.statics.objects.Body;
import edu.gatech.statics.objects.Connector;
import edu.gatech.statics.objects.Load;
import edu.gatech.statics.objects.Measurement;
import edu.gatech.statics.objects.Point;
import edu.gatech.statics.objects.SimulationObject;
import edu.gatech.statics.objects.bodies.Cable;
import edu.gatech.statics.objects.bodies.TwoForceMember;
import edu.gatech.statics.objects.connectors.Connector2ForceMember2d;
import edu.gatech.statics.objects.connectors.Fix2d;
import edu.gatech.statics.objects.connectors.Pin2d;
import edu.gatech.statics.util.Pair;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Logger;
/**
*
* @author Calvin Ashmore
*/
public class FBDChecker {
private FreeBodyDiagram diagram;
//private Joint nextJoint;
//private boolean done = false;
private boolean verbose = true;
protected FreeBodyDiagram getDiagram() {
return diagram;
}
public FBDChecker(FreeBodyDiagram diagram) {
this.diagram = diagram;
}
/**
* Get all of the symbolic measurements in the schematic, for making sure their names
* do are not being used for AnchoredVectors.
* @return
*/
private List<Measurement> getSymbolicMeasurements() {
List<Measurement> m = new ArrayList<Measurement>();
for (SimulationObject obj : FreeBodyDiagram.getSchematic().allObjects()) {
if (obj instanceof Measurement && ((Measurement) obj).isSymbol()) {
m.add((Measurement) obj);
}
}
return m;
}
/**
* The verbose flag lets the checker know whether to report information on failure.
* Verbose output will report both information to the logger and to the advice box.
* @param enable
*/
public void setVerbose(boolean enable) {
verbose = enable;
}
/**
* Get all the points in the schematic, to check against for force names.
* @return
*/
private List<Point> getAllPoints() {
List<Point> m = new ArrayList<Point>();
for (SimulationObject obj : FreeBodyDiagram.getSchematic().allObjects()) {
if (obj instanceof Point) {
m.add((Point) obj);
}
}
return m;
}
/**
* Get the given AnchoredVectors that are present in the diagram.
* The givens are AnchoredVectors present in the schematic, and should be added to the diagram
* by the user in the FBD. Givens are first looked up in the symbol manager to
* see if a stored symbol has been used.
* @return
*/
private List<AnchoredVector> getGivenLoads() {
List<AnchoredVector> givenLoads = new ArrayList<AnchoredVector>();
// look through everything in the schematic, we want to pick out the loads
// on the correct bodies.
for (Body body : FreeBodyDiagram.getSchematic().allBodies()) {
if (diagram.getBodySubset().getBodies().contains(body)) {
for (SimulationObject obj : body.getAttachedObjects()) {
if (obj instanceof Load) {
Load given = (Load) obj;
// attempt to find an equivalent that might have been stored in the symbol manager.
AnchoredVector symbolEquivalent = Exercise.getExercise().getSymbolManager().getLoad(given.getAnchoredVector());
if (symbolEquivalent != null) {
givenLoads.add(symbolEquivalent);
} else {
givenLoads.add(given.getAnchoredVector());
}
}
}
}
}
return givenLoads;
}
private void logInfo(String info) {
if (verbose) {
Logger.getLogger("Statics").info(info);
}
}
private void setAdviceKey(String key, Object... parameters) {
if (verbose) {
StaticsApplication.getApp().setAdviceKey(key, parameters);
}
}
public boolean checkDiagram() {
//done = false;
// step 1: assemble a list of all the forces the user has added.
List<AnchoredVector> addedLoads = new ArrayList<AnchoredVector>(diagram.getCurrentState().getAddedLoads());
logInfo("check: user added AnchoredVectors: " + addedLoads);
if (addedLoads.size() <= 0) {
logInfo("check: diagram does not contain any AnchoredVectors");
logInfo("check: FAILED");
setAdviceKey("fbd_feedback_check_fail_add");
return false;
}
// step 2: for vectors that we can click on and add, ie, given added forces,
// make sure that the user has added all of them.
for (AnchoredVector given : getGivenLoads()) {
boolean ok = performGivenCheck(addedLoads, given);
if (!ok) {
return false;
}
}
// step 3: Make sure weights exist, and remove them from our addedForces.
for (Body body : diagram.getBodySubset().getBodies()) {
if (body.getWeight().getDiagramValue().floatValue() == 0) {
continue;
}
AnchoredVector weight = new AnchoredVector(
body.getCenterOfMassPoint(),
new Vector(Unit.force, Vector3bd.UNIT_Y.negate(),
new BigDecimal(body.getWeight().doubleValue())));
boolean ok = performWeightCheck(addedLoads, weight, body);
if (!ok) {
return false;
}
}
// Step 4: go through all the border connectors connecting this FBD to the external world,
// and check each AnchoredVector implied by the connector.
for (int i = 0; i < diagram.allObjects().size(); i++) {
SimulationObject obj = diagram.allObjects().get(i);
if (!(obj instanceof Connector)) {
continue;
}
Connector connector = (Connector) obj;
// find the body in this diagram to which the connector is attached.
Body body = null;
if (diagram.allBodies().contains(connector.getBody1())) {
body = connector.getBody1();
}
if (diagram.allBodies().contains(connector.getBody2())) {
body = connector.getBody2();
}
// ^ is java's XOR operator
// we want the joint IF it connects a body in the body list
// to a body that is not in the body list. This means xor.
if (!(diagram.getBodySubset().getBodies().contains(connector.getBody1()) ^
diagram.getBodySubset().getBodies().contains(connector.getBody2()))) {
continue;
}
// build a list of the AnchoredVectors at this point
List<AnchoredVector> userAnchoredVectorsAtConnector = new ArrayList<AnchoredVector>();
for (AnchoredVector AnchoredVector : addedLoads) {
if (AnchoredVector.getAnchor().equals(connector.getAnchor())) {
userAnchoredVectorsAtConnector.add(AnchoredVector);
}
}
logInfo("check: testing connector: " + connector);
// special case, userAnchoredVectorsAtConnector is empty:
if (userAnchoredVectorsAtConnector.isEmpty()) {
logInfo("check: have any forces been added");
logInfo("check: FAILED");
setAdviceKey("fbd_feedback_check_fail_joint_reaction", connector.connectorName(), connector.getAnchor().getLabelText());
return false;
}
// //this is trying to make sure two force members have the same values at either end
// if (body instanceof TwoForceMember) {
// List<AnchoredVector> userAnchoredVectorsAtOtherConnector = new ArrayList<AnchoredVector>();
// Connector con;
// if (((TwoForceMember) body).getConnector1() == connector) {
// con = ((TwoForceMember) body).getConnector2();
// } else {
// con = ((TwoForceMember) body).getConnector1();
// }
// for (AnchoredVector AnchoredVector : addedAnchoredVectors) {
// if (AnchoredVector.getAnchor().equals(con.getAnchor())) {
// userAnchoredVectorsAtOtherConnector.add(AnchoredVector);
// }
// }
// if (!userAnchoredVectorsAtConnector.get(0).getLabelText().equalsIgnoreCase(userAnchoredVectorsAtOtherConnector.get(0).getLabelText())) {
// logInfo("check: the user has given a 2ForceMember's AnchoredVectors different values");
// logInfo("check: FAILED");
// setAdviceKey("fbd_feedback_check_fail_2force_not_same");
// return false;
// }
// }
ConnectorCheckResult connectorResult = checkConnector(userAnchoredVectorsAtConnector, connector, body);
switch (connectorResult) {
case passed:
// okay, the check passed without complaint.
// The AnchoredVectors may still not be correct, but that will be tested afterwards.
// for now, continue normally.
break;
case inappropriateDirection:
// check for special case of 2FM:
logInfo("check: User added AnchoredVectors at " + connector.getAnchor().getName() + ": " + userAnchoredVectorsAtConnector);
logInfo("check: Was expecting: " + getReactionAnchoredVectors(connector, connector.getReactions(body)));
if (connector instanceof Connector2ForceMember2d) {
Connector2ForceMember2d connector2fm = (Connector2ForceMember2d) connector;
if (connector2fm.getMember() instanceof Cable) {
// special message for cables:
logInfo("check: user created a cable in compression at point " + connector.getAnchor().getName());
logInfo("check: FAILED");
setAdviceKey("fbd_feedback_check_fail_joint_cable",
connector.getAnchor().getName(),
connector2fm.getMember());
return false;
}
} else {
// one of the directions is the wrong way, and it's not a cable this time
// it is probably a roller or something.
logInfo("check: AnchoredVectors have wrong direction at point " + connector.getAnchor().getName());
logInfo("check: FAILED");
setAdviceKey("fbd_feedback_check_fail_some_reverse", connector.getAnchor().getName());
return false;
}
case somethingExtra:
// this particular check could be fine
// in some problems there are multiple connectors at one point (notably in frame problems)
// and this means that extra AnchoredVectors are okay. We check to see if multiple connectors are present,
// and if so, continue gracefully, as inapporpriate extra things will be checked at the end
// otherwise the check will continue to the next step, "missingSomething" where other conditions
// will be tested.
if (diagram.getConnectorsAtPoint(connector.getAnchor()).size() > 1) {
// continue on.
break;
}
case missingSomething:
// okay, if we are here then either something is missing, or something is extra.
// check against pins or rollers and see what happens.
logInfo("check: User added AnchoredVectors at " + connector.getAnchor().getName() + ": " + userAnchoredVectorsAtConnector);
logInfo("check: Was expecting: " + getReactionAnchoredVectors(connector, connector.getReactions(body)));
// check if this is mistaken for a pin
if (!connector.connectorName().equals("pin")) {
Pin2d testPin = new Pin2d(connector.getAnchor());
if (checkConnector(userAnchoredVectorsAtConnector, testPin, null) == ConnectorCheckResult.passed) {
logInfo("check: user wrongly created a pin at point " + connector.getAnchor().getLabelText());
logInfo("check: FAILED");
setAdviceKey("fbd_feedback_check_fail_joint_wrong_type", connector.getAnchor().getLabelText(), "pin", connector.connectorName());
return false;
}
}
// check if this is mistaken for a fix
if (!connector.connectorName().equals("fix")) {
Fix2d testFix = new Fix2d(connector.getAnchor());
if (checkConnector(userAnchoredVectorsAtConnector, testFix, null) == ConnectorCheckResult.passed) {
logInfo("check: user wrongly created a fix at point " + connector.getAnchor().getLabelText());
logInfo("check: FAILED");
setAdviceKey("fbd_feedback_check_fail_joint_wrong_type", connector.getAnchor().getLabelText(), "fix", connector.connectorName());
return false;
}
}
// otherwise, the user did something strange.
logInfo("check: user simply added reactions to a joint that don't make sense to point " + connector.getAnchor().getLabelText());
logInfo("check: FAILED");
setAdviceKey("fbd_feedback_check_fail_joint_wrong", connector.connectorName(), connector.getAnchor().getLabelText());
return false;
}
// okay, now the connector test has passed.
// We know now that the AnchoredVectors present in the diagram satisfy the reactions for the connector.
// All reactions AnchoredVectors are necessarily symbolic, and thus will either be new symbols, or
// they will be present in the symbol manager.
List<AnchoredVector> expectedReactions = getReactionAnchoredVectors(connector, connector.getReactions(body));
for (AnchoredVector reaction : expectedReactions) {
// get a AnchoredVector and result corresponding to this check.
AnchoredVector loadFromSymbolManager = Exercise.getExercise().getSymbolManager().getLoad(reaction);
if (loadFromSymbolManager != null) {
// make sure the directions are pointing the correct way:
if (reaction.getVectorValue().equals(loadFromSymbolManager.getVectorValue().negate())) {
loadFromSymbolManager = new AnchoredVector(loadFromSymbolManager);
- loadFromSymbolManager.getVectorValue().negate();
+ loadFromSymbolManager.getVectorValue().negateLocal();
}
// of the user AnchoredVectors, only check those which point in maybe the right direction
List<AnchoredVector> userAnchoredVectorsAtConnectorInDirection = new ArrayList<AnchoredVector>();
for (AnchoredVector AnchoredVector : userAnchoredVectorsAtConnector) {
if (AnchoredVector.getVectorValue().equals(reaction.getVectorValue()) ||
AnchoredVector.getVectorValue().equals(reaction.getVectorValue().negate())) {
userAnchoredVectorsAtConnectorInDirection.add(AnchoredVector);
}
}
Pair<AnchoredVector, AnchoredVectorCheckResult> result = checkAllCandidatesAgainstTarget(
userAnchoredVectorsAtConnectorInDirection, loadFromSymbolManager);
AnchoredVector candidate = result.getLeft();
// this AnchoredVector has been solved for already. Now we can check against it.
if (result.getRight() == AnchoredVectorCheckResult.passed) {
// check is OK, we can remove the AnchoredVector from our addedAnchoredVectors.
addedLoads.remove(candidate);
} else {
complainAboutAnchoredVectorCheck(result.getRight(), candidate);
return false;
}
} else {
// this AnchoredVector is new, so it requires a name check.
// let's find a AnchoredVector that seems to match the expected reaction.
AnchoredVector candidate = null;
for (AnchoredVector possibleCandidate : userAnchoredVectorsAtConnector) {
// we know that these all are at the right anchor, so only test direction.
// direction may also be negated, since these are new symbols.
if (possibleCandidate.getVectorValue().equals(reaction.getVectorValue()) ||
possibleCandidate.getVectorValue().equals(reaction.getVectorValue().negate())) {
candidate = possibleCandidate;
}
}
// candidate should not be null at this point since the main test passed.
NameCheckResult nameResult;
if (connector instanceof Connector2ForceMember2d) {
nameResult = checkAnchoredVectorName2FM(candidate, (Connector2ForceMember2d) connector);
} else {
nameResult = checkLoadName(candidate);
}
if (nameResult == NameCheckResult.passed) {
// we're okay!!
addedLoads.remove(candidate);
} else {
complainAboutName(nameResult, candidate);
return false;
}
}
}
}
// Step 5: Make sure we've used all the user added forces.
if (!addedLoads.isEmpty()) {
logInfo("check: user added more forces than necessary: " + addedLoads);
logInfo("check: FAILED");
setAdviceKey("fbd_feedback_check_fail_additional", addedLoads.get(0).getAnchor().getName());
return false;
}
// Step 6: Verify labels
// verify that all unknowns are symbols
// these are reaction forces and moments
// knowns should not be symbols: externals, weights
// symbols must also not be repeated, unless this is valid somehow? (not yet)
// Yay, we've passed the test!
logInfo("check: PASSED!");
return true;
}
/**
* Checks against a given AnchoredVector.
* The check removes the candidate from addedAnchoredVectors if the check passes.
* @param addedAnchoredVectors
* @param given
* @return
*/
protected boolean performGivenCheck(List<AnchoredVector> addedAnchoredVectors, AnchoredVector given) {
List<AnchoredVector> candidates = getCandidates(addedAnchoredVectors, given, given.isSymbol() && !given.isKnown());
// try all candidates
// realistically there should only be one, but this check tries to be secure.
Pair<AnchoredVector, AnchoredVectorCheckResult> result = checkAllCandidatesAgainstTarget(candidates, given);
// we have no candidates, so terminate.
if (result.getRight() == null) {
//user has forgotten to add a given AnchoredVector
logInfo("check: diagram does not contain given AnchoredVector " + given);
logInfo("check: FAILED");
setAdviceKey("fbd_feedback_check_fail_given", given.getAnchor().getLabelText());
return false;
}
AnchoredVector candidate = result.getLeft();
// report failures
switch (result.getRight()) {
case passed:
// Our test has passed, we can continue.
addedAnchoredVectors.remove(candidate);
break;
case shouldNotBeNumeric:
//A given value that should be symbolic has been added as numeric
logInfo("check: external value should be a symbol at point" + given.getAnchor().getName());
logInfo("check: FAILED");
setAdviceKey("fbd_feedback_check_fail_given_symbol", candidate.getQuantity().toString(), candidate.getAnchor().getLabelText());
return false;
case shouldNotBeSymbol:
//A given value that should be numeric has been added as symbolic
logInfo("check: external value should be a numeric at point" + given.getAnchor().getLabelText());
logInfo("check: FAILED");
setAdviceKey("fbd_feedback_check_fail_given_number", candidate.getQuantity().toString(), candidate.getAnchor().getLabelText());
return false;
case wrongSymbol:
// user has given a symbol that does not match the symbol of the given AnchoredVector.
// this is generally okay, but we want there to be consistency if the user has already put a name down.
if (Exercise.getExercise().getSymbolManager().getLoad(candidate) == null) {
// we're okay
addedAnchoredVectors.remove(candidate);
break;
}
default:
complainAboutAnchoredVectorCheck(result.getRight(), candidate);
return false;
}
// user candidate is a symbolic value
if (candidate.isSymbol() && Exercise.getExercise().getSymbolManager().getLoad(candidate) == null) {
NameCheckResult nameResult = checkLoadName(candidate);
if (nameResult != NameCheckResult.passed) {
complainAboutName(nameResult, candidate);
return false;
}
}
return true;
}
/**
* Checks against a weight. This method is very similar to the Given check,
* but uses different log and feedback messages. A good way to do the check might be to abstract them out,
* but the difference is kind of immaterial at this point.
* The check removes the candidate from addedAnchoredVectors if the check passes.
* @param addedAnchoredVectors
* @param given
* @return
*/
protected boolean performWeightCheck(List<AnchoredVector> addedAnchoredVectors, AnchoredVector weight, Body body) {
List<AnchoredVector> candidates = getCandidates(addedAnchoredVectors, weight, weight.isSymbol() && !weight.isKnown());
// try all candidates
// realistically there should only be one, but this check tries to be secure.
Pair<AnchoredVector, AnchoredVectorCheckResult> result = checkAllCandidatesAgainstTarget(candidates, weight);
// we have no candidates, so terminate.
if (result.getRight() == null) {
// weight does not exist in system.
logInfo("check: diagram does not contain weight for " + body);
logInfo("check: weight is: " + weight);
logInfo("check: FAILED");
setAdviceKey("fbd_feedback_check_fail_weight", body.getName());
return false;
}
AnchoredVector candidate = result.getLeft();
// report failures
switch (result.getRight()) {
case passed:
// Our test has passed, we can continue.
addedAnchoredVectors.remove(candidate);
break;
case shouldNotBeNumeric:
//A given value that should be symbolic has been added as numeric
logInfo("check: weight should be a symbol at point" + weight.getAnchor().getName());
logInfo("check: FAILED");
setAdviceKey("fbd_feedback_check_fail_weight_symbol", body.getName());
return false;
case shouldNotBeSymbol:
//A given value that should be numeric has been added as symbolic
logInfo("check: weight should be numeric at point" + weight.getAnchor().getName());
logInfo("check: FAILED");
setAdviceKey("fbd_feedback_check_fail_weight_number", body.getName());
return false;
case wrongNumericValue:
// wrong numeric value
logInfo("check: diagram contains incorrect weight " + weight);
logInfo("check: FAILED");
setAdviceKey("fbd_feedback_check_fail_weight_value", body.getName());
return false;
default:
complainAboutAnchoredVectorCheck(result.getRight(), candidate);
return false;
}
// user candidate is a symbolic value
if (candidate.isSymbol() && Exercise.getExercise().getSymbolManager().getLoad(candidate) == null) {
NameCheckResult nameResult = checkLoadName(candidate);
if (nameResult != NameCheckResult.passed) {
complainAboutName(nameResult, candidate);
return false;
}
}
return true;
}
private void complainAboutAnchoredVectorCheck(AnchoredVectorCheckResult result, AnchoredVector candidate) {
switch (result) {
case shouldNotBeNumeric:
logInfo("check: force should not be numeric: " + candidate);
logInfo("check: FAILED");
setAdviceKey("fbd_feedback_check_fail_numeric", candidate.getUnit().toString(), candidate.getQuantity().toString(), candidate.getAnchor().getName());
return;
case shouldNotBeSymbol:
logInfo("check: force should not be symbol: " + candidate);
logInfo("check: FAILED");
setAdviceKey("fbd_feedback_check_fail_symbol", candidate.getUnit().toString(), candidate.getAnchor().getLabelText(), candidate.getAnchor().getName());
return;
case wrongNumericValue:
logInfo("check: numeric values do not match: " + candidate);
logInfo("check: FAILED");
setAdviceKey("fbd_feedback_check_fail_not_same_number", candidate.getUnit().toString(), candidate.getQuantity().toString(), candidate.getAnchor().getName());
return;
case wrongDirection:
logInfo("check: AnchoredVector is pointing the wrong direction: " + candidate);
logInfo("check: FAILED");
setAdviceKey("fbd_feedback_check_fail_reverse", candidate.getUnit().toString(), candidate.getQuantity().toString(), candidate.getAnchor().getName());
return;
case wrongSymbol:
//the student has created a AnchoredVector with a name that doesn't match its opposing force
logInfo("check: AnchoredVector should equal its opposite: " + candidate);
logInfo("check: FAILED");
setAdviceKey("fbd_feedback_check_fail_not_same_symbol", candidate.getUnit().toString(), candidate.getQuantity().toString(), candidate.getAnchor().getName());
return;
}
}
private void complainAboutName(NameCheckResult result, AnchoredVector candidate) {
switch (result) {
case duplicateInThisDiagram:
case matchesSymbolElsewhere:
logInfo("check: forces and moments should not have the same name as any other force or moment: " + candidate);
logInfo("check: FAILED");
setAdviceKey("fbd_feedback_check_fail_duplicate", candidate.getUnit().toString(), candidate.getAnchor().getLabelText());
return;
case matchesMeasurementSymbol:
logInfo("check: force or moment should not share the same name with an unknown measurement ");
logInfo("check: FAILED");
setAdviceKey("fbd_feedback_check_fail_duplicate_measurement", candidate.getUnit().toString(), candidate.getAnchor().getLabelText());
return;
case matchesPointName:
logInfo("check: anchors and added force/moments should not share names");
logInfo("check: FAILED");
setAdviceKey("fbd_feedback_check_fail_duplicate_anchor", candidate.getUnit().toString(), candidate.getAnchor().getLabelText());
return;
case shouldMatch2FM:
//the student has created a 2FM with non matching forces
logInfo("check: forces on a 2FM need to have the same name: " + candidate);
logInfo("check: FAILED");
setAdviceKey("fbd_feedback_check_fail_2force_not_same");
return;
}
}
protected enum NameCheckResult {
passed, // passed, no conficts
matchesSymbolElsewhere, // same as a symbolic AnchoredVector from another diagram
matchesPointName, // same as the name for a point
matchesMeasurementSymbol, // same as a symbol used in an unknown measurement
duplicateInThisDiagram, // two AnchoredVectors incorrectly have the same name in this diagram
shouldMatch2FM // the opposing forces of a 2FM should match
}
/**
* This check makes sure this AnchoredVector has a suitable name. The given candidate
* must be a symbolic AnchoredVector.
* This check is intended for AnchoredVectors which have *not yet* been added to the symbol manager.
* This check will go through and make sure the name does not coincide with that of a point or
* a different AnchoredVector in this diagram or in other diagrams. A special case must be
* made with Two Force Members, and for these it is necessary to use checkAnchoredVectorName2FM().
* @param candidate
* @return
*/
protected NameCheckResult checkLoadName(AnchoredVector candidate) {
String name = candidate.getSymbolName();
for (SimulationObject obj : Diagram.getSchematic().allObjects()) {
// look through simulation objects to find name conflicts
// first look at measurements
if (obj instanceof Measurement) {
Measurement measure = (Measurement) obj;
if (measure.isSymbol() && measure.getSymbolName().equalsIgnoreCase(name)) {
return NameCheckResult.matchesMeasurementSymbol;
}
}
// then points
if (obj instanceof Point) {
if (name.equalsIgnoreCase(obj.getName())) {
return NameCheckResult.matchesPointName;
}
}
}
// look through other symbols stored in the symbol manager
if (Exercise.getExercise().getSymbolManager().getSymbols().contains(name)) {
//the name exists elsewhere in the fbd
return NameCheckResult.matchesSymbolElsewhere;
}
// now look through other AnchoredVectors in this diagram.
for (AnchoredVector load : diagram.getCurrentState().getAddedLoads()) {
if (load.equals(candidate)) {
continue;
}
if (candidate.getSymbolName().equalsIgnoreCase(load.getSymbolName())) {
return NameCheckResult.duplicateInThisDiagram;
}
}
return NameCheckResult.passed;
}
/**
* This is an extension of the checkAnchoredVectorName() method, which applies specifically
* to AnchoredVectors which are reactions to Two Force Members. This method assumes that the candidate provided is
* actually the reaction force to the 2fm.
* @param candidate
* @param connector
* @return
*/
protected NameCheckResult checkAnchoredVectorName2FM(AnchoredVector candidate, Connector2ForceMember2d connector) {
NameCheckResult result = checkLoadName(candidate);
//if we passed, which we usually want, this means that the AnchoredVectors' labels
//do not match, which is bad
if (result == NameCheckResult.passed) {
for (SimulationObject obj : connector.getMember().getAttachedObjects()) {
if (!(obj instanceof Load)) {
continue;
}
if (connector.getMember().containsPoints(candidate.getAnchor(), ((Load) obj).getAnchor())) {
return NameCheckResult.shouldMatch2FM;
}
}
}
// if the result of the standard check is anything but "there is a duplicate in this diagram"
// then we can return that result. We are only interested in the case where there
// might be a second AnchoredVector with the same name, which implies a duplicate.
if (result != NameCheckResult.duplicateInThisDiagram) {
return result;
}
TwoForceMember member = connector.getMember();
if (!diagram.allBodies().contains(member)) {
return result;
}
Connector2ForceMember2d otherConnector;
if (member.getConnector1() == connector) {
otherConnector = member.getConnector2();
} else if (member.getConnector2() == connector) {
otherConnector = member.getConnector1();
} else {
// shouldn't get here, but fail gracefully
return result;
}
// get the other AnchoredVector that satisfies the reactions of the otherConnector
List<AnchoredVector> AnchoredVectorsAtOtherReaction = getLoadsAtPoint(otherConnector.getAnchor());
List<AnchoredVector> otherConnectorReactions = getReactionAnchoredVectors(connector, otherConnector.getReactions());
AnchoredVector otherReactionTarget = otherConnectorReactions.get(0);
AnchoredVector otherReaction = null;
// iterate through the list and look for the one that should do it.
// we want to find something that could be a reaction on the other end of the 2fm,
// and we want it to match the name of our current AnchoredVector.
for (AnchoredVector otherAnchoredVector : AnchoredVectorsAtOtherReaction) {
if (otherAnchoredVector.getVectorValue().equals(otherReactionTarget.getVectorValue()) ||
otherAnchoredVector.getVectorValue().equals(otherReactionTarget.getVectorValue().negate())) {
if (candidate.getSymbolName().equalsIgnoreCase(otherAnchoredVector.getSymbolName())) {
otherReaction = otherAnchoredVector;
}
}
}
// okay, we found it. That means that this AnchoredVector should have an appropriate name.
// in the case that there is some case that is invalid and escapes the above, it should
// be caught by other parts of the check (for instance, if the user was especially
// difficult and put two reactions at one end of a 2fm or something like that)
if (otherReaction != null) {
return NameCheckResult.passed;
}
return result;
}
/**
* A convenience method to get all of the loads at a given point. This goes through
* all of the loads in the current diagram and checks against them.
* @param point
* @return
*/
protected List<AnchoredVector> getLoadsAtPoint(Point point) {
List<AnchoredVector> loads = new ArrayList<AnchoredVector>();
//for (SimulationObject obj : allObjects) {
for (AnchoredVector load : diagram.getCurrentState().getAddedLoads()) {
if (load.getAnchor().equals(point)) {
loads.add(load);
}
}
return loads;
}
/**
* Attempts to find a AnchoredVector from a pool of possibilities which might match the target.
* The search will search for AnchoredVectors that are the same type, are at the same point, and
* point in the same direction as the target, or the opposite direction, if the
* testOpposites flag is checked.
* @param searchPool
* @param target
* @param testOpposites
* @return
*/
protected List<AnchoredVector> getCandidates(List<AnchoredVector> searchPool, AnchoredVector target, boolean testOpposites) {
List<AnchoredVector> candidates = new ArrayList<AnchoredVector>();
for (AnchoredVector AnchoredVector : searchPool) {
// make sure types are the same
if (AnchoredVector.getClass() != target.getClass()) {
continue;
}
// make sure the anchor is the same
if (!AnchoredVector.getAnchor().equals(target.getAnchor())) {
continue;
}
// add if direction is the same, or is opposite and the testOpposites flag is set
if (AnchoredVector.getVectorValue().equals(target.getVectorValue()) ||
(testOpposites && AnchoredVector.getVectorValue().negate().equals(target.getVectorValue()))) {
candidates.add(AnchoredVector);
}
}
return candidates;
}
/**
* This is a result that is returned when a AnchoredVector is checked against some stored version.
* This works when checking against a given, a weight, or a stored symbolic AnchoredVector.
*/
protected enum AnchoredVectorCheckResult {
passed, //check passes
wrongDirection, // occurs when solved value is in wrong direction
wrongSymbol, // symbol is stored, this should change its symbol
wrongNumericValue, // number is stored, user put in wrong value
shouldNotBeSymbol, // store is numeric
shouldNotBeNumeric // store is symbolic
}
/**
* Like checkAnchoredVectorAgainstTarget() this method aims to check whether a AnchoredVector matches the
* target. However, this method checks against a collection of candidates, rather than just one.
* @param candidates
* @param target
* @return
*/
protected Pair<AnchoredVector, AnchoredVectorCheckResult> checkAllCandidatesAgainstTarget(List<AnchoredVector> candidates, AnchoredVector target) {
AnchoredVectorCheckResult result = null;
AnchoredVector lastCandidate = null;
for (AnchoredVector candidate : candidates) {
lastCandidate = candidate;
result = checkAnchoredVectorAgainstTarget(candidate, target);
if (result == AnchoredVectorCheckResult.passed) {
return new Pair<AnchoredVector, AnchoredVectorCheckResult>(candidate, result);
}
}
return new Pair<AnchoredVector, AnchoredVectorCheckResult>(lastCandidate, result);
}
/**
 * Checks whether the candidate sufficiently matches the target load.
 * <p>
 * The target can be a stored symbolic AnchoredVector, a given, or a weight.
 * For a known (numeric) target, the candidate must be numeric with the same
 * magnitude and direction. For an unknown target, the candidate must be
 * symbolic and carry the same symbol name (case-insensitive). This abstracts
 * out the detailed naming/valuing/direction checks given what may be known
 * from other diagrams.
 * @param candidate the user-added load
 * @param target the stored load to test against
 * @return the detailed check result
 */
protected AnchoredVectorCheckResult checkAnchoredVectorAgainstTarget(AnchoredVector candidate, AnchoredVector target) {
    if (!target.isKnown()) {
        // Symbolic target: the candidate must also be symbolic...
        if (candidate.isKnown()) {
            return AnchoredVectorCheckResult.shouldNotBeNumeric;
        }
        // ...and must use the same symbol name.
        if (!candidate.getSymbolName().equalsIgnoreCase(target.getSymbolName())) {
            return AnchoredVectorCheckResult.wrongSymbol;
        }
        return AnchoredVectorCheckResult.passed;
    }
    // Known target: the candidate must be numeric...
    if (!candidate.isKnown()) {
        return AnchoredVectorCheckResult.shouldNotBeSymbol;
    }
    // ...with the correct magnitude...
    if (!candidate.getDiagramValue().equals(target.getDiagramValue())) {
        return AnchoredVectorCheckResult.wrongNumericValue;
    }
    // ...and pointing the correct way.
    if (!candidate.getVectorValue().equals(target.getVectorValue())) {
        return AnchoredVectorCheckResult.wrongDirection;
    }
    return AnchoredVectorCheckResult.passed;
}
/**
 * Result of a check of a connector. This returns *no* information about
 * whether the AnchoredVectors are appropriately valued or named; it merely
 * indicates whether the provided AnchoredVectors could work for the connector.
 */
protected enum ConnectorCheckResult {
    passed, // all reactions accounted for, nothing extra
    missingSomething, // some reaction is missing from the candidates
    somethingExtra, // the candidates have an extra force that is not necessary
    inappropriateDirection, // one or more of the candidates is the wrong direction for the connector
    // (ie, in a 2 force member, or in a roller)
}
/**
 * Checks whether candidateAnchoredVectors is a suitable match for the
 * reactions of the given connector. This does NOT check names or symbols;
 * it assumes every candidate is anchored at the connector's anchor.
 * <p>
 * Returns ConnectorCheckResult.somethingExtra when everything matches except
 * that more loads than expected are present — callers decide whether that is
 * acceptable (e.g. multiple connectors sharing a point).
 * @param candidateAnchoredVectors the user-added loads at the connector's anchor
 * @param connector the connector whose reactions are expected
 * @param localBody the body in this diagram the connector attaches to, or null
 * @return the connector-level check result
 */
protected ConnectorCheckResult checkConnector(List<AnchoredVector> candidateAnchoredVectors, Connector connector, Body localBody) {
    List<Vector> reactions = (localBody == null)
            ? connector.getReactions()
            : connector.getReactions(localBody);
    List<AnchoredVector> reactionAnchoredVectors = getReactionAnchoredVectors(connector, reactions);
    boolean negatable = connector.isForceDirectionNegatable();
    for (AnchoredVector reaction : reactionAnchoredVectors) {
        // Every expected reaction must have at least one matching candidate.
        if (!getCandidates(candidateAnchoredVectors, reaction, negatable).isEmpty()) {
            continue;
        }
        // This reaction is unrepresented. If allowing negation would have
        // matched, the user drew the load in the wrong direction.
        if (!negatable && !getCandidates(candidateAnchoredVectors, reaction, true).isEmpty()) {
            return ConnectorCheckResult.inappropriateDirection;
        }
        return ConnectorCheckResult.missingSomething;
    }
    // All reactions accounted for; a longer candidate list may be a problem.
    if (candidateAnchoredVectors.size() > reactionAnchoredVectors.size()) {
        return ConnectorCheckResult.somethingExtra;
    }
    return ConnectorCheckResult.passed;
}
/**
 * Converts a connector's reaction Vectors into AnchoredVectors anchored at
 * the connector's anchor point.
 * <p>
 * A fresh list is returned, so callers may freely remove elements from it.
 * @param connector the connector supplying the anchor point
 * @param reactions the reaction vectors to wrap
 * @return a new mutable list of anchored reactions
 */
private List<AnchoredVector> getReactionAnchoredVectors(Connector connector, List<Vector> reactions) {
    List<AnchoredVector> anchoredReactions = new ArrayList<AnchoredVector>(reactions.size());
    for (Vector reaction : reactions) {
        anchoredReactions.add(new AnchoredVector(connector.getAnchor(), reaction));
    }
    return anchoredReactions;
}
}
| true
| true
|
/**
 * Checks the user's free body diagram against the expected solution.
 * <p>
 * Steps: (1) the user must have added at least one load; (2) every given
 * load must be present; (3) every nonzero body weight must be present;
 * (4) every border connector's reactions must be represented by correctly
 * directed, valued, and named loads; (5) no extra loads may remain.
 * <p>
 * Side effect: on failure an advice key is set via setAdviceKey() before
 * returning false.
 * @return true if the diagram passes every check, false otherwise
 */
public boolean checkDiagram() {
    //done = false;
    // step 1: assemble a list of all the forces the user has added.
    List<AnchoredVector> addedLoads = new ArrayList<AnchoredVector>(diagram.getCurrentState().getAddedLoads());
    logInfo("check: user added AnchoredVectors: " + addedLoads);
    if (addedLoads.size() <= 0) {
        logInfo("check: diagram does not contain any AnchoredVectors");
        logInfo("check: FAILED");
        setAdviceKey("fbd_feedback_check_fail_add");
        return false;
    }
    // step 2: for vectors that we can click on and add, ie, given added forces,
    // make sure that the user has added all of them.
    for (AnchoredVector given : getGivenLoads()) {
        boolean ok = performGivenCheck(addedLoads, given);
        if (!ok) {
            return false;
        }
    }
    // step 3: Make sure weights exist, and remove them from our addedForces.
    for (Body body : diagram.getBodySubset().getBodies()) {
        if (body.getWeight().getDiagramValue().floatValue() == 0) {
            continue;
        }
        // Weight acts downward (-Y) at the center of mass.
        AnchoredVector weight = new AnchoredVector(
                body.getCenterOfMassPoint(),
                new Vector(Unit.force, Vector3bd.UNIT_Y.negate(),
                        new BigDecimal(body.getWeight().doubleValue())));
        boolean ok = performWeightCheck(addedLoads, weight, body);
        if (!ok) {
            return false;
        }
    }
    // Step 4: go through all the border connectors connecting this FBD to the external world,
    // and check each AnchoredVector implied by the connector.
    for (int i = 0; i < diagram.allObjects().size(); i++) {
        SimulationObject obj = diagram.allObjects().get(i);
        if (!(obj instanceof Connector)) {
            continue;
        }
        Connector connector = (Connector) obj;
        // find the body in this diagram to which the connector is attached.
        Body body = null;
        if (diagram.allBodies().contains(connector.getBody1())) {
            body = connector.getBody1();
        }
        if (diagram.allBodies().contains(connector.getBody2())) {
            body = connector.getBody2();
        }
        // ^ is java's XOR operator
        // we want the joint IF it connects a body in the body list
        // to a body that is not in the body list. This means xor.
        if (!(diagram.getBodySubset().getBodies().contains(connector.getBody1()) ^
                diagram.getBodySubset().getBodies().contains(connector.getBody2()))) {
            continue;
        }
        // build a list of the AnchoredVectors at this point
        List<AnchoredVector> userAnchoredVectorsAtConnector = new ArrayList<AnchoredVector>();
        for (AnchoredVector AnchoredVector : addedLoads) {
            if (AnchoredVector.getAnchor().equals(connector.getAnchor())) {
                userAnchoredVectorsAtConnector.add(AnchoredVector);
            }
        }
        logInfo("check: testing connector: " + connector);
        // special case, userAnchoredVectorsAtConnector is empty:
        if (userAnchoredVectorsAtConnector.isEmpty()) {
            logInfo("check: have any forces been added");
            logInfo("check: FAILED");
            setAdviceKey("fbd_feedback_check_fail_joint_reaction", connector.connectorName(), connector.getAnchor().getLabelText());
            return false;
        }
        ConnectorCheckResult connectorResult = checkConnector(userAnchoredVectorsAtConnector, connector, body);
        switch (connectorResult) {
            case passed:
                // okay, the check passed without complaint.
                // The AnchoredVectors may still not be correct, but that will be tested afterwards.
                // for now, continue normally.
                break;
            case inappropriateDirection:
                // check for special case of 2FM:
                logInfo("check: User added AnchoredVectors at " + connector.getAnchor().getName() + ": " + userAnchoredVectorsAtConnector);
                logInfo("check: Was expecting: " + getReactionAnchoredVectors(connector, connector.getReactions(body)));
                if (connector instanceof Connector2ForceMember2d) {
                    Connector2ForceMember2d connector2fm = (Connector2ForceMember2d) connector;
                    if (connector2fm.getMember() instanceof Cable) {
                        // special message for cables:
                        logInfo("check: user created a cable in compression at point " + connector.getAnchor().getName());
                        logInfo("check: FAILED");
                        setAdviceKey("fbd_feedback_check_fail_joint_cable",
                                connector.getAnchor().getName(),
                                connector2fm.getMember());
                        return false;
                    }
                } else {
                    // one of the directions is the wrong way, and it's not a cable this time
                    // it is probably a roller or something.
                    logInfo("check: AnchoredVectors have wrong direction at point " + connector.getAnchor().getName());
                    logInfo("check: FAILED");
                    setAdviceKey("fbd_feedback_check_fail_some_reverse", connector.getAnchor().getName());
                    return false;
                }
                // NOTE(review): a Connector2ForceMember2d whose member is NOT a
                // Cable reaches here and falls through to the cases below —
                // presumably intentional; confirm before adding a break.
            case somethingExtra:
                // this particular check could be fine
                // in some problems there are multiple connectors at one point (notably in frame problems)
                // and this means that extra AnchoredVectors are okay. We check to see if multiple connectors are present,
                // and if so, continue gracefully, as inappropriate extra things will be checked at the end
                // otherwise the check will continue to the next step, "missingSomething" where other conditions
                // will be tested.
                if (diagram.getConnectorsAtPoint(connector.getAnchor()).size() > 1) {
                    // continue on.
                    break;
                }
                // deliberate fall-through: only one connector here, so treat as wrong/missing
            case missingSomething:
                // okay, if we are here then either something is missing, or something is extra.
                // check against pins or rollers and see what happens.
                logInfo("check: User added AnchoredVectors at " + connector.getAnchor().getName() + ": " + userAnchoredVectorsAtConnector);
                logInfo("check: Was expecting: " + getReactionAnchoredVectors(connector, connector.getReactions(body)));
                // check if this is mistaken for a pin
                if (!connector.connectorName().equals("pin")) {
                    Pin2d testPin = new Pin2d(connector.getAnchor());
                    if (checkConnector(userAnchoredVectorsAtConnector, testPin, null) == ConnectorCheckResult.passed) {
                        logInfo("check: user wrongly created a pin at point " + connector.getAnchor().getLabelText());
                        logInfo("check: FAILED");
                        setAdviceKey("fbd_feedback_check_fail_joint_wrong_type", connector.getAnchor().getLabelText(), "pin", connector.connectorName());
                        return false;
                    }
                }
                // check if this is mistaken for a fix
                if (!connector.connectorName().equals("fix")) {
                    Fix2d testFix = new Fix2d(connector.getAnchor());
                    if (checkConnector(userAnchoredVectorsAtConnector, testFix, null) == ConnectorCheckResult.passed) {
                        logInfo("check: user wrongly created a fix at point " + connector.getAnchor().getLabelText());
                        logInfo("check: FAILED");
                        setAdviceKey("fbd_feedback_check_fail_joint_wrong_type", connector.getAnchor().getLabelText(), "fix", connector.connectorName());
                        return false;
                    }
                }
                // otherwise, the user did something strange.
                logInfo("check: user simply added reactions to a joint that don't make sense to point " + connector.getAnchor().getLabelText());
                logInfo("check: FAILED");
                setAdviceKey("fbd_feedback_check_fail_joint_wrong", connector.connectorName(), connector.getAnchor().getLabelText());
                return false;
        }
        // okay, now the connector test has passed.
        // We know now that the AnchoredVectors present in the diagram satisfy the reactions for the connector.
        // All reactions AnchoredVectors are necessarily symbolic, and thus will either be new symbols, or
        // they will be present in the symbol manager.
        List<AnchoredVector> expectedReactions = getReactionAnchoredVectors(connector, connector.getReactions(body));
        for (AnchoredVector reaction : expectedReactions) {
            // get a AnchoredVector and result corresponding to this check.
            AnchoredVector loadFromSymbolManager = Exercise.getExercise().getSymbolManager().getLoad(reaction);
            if (loadFromSymbolManager != null) {
                // make sure the directions are pointing the correct way:
                if (reaction.getVectorValue().equals(loadFromSymbolManager.getVectorValue().negate())) {
                    loadFromSymbolManager = new AnchoredVector(loadFromSymbolManager);
                    // BUG FIX: negate() returns a negated copy and leaves the
                    // receiver untouched, so the old call discarded its result and
                    // the copied load kept the wrong direction. negateLocal()
                    // flips the copied load's vector in place.
                    loadFromSymbolManager.getVectorValue().negateLocal();
                }
                // of the user AnchoredVectors, only check those which point in maybe the right direction
                List<AnchoredVector> userAnchoredVectorsAtConnectorInDirection = new ArrayList<AnchoredVector>();
                for (AnchoredVector AnchoredVector : userAnchoredVectorsAtConnector) {
                    if (AnchoredVector.getVectorValue().equals(reaction.getVectorValue()) ||
                            AnchoredVector.getVectorValue().equals(reaction.getVectorValue().negate())) {
                        userAnchoredVectorsAtConnectorInDirection.add(AnchoredVector);
                    }
                }
                Pair<AnchoredVector, AnchoredVectorCheckResult> result = checkAllCandidatesAgainstTarget(
                        userAnchoredVectorsAtConnectorInDirection, loadFromSymbolManager);
                AnchoredVector candidate = result.getLeft();
                // this AnchoredVector has been solved for already. Now we can check against it.
                if (result.getRight() == AnchoredVectorCheckResult.passed) {
                    // check is OK, we can remove the AnchoredVector from our addedAnchoredVectors.
                    addedLoads.remove(candidate);
                } else {
                    complainAboutAnchoredVectorCheck(result.getRight(), candidate);
                    return false;
                }
            } else {
                // this AnchoredVector is new, so it requires a name check.
                // let's find a AnchoredVector that seems to match the expected reaction.
                AnchoredVector candidate = null;
                for (AnchoredVector possibleCandidate : userAnchoredVectorsAtConnector) {
                    // we know that these all are at the right anchor, so only test direction.
                    // direction may also be negated, since these are new symbols.
                    if (possibleCandidate.getVectorValue().equals(reaction.getVectorValue()) ||
                            possibleCandidate.getVectorValue().equals(reaction.getVectorValue().negate())) {
                        candidate = possibleCandidate;
                    }
                }
                // candidate should not be null at this point since the main test passed.
                NameCheckResult nameResult;
                if (connector instanceof Connector2ForceMember2d) {
                    nameResult = checkAnchoredVectorName2FM(candidate, (Connector2ForceMember2d) connector);
                } else {
                    nameResult = checkLoadName(candidate);
                }
                if (nameResult == NameCheckResult.passed) {
                    // we're okay!!
                    addedLoads.remove(candidate);
                } else {
                    complainAboutName(nameResult, candidate);
                    return false;
                }
            }
        }
    }
    // Step 5: Make sure we've used all the user added forces.
    if (!addedLoads.isEmpty()) {
        logInfo("check: user added more forces than necessary: " + addedLoads);
        logInfo("check: FAILED");
        setAdviceKey("fbd_feedback_check_fail_additional", addedLoads.get(0).getAnchor().getName());
        return false;
    }
    // Step 6: Verify labels
    // verify that all unknowns are symbols
    // these are reaction forces and moments
    // knowns should not be symbols: externals, weights
    // symbols must also not be repeated, unless this is valid somehow? (not yet)
    // Yay, we've passed the test!
    logInfo("check: PASSED!");
    return true;
}
|
/**
 * Checks the user's free body diagram against the expected solution.
 * <p>
 * Steps: (1) the user must have added at least one load; (2) every given
 * load must be present; (3) every nonzero body weight must be present;
 * (4) every border connector's reactions must be represented by correctly
 * directed, valued, and named loads; (5) no extra loads may remain.
 * <p>
 * Side effect: on failure an advice key is set via setAdviceKey() before
 * returning false.
 * @return true if the diagram passes every check, false otherwise
 */
public boolean checkDiagram() {
    //done = false;
    // step 1: assemble a list of all the forces the user has added.
    List<AnchoredVector> addedLoads = new ArrayList<AnchoredVector>(diagram.getCurrentState().getAddedLoads());
    logInfo("check: user added AnchoredVectors: " + addedLoads);
    if (addedLoads.size() <= 0) {
        logInfo("check: diagram does not contain any AnchoredVectors");
        logInfo("check: FAILED");
        setAdviceKey("fbd_feedback_check_fail_add");
        return false;
    }
    // step 2: for vectors that we can click on and add, ie, given added forces,
    // make sure that the user has added all of them.
    for (AnchoredVector given : getGivenLoads()) {
        boolean ok = performGivenCheck(addedLoads, given);
        if (!ok) {
            return false;
        }
    }
    // step 3: Make sure weights exist, and remove them from our addedForces.
    for (Body body : diagram.getBodySubset().getBodies()) {
        if (body.getWeight().getDiagramValue().floatValue() == 0) {
            continue;
        }
        // Weight acts downward (-Y) at the center of mass.
        AnchoredVector weight = new AnchoredVector(
                body.getCenterOfMassPoint(),
                new Vector(Unit.force, Vector3bd.UNIT_Y.negate(),
                        new BigDecimal(body.getWeight().doubleValue())));
        boolean ok = performWeightCheck(addedLoads, weight, body);
        if (!ok) {
            return false;
        }
    }
    // Step 4: go through all the border connectors connecting this FBD to the external world,
    // and check each AnchoredVector implied by the connector.
    for (int i = 0; i < diagram.allObjects().size(); i++) {
        SimulationObject obj = diagram.allObjects().get(i);
        if (!(obj instanceof Connector)) {
            continue;
        }
        Connector connector = (Connector) obj;
        // find the body in this diagram to which the connector is attached.
        Body body = null;
        if (diagram.allBodies().contains(connector.getBody1())) {
            body = connector.getBody1();
        }
        if (diagram.allBodies().contains(connector.getBody2())) {
            body = connector.getBody2();
        }
        // ^ is java's XOR operator
        // we want the joint IF it connects a body in the body list
        // to a body that is not in the body list. This means xor.
        if (!(diagram.getBodySubset().getBodies().contains(connector.getBody1()) ^
                diagram.getBodySubset().getBodies().contains(connector.getBody2()))) {
            continue;
        }
        // build a list of the AnchoredVectors at this point
        List<AnchoredVector> userAnchoredVectorsAtConnector = new ArrayList<AnchoredVector>();
        for (AnchoredVector AnchoredVector : addedLoads) {
            if (AnchoredVector.getAnchor().equals(connector.getAnchor())) {
                userAnchoredVectorsAtConnector.add(AnchoredVector);
            }
        }
        logInfo("check: testing connector: " + connector);
        // special case, userAnchoredVectorsAtConnector is empty:
        if (userAnchoredVectorsAtConnector.isEmpty()) {
            logInfo("check: have any forces been added");
            logInfo("check: FAILED");
            setAdviceKey("fbd_feedback_check_fail_joint_reaction", connector.connectorName(), connector.getAnchor().getLabelText());
            return false;
        }
        // //this is trying to make sure two force members have the same values at either end
        // if (body instanceof TwoForceMember) {
        // List<AnchoredVector> userAnchoredVectorsAtOtherConnector = new ArrayList<AnchoredVector>();
        // Connector con;
        // if (((TwoForceMember) body).getConnector1() == connector) {
        // con = ((TwoForceMember) body).getConnector2();
        // } else {
        // con = ((TwoForceMember) body).getConnector1();
        // }
        // for (AnchoredVector AnchoredVector : addedAnchoredVectors) {
        // if (AnchoredVector.getAnchor().equals(con.getAnchor())) {
        // userAnchoredVectorsAtOtherConnector.add(AnchoredVector);
        // }
        // }
        // if (!userAnchoredVectorsAtConnector.get(0).getLabelText().equalsIgnoreCase(userAnchoredVectorsAtOtherConnector.get(0).getLabelText())) {
        // logInfo("check: the user has given a 2ForceMember's AnchoredVectors different values");
        // logInfo("check: FAILED");
        // setAdviceKey("fbd_feedback_check_fail_2force_not_same");
        // return false;
        // }
        // }
        ConnectorCheckResult connectorResult = checkConnector(userAnchoredVectorsAtConnector, connector, body);
        switch (connectorResult) {
            case passed:
                // okay, the check passed without complaint.
                // The AnchoredVectors may still not be correct, but that will be tested afterwards.
                // for now, continue normally.
                break;
            case inappropriateDirection:
                // check for special case of 2FM:
                logInfo("check: User added AnchoredVectors at " + connector.getAnchor().getName() + ": " + userAnchoredVectorsAtConnector);
                logInfo("check: Was expecting: " + getReactionAnchoredVectors(connector, connector.getReactions(body)));
                if (connector instanceof Connector2ForceMember2d) {
                    Connector2ForceMember2d connector2fm = (Connector2ForceMember2d) connector;
                    if (connector2fm.getMember() instanceof Cable) {
                        // special message for cables:
                        logInfo("check: user created a cable in compression at point " + connector.getAnchor().getName());
                        logInfo("check: FAILED");
                        setAdviceKey("fbd_feedback_check_fail_joint_cable",
                                connector.getAnchor().getName(),
                                connector2fm.getMember());
                        return false;
                    }
                } else {
                    // one of the directions is the wrong way, and it's not a cable this time
                    // it is probably a roller or something.
                    logInfo("check: AnchoredVectors have wrong direction at point " + connector.getAnchor().getName());
                    logInfo("check: FAILED");
                    setAdviceKey("fbd_feedback_check_fail_some_reverse", connector.getAnchor().getName());
                    return false;
                }
                // NOTE(review): a Connector2ForceMember2d whose member is NOT a
                // Cable reaches here and falls through to the cases below —
                // presumably intentional; confirm before adding a break.
            case somethingExtra:
                // this particular check could be fine
                // in some problems there are multiple connectors at one point (notably in frame problems)
                // and this means that extra AnchoredVectors are okay. We check to see if multiple connectors are present,
                // and if so, continue gracefully, as inappropriate extra things will be checked at the end
                // otherwise the check will continue to the next step, "missingSomething" where other conditions
                // will be tested.
                if (diagram.getConnectorsAtPoint(connector.getAnchor()).size() > 1) {
                    // continue on.
                    break;
                }
                // deliberate fall-through: only one connector here, so treat as wrong/missing
            case missingSomething:
                // okay, if we are here then either something is missing, or something is extra.
                // check against pins or rollers and see what happens.
                logInfo("check: User added AnchoredVectors at " + connector.getAnchor().getName() + ": " + userAnchoredVectorsAtConnector);
                logInfo("check: Was expecting: " + getReactionAnchoredVectors(connector, connector.getReactions(body)));
                // check if this is mistaken for a pin
                if (!connector.connectorName().equals("pin")) {
                    Pin2d testPin = new Pin2d(connector.getAnchor());
                    if (checkConnector(userAnchoredVectorsAtConnector, testPin, null) == ConnectorCheckResult.passed) {
                        logInfo("check: user wrongly created a pin at point " + connector.getAnchor().getLabelText());
                        logInfo("check: FAILED");
                        setAdviceKey("fbd_feedback_check_fail_joint_wrong_type", connector.getAnchor().getLabelText(), "pin", connector.connectorName());
                        return false;
                    }
                }
                // check if this is mistaken for a fix
                if (!connector.connectorName().equals("fix")) {
                    Fix2d testFix = new Fix2d(connector.getAnchor());
                    if (checkConnector(userAnchoredVectorsAtConnector, testFix, null) == ConnectorCheckResult.passed) {
                        logInfo("check: user wrongly created a fix at point " + connector.getAnchor().getLabelText());
                        logInfo("check: FAILED");
                        setAdviceKey("fbd_feedback_check_fail_joint_wrong_type", connector.getAnchor().getLabelText(), "fix", connector.connectorName());
                        return false;
                    }
                }
                // otherwise, the user did something strange.
                logInfo("check: user simply added reactions to a joint that don't make sense to point " + connector.getAnchor().getLabelText());
                logInfo("check: FAILED");
                setAdviceKey("fbd_feedback_check_fail_joint_wrong", connector.connectorName(), connector.getAnchor().getLabelText());
                return false;
        }
        // okay, now the connector test has passed.
        // We know now that the AnchoredVectors present in the diagram satisfy the reactions for the connector.
        // All reactions AnchoredVectors are necessarily symbolic, and thus will either be new symbols, or
        // they will be present in the symbol manager.
        List<AnchoredVector> expectedReactions = getReactionAnchoredVectors(connector, connector.getReactions(body));
        for (AnchoredVector reaction : expectedReactions) {
            // get a AnchoredVector and result corresponding to this check.
            AnchoredVector loadFromSymbolManager = Exercise.getExercise().getSymbolManager().getLoad(reaction);
            if (loadFromSymbolManager != null) {
                // make sure the directions are pointing the correct way:
                if (reaction.getVectorValue().equals(loadFromSymbolManager.getVectorValue().negate())) {
                    // Copy first so the symbol manager's stored load is not mutated;
                    // negateLocal() flips the copy's vector in place.
                    loadFromSymbolManager = new AnchoredVector(loadFromSymbolManager);
                    loadFromSymbolManager.getVectorValue().negateLocal();
                }
                // of the user AnchoredVectors, only check those which point in maybe the right direction
                List<AnchoredVector> userAnchoredVectorsAtConnectorInDirection = new ArrayList<AnchoredVector>();
                for (AnchoredVector AnchoredVector : userAnchoredVectorsAtConnector) {
                    if (AnchoredVector.getVectorValue().equals(reaction.getVectorValue()) ||
                            AnchoredVector.getVectorValue().equals(reaction.getVectorValue().negate())) {
                        userAnchoredVectorsAtConnectorInDirection.add(AnchoredVector);
                    }
                }
                Pair<AnchoredVector, AnchoredVectorCheckResult> result = checkAllCandidatesAgainstTarget(
                        userAnchoredVectorsAtConnectorInDirection, loadFromSymbolManager);
                AnchoredVector candidate = result.getLeft();
                // this AnchoredVector has been solved for already. Now we can check against it.
                if (result.getRight() == AnchoredVectorCheckResult.passed) {
                    // check is OK, we can remove the AnchoredVector from our addedAnchoredVectors.
                    addedLoads.remove(candidate);
                } else {
                    complainAboutAnchoredVectorCheck(result.getRight(), candidate);
                    return false;
                }
            } else {
                // this AnchoredVector is new, so it requires a name check.
                // let's find a AnchoredVector that seems to match the expected reaction.
                AnchoredVector candidate = null;
                for (AnchoredVector possibleCandidate : userAnchoredVectorsAtConnector) {
                    // we know that these all are at the right anchor, so only test direction.
                    // direction may also be negated, since these are new symbols.
                    if (possibleCandidate.getVectorValue().equals(reaction.getVectorValue()) ||
                            possibleCandidate.getVectorValue().equals(reaction.getVectorValue().negate())) {
                        candidate = possibleCandidate;
                    }
                }
                // candidate should not be null at this point since the main test passed.
                NameCheckResult nameResult;
                if (connector instanceof Connector2ForceMember2d) {
                    nameResult = checkAnchoredVectorName2FM(candidate, (Connector2ForceMember2d) connector);
                } else {
                    nameResult = checkLoadName(candidate);
                }
                if (nameResult == NameCheckResult.passed) {
                    // we're okay!!
                    addedLoads.remove(candidate);
                } else {
                    complainAboutName(nameResult, candidate);
                    return false;
                }
            }
        }
    }
    // Step 5: Make sure we've used all the user added forces.
    if (!addedLoads.isEmpty()) {
        logInfo("check: user added more forces than necessary: " + addedLoads);
        logInfo("check: FAILED");
        setAdviceKey("fbd_feedback_check_fail_additional", addedLoads.get(0).getAnchor().getName());
        return false;
    }
    // Step 6: Verify labels
    // verify that all unknowns are symbols
    // these are reaction forces and moments
    // knowns should not be symbols: externals, weights
    // symbols must also not be repeated, unless this is valid somehow? (not yet)
    // Yay, we've passed the test!
    logInfo("check: PASSED!");
    return true;
}
|
diff --git a/cli/src/main/java/com/vmware/bdd/cli/commands/ClusterCommands.java b/cli/src/main/java/com/vmware/bdd/cli/commands/ClusterCommands.java
index 519051df..a22396b2 100644
--- a/cli/src/main/java/com/vmware/bdd/cli/commands/ClusterCommands.java
+++ b/cli/src/main/java/com/vmware/bdd/cli/commands/ClusterCommands.java
@@ -1,1230 +1,1231 @@
/*****************************************************************************
* Copyright (c) 2012 VMware, Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
****************************************************************************/
package com.vmware.bdd.cli.commands;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import jline.ConsoleReader;
import org.apache.hadoop.conf.Configuration;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.hadoop.impala.hive.HiveCommands;
import org.springframework.shell.core.CommandMarker;
import org.springframework.shell.core.annotation.CliAvailabilityIndicator;
import org.springframework.shell.core.annotation.CliCommand;
import org.springframework.shell.core.annotation.CliOption;
import org.springframework.stereotype.Component;
import com.vmware.bdd.apitypes.Cluster.ClusterType;
import com.vmware.bdd.apitypes.ClusterCreate;
import com.vmware.bdd.apitypes.ClusterRead;
import com.vmware.bdd.apitypes.DistroRead;
import com.vmware.bdd.apitypes.NetworkRead;
import com.vmware.bdd.apitypes.NodeGroupCreate;
import com.vmware.bdd.apitypes.NodeGroupRead;
import com.vmware.bdd.apitypes.NodeRead;
import com.vmware.bdd.apitypes.TopologyType;
import com.vmware.bdd.cli.rest.CliRestException;
import com.vmware.bdd.cli.rest.ClusterRestClient;
import com.vmware.bdd.cli.rest.DistroRestClient;
import com.vmware.bdd.cli.rest.NetworkRestClient;
import com.vmware.bdd.utils.AppConfigValidationUtils;
import com.vmware.bdd.utils.AppConfigValidationUtils.ValidationType;
import com.vmware.bdd.utils.ValidateResult;
@Component
public class ClusterCommands implements CommandMarker {
@Autowired
private DistroRestClient distroRestClient;
@Autowired
private NetworkRestClient networkRestClient;
@Autowired
private ClusterRestClient restClient;
@Autowired
private Configuration hadoopConfiguration;
@Autowired
private HiveCommands hiveCommands;
private String hiveInfo;
private String targetClusterName;
private boolean alwaysAnswerYes;
// Role a node group plays within the cluster: master, worker, client, or none.
private enum NodeGroupRole {
    MASTER, WORKER, CLIENT, NONE
}
/**
 * Availability indicator for the "cluster help" command.
 * @return always true — the command is available in every shell state
 */
@CliAvailabilityIndicator({ "cluster help" })
public boolean isCommandAvailable() {
    return true;
}
@CliCommand(value = "cluster create", help = "Create a hadoop cluster")
public void createCluster(
@CliOption(key = { "name" }, mandatory = true, help = "The cluster name") final String name,
@CliOption(key = { "distro" }, mandatory = false, help = "Hadoop Distro") final String distro,
@CliOption(key = { "specFile" }, mandatory = false, help = "The spec file name path") final String specFilePath,
@CliOption(key = { "rpNames" }, mandatory = false, help = "Resource Pools for the cluster: use \",\" among names.") final String rpNames,
@CliOption(key = { "dsNames" }, mandatory = false, help = "Datastores for the cluster: use \",\" among names.") final String dsNames,
@CliOption(key = { "networkName" }, mandatory = false, help = "Network Name") final String networkName,
@CliOption(key = { "topology" }, mandatory = false, help = "Please specify the topology type: HVE or RACK_HOST or HOST_AS_RACK") final String topology,
@CliOption(key = { "resume" }, mandatory = false, specifiedDefaultValue = "true", unspecifiedDefaultValue = "false", help = "flag to resume cluster creation") final boolean resume,
@CliOption(key = { "skipConfigValidation" }, mandatory = false, unspecifiedDefaultValue = "false", specifiedDefaultValue = "true", help = "Skip cluster configuration validation. ") final boolean skipConfigValidation,
@CliOption(key = { "yes" }, mandatory = false, unspecifiedDefaultValue = "false", specifiedDefaultValue = "true", help = "Answer 'yes' to all Y/N questions. ") final boolean alwaysAnswerYes) {
this.alwaysAnswerYes = alwaysAnswerYes;
//validate the name
if (name.indexOf("-") != -1) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name,
Constants.OUTPUT_OP_CREATE, Constants.OUTPUT_OP_RESULT_FAIL,
Constants.PARAM_CLUSTER
+ Constants.PARAM_NOT_CONTAIN_HORIZONTAL_LINE);
return;
}
//process resume
if (resume) {
resumeCreateCluster(name);
return;
}
// build ClusterCreate object
ClusterCreate clusterCreate = new ClusterCreate();
clusterCreate.setName(name);
if (topology != null) {
try {
clusterCreate.setTopologyPolicy(TopologyType.valueOf(topology));
} catch (IllegalArgumentException ex) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name,
Constants.OUTPUT_OP_CREATE, Constants.OUTPUT_OP_RESULT_FAIL,
Constants.INVALID_VALUE + " " + "topologyType=" + topology);
return;
}
} else {
clusterCreate.setTopologyPolicy(null);
}
if (distro != null) {
List<String> distroNames = getDistroNames();
if (validName(distro, distroNames)) {
clusterCreate.setDistro(distro);
} else {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER,
name, Constants.OUTPUT_OP_CREATE,
Constants.OUTPUT_OP_RESULT_FAIL, Constants.PARAM_DISTRO
+ Constants.PARAM_NOT_SUPPORTED + distroNames);
return;
}
}
clusterCreate.setType(Enum.valueOf(ClusterType.class, "HADOOP"));
if (rpNames != null) {
List<String> rpNamesList = CommandsUtils.inputsConvert(rpNames);
if (rpNamesList.isEmpty()) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER,
name, Constants.OUTPUT_OP_CREATE,
Constants.OUTPUT_OP_RESULT_FAIL,
Constants.INPUT_RPNAMES_PARAM + Constants.MULTI_INPUTS_CHECK);
return;
} else {
clusterCreate.setRpNames(rpNamesList);
}
}
if (dsNames != null) {
List<String> dsNamesList = CommandsUtils.inputsConvert(dsNames);
if (dsNamesList.isEmpty()) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER,
name, Constants.OUTPUT_OP_CREATE,
Constants.OUTPUT_OP_RESULT_FAIL,
Constants.INPUT_DSNAMES_PARAM + Constants.MULTI_INPUTS_CHECK);
return;
} else {
clusterCreate.setDsNames(dsNamesList);
}
}
List<String> warningMsgList = new ArrayList<String>();
List<String> networkNames = null;
try {
if (specFilePath != null) {
ClusterCreate clusterSpec =
CommandsUtils.getObjectByJsonString(ClusterCreate.class, CommandsUtils.dataFromFile(specFilePath));
clusterCreate.setExternalHDFS(clusterSpec.getExternalHDFS());
clusterCreate.setNodeGroups(clusterSpec.getNodeGroups());
clusterCreate.setConfiguration(clusterSpec.getConfiguration());
validateConfiguration(clusterCreate, skipConfigValidation, warningMsgList);
if (!validateHAInfo(clusterCreate.getNodeGroups())){
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER,
name, Constants.OUTPUT_OP_CREATE,
Constants.OUTPUT_OP_RESULT_FAIL,
Constants.PARAM_CLUSTER_SPEC_HA_ERROR + specFilePath);
return;
}
}
networkNames = getNetworkNames();
} catch (Exception e) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name, Constants.OUTPUT_OP_CREATE,
Constants.OUTPUT_OP_RESULT_FAIL, e.getMessage());
return;
}
if (networkNames.isEmpty()) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name,
Constants.OUTPUT_OP_CREATE, Constants.OUTPUT_OP_RESULT_FAIL,
Constants.PARAM_NETWORK_NAME + Constants.PARAM_NOT_EXISTED);
return;
} else {
if (networkName != null) {
if (validName(networkName, networkNames)) {
clusterCreate.setNetworkName(networkName);
} else {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER,
name, Constants.OUTPUT_OP_CREATE,
Constants.OUTPUT_OP_RESULT_FAIL,
Constants.PARAM_NETWORK_NAME
+ Constants.PARAM_NOT_SUPPORTED + networkNames);
return;
}
} else {
if (networkNames.size() == 1) {
clusterCreate.setNetworkName(networkNames.get(0));
} else {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER,
name, Constants.OUTPUT_OP_CREATE,
Constants.OUTPUT_OP_RESULT_FAIL,
Constants.PARAM_NETWORK_NAME
+ Constants.PARAM_NOT_SPECIFIED);
return;
}
}
}
// Validate that the specified file is correct json format and proper value.
if (specFilePath != null) {
if (!validateClusterCreate(clusterCreate)) {
return;
}
}
// process topology option
if (topology == null) {
clusterCreate.setTopologyPolicy(TopologyType.NONE);
} else {
try {
clusterCreate.setTopologyPolicy(TopologyType.valueOf(topology));
} catch (IllegalArgumentException e) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER,
name, Constants.OUTPUT_OP_CREATE,
Constants.OUTPUT_OP_RESULT_FAIL, Constants.INPUT_TOPOLOGY_INVALID_VALUE);
System.out.println("Please specify the topology type: HVE or RACK_HOST or HOST_AS_RACK");
+ return;
}
}
// rest invocation
try {
if (!showWarningMsg(clusterCreate.getName(), warningMsgList)) {
return;
}
restClient.create(clusterCreate);
CommandsUtils.printCmdSuccess(Constants.OUTPUT_OBJECT_CLUSTER, name, Constants.OUTPUT_OP_RESULT_CREAT);
} catch (CliRestException e) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name, Constants.OUTPUT_OP_CREATE,
Constants.OUTPUT_OP_RESULT_FAIL, e.getMessage());
}
}
/**
 * Show cluster information ("cluster list"): one cluster when a name is
 * given, otherwise every cluster; --detail adds per-node tables.
 */
@CliCommand(value = "cluster list", help = "Get cluster information")
public void getCluster(
        @CliOption(key = { "name" }, mandatory = false, help = "The cluster name") final String name,
        @CliOption(key = { "detail" }, mandatory = false, specifiedDefaultValue = "true", unspecifiedDefaultValue = "false", help = "flag to show node information") final boolean detail) {
    try {
        if (name != null) {
            // Single-cluster query by name.
            ClusterRead cluster = restClient.get(name);
            if (cluster != null) {
                prettyOutputClusterInfo(cluster, detail);
            }
        } else {
            // No name: list everything the management server knows about.
            ClusterRead[] clusters = restClient.getAll();
            if (clusters != null) {
                prettyOutputClustersInfo(clusters, detail);
            }
        }
    } catch (CliRestException e) {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name,
                Constants.OUTPUT_OP_LIST, Constants.OUTPUT_OP_RESULT_FAIL,
                e.getMessage());
    }
}
/**
 * Export a cluster's specification as JSON ("cluster export --spec");
 * CommandsUtils routes the output to fileName when one is given.
 */
@CliCommand(value = "cluster export --spec", help = "Export cluster specification")
public void exportClusterSpec(
        @CliOption(key = { "name" }, mandatory = true, help = "The cluster name") final String name,
        @CliOption(key = { "output" }, mandatory = false, help = "The output file name") final String fileName) {
    try {
        ClusterCreate spec = restClient.getSpec(name);
        if (spec != null) {
            CommandsUtils.prettyJsonOutput(spec, fileName);
        }
    } catch (Exception e) {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name,
                Constants.OUTPUT_OP_EXPORT, Constants.OUTPUT_OP_RESULT_FAIL,
                e.getMessage());
    }
}
/** Delete a cluster by name ("cluster delete"). */
@CliCommand(value = "cluster delete", help = "Delete a cluster")
public void deleteCluster(
        @CliOption(key = { "name" }, mandatory = true, help = "The cluster name") final String name) {
    try {
        restClient.delete(name);
        CommandsUtils.printCmdSuccess(Constants.OUTPUT_OBJECT_CLUSTER, name,
                Constants.OUTPUT_OP_RESULT_DELETE);
    } catch (CliRestException e) {
        // Report server-side failures on the console instead of rethrowing.
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name,
                Constants.OUTPUT_OP_DELETE, Constants.OUTPUT_OP_RESULT_FAIL,
                e.getMessage());
    }
}
/**
 * Start a whole cluster, one node group, or a single node ("cluster start").
 * Node names may be short ("<node>") or fully qualified
 * ("<cluster>-<group>-<index>"); short names require --nodeGroupName.
 */
@CliCommand(value = "cluster start", help = "Start a cluster")
public void startCluster(
        @CliOption(key = { "name" }, mandatory = true, help = "The cluster name") final String clusterName,
        @CliOption(key = { "nodeGroupName" }, mandatory = false, help = "The node group name") final String nodeGroupName,
        @CliOption(key = { "nodeName" }, mandatory = false, help = "The node name") final String nodeName) {
    Map<String, String> queryStrings = new HashMap<String, String>();
    queryStrings.put(Constants.QUERY_ACTION_KEY, Constants.QUERY_ACTION_START);
    try {
        // Option sanity checks before touching the server.
        if (!validateNodeGroupName(nodeGroupName)) {
            CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_NODES_IN_CLUSTER, clusterName,
                    Constants.OUTPUT_OP_START, Constants.OUTPUT_OP_RESULT_FAIL,
                    "invalid node group name");
            return;
        }
        if (!validateNodeName(clusterName, nodeGroupName, nodeName)) {
            CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_NODES_IN_CLUSTER, clusterName,
                    Constants.OUTPUT_OP_START, Constants.OUTPUT_OP_RESULT_FAIL,
                    "invalid node name");
            return;
        }
        String targetGroup = nodeGroupName;
        String targetNode = nodeName;
        if (nodeName != null) {
            if (nodeGroupName != null) {
                // Short node names get qualified as <cluster>-<group>-<node>.
                targetNode = autoCompleteNodeName(clusterName, nodeGroupName, nodeName);
            } else {
                // Derive the group from a fully-qualified node name.
                targetGroup = extractNodeGroupName(nodeName);
                if (targetGroup == null) {
                    CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_NODES_IN_CLUSTER, clusterName,
                            Constants.OUTPUT_OP_START, Constants.OUTPUT_OP_RESULT_FAIL,
                            "missing node group name");
                    return;
                }
            }
        }
        String resource = getClusterResourceName(clusterName, targetGroup, targetNode);
        if (resource != null) {
            restClient.actionOps(resource, clusterName, queryStrings);
            CommandsUtils.printCmdSuccess(Constants.OUTPUT_OBJECT_NODES_IN_CLUSTER, clusterName,
                    Constants.OUTPUT_OP_RESULT_START);
        }
    } catch (CliRestException e) {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_NODES_IN_CLUSTER, clusterName,
                Constants.OUTPUT_OP_START, Constants.OUTPUT_OP_RESULT_FAIL,
                e.getMessage());
    }
}
/**
 * Stop a whole cluster, one node group, or a single node ("cluster stop").
 * Mirrors the target-resolution rules of "cluster start".
 */
@CliCommand(value = "cluster stop", help = "Stop a cluster")
public void stopCluster(
        @CliOption(key = { "name" }, mandatory = true, help = "The cluster name") final String clusterName,
        @CliOption(key = { "nodeGroupName" }, mandatory = false, help = "The node group name") final String nodeGroupName,
        @CliOption(key = { "nodeName" }, mandatory = false, help = "The node name") final String nodeName) {
    Map<String, String> queryStrings = new HashMap<String, String>();
    queryStrings.put(Constants.QUERY_ACTION_KEY, Constants.QUERY_ACTION_STOP);
    try {
        // Option sanity checks before touching the server.
        if (!validateNodeGroupName(nodeGroupName)) {
            CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_NODES_IN_CLUSTER, clusterName,
                    Constants.OUTPUT_OP_STOP, Constants.OUTPUT_OP_RESULT_FAIL,
                    "invalid node group name");
            return;
        }
        if (!validateNodeName(clusterName, nodeGroupName, nodeName)) {
            CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_NODES_IN_CLUSTER, clusterName,
                    Constants.OUTPUT_OP_STOP, Constants.OUTPUT_OP_RESULT_FAIL,
                    "invalid node name");
            return;
        }
        String targetGroup = nodeGroupName;
        String targetNode = nodeName;
        if (nodeName != null) {
            if (nodeGroupName != null) {
                // Short node names get qualified as <cluster>-<group>-<node>.
                targetNode = autoCompleteNodeName(clusterName, nodeGroupName, nodeName);
            } else {
                // Derive the group from a fully-qualified node name.
                targetGroup = extractNodeGroupName(nodeName);
                if (targetGroup == null) {
                    CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_NODES_IN_CLUSTER, clusterName,
                            Constants.OUTPUT_OP_STOP, Constants.OUTPUT_OP_RESULT_FAIL,
                            "missing node group name");
                    return;
                }
            }
        }
        String resource = getClusterResourceName(clusterName, targetGroup, targetNode);
        if (resource != null) {
            restClient.actionOps(resource, clusterName, queryStrings);
            CommandsUtils.printCmdSuccess(Constants.OUTPUT_OBJECT_NODES_IN_CLUSTER, clusterName,
                    Constants.OUTPUT_OP_RESULT_STOP);
        }
    } catch (CliRestException e) {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_NODES_IN_CLUSTER, clusterName,
                Constants.OUTPUT_OP_STOP, Constants.OUTPUT_OP_RESULT_FAIL,
                e.getMessage());
    }
}
/**
 * Resize a node group ("cluster resize"). Only sizes of at least 2 are
 * accepted by this client-side check; the server validates the rest.
 */
@CliCommand(value = "cluster resize", help = "Resize a cluster")
public void resizeCluster(
        @CliOption(key = { "name" }, mandatory = true, help = "The cluster name") final String name,
        @CliOption(key = { "nodeGroup" }, mandatory = true, help = "The node group name") final String nodeGroup,
        @CliOption(key = { "instanceNum" }, mandatory = true, help = "The resized number of instances. It should be larger that existing one") final int instanceNum) {
    // Guard clause: reject sizes that cannot grow a group.
    if (instanceNum <= 1) {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name,
                Constants.OUTPUT_OP_RESIZE, Constants.OUTPUT_OP_RESULT_FAIL,
                Constants.INVALID_VALUE + " instanceNum=" + instanceNum);
        return;
    }
    try {
        restClient.resize(name, nodeGroup, instanceNum);
        CommandsUtils.printCmdSuccess(Constants.OUTPUT_OBJECT_CLUSTER,
                name, Constants.OUTPUT_OP_RESULT_RESIZE);
    } catch (CliRestException e) {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER,
                name, Constants.OUTPUT_OP_RESIZE,
                Constants.OUTPUT_OP_RESULT_FAIL, e.getMessage());
    }
}
/**
 * Set or query the target cluster ("cluster target").
 *
 * With --info, prints the currently targeted cluster / HDFS url / job
 * tracker url / hive server info. Otherwise targets the named cluster (or
 * the first known cluster when no name is given): scans its node groups for
 * hadoop_namenode, hadoop_jobtracker and hive_server roles and wires the
 * hadoop configuration and hive client accordingly. On any failure the
 * target settings are cleared so no stale target remains.
 *
 * BUGFIX: the "cluster ... is not found" message was missing the space
 * before "is".
 */
@CliCommand(value = "cluster target", help = "Set or query target cluster to run commands")
public void targetCluster(
        @CliOption(key = { "name" }, mandatory = false, help = "The cluster name") final String name,
        @CliOption(key = { "info" }, mandatory = false, specifiedDefaultValue = "true", unspecifiedDefaultValue = "false", help = "flag to show target information") final boolean info) {
    ClusterRead cluster = null;
    try {
        if (info) {
            // --info and --name are mutually exclusive.
            if (name != null) {
                System.out.println("Warning: can't specify option --name and --info at the same time");
                return;
            }
            String fsUrl = hadoopConfiguration.get("fs.default.name");
            String jtUrl = hadoopConfiguration.get("mapred.job.tracker");
            if ((fsUrl == null || fsUrl.length() == 0) && (jtUrl == null || jtUrl.length() == 0)) {
                System.out.println("There is no targeted cluster. Please use \"cluster target --name\" to target first");
                return;
            }
            // Print whichever target settings are populated.
            if (targetClusterName != null && targetClusterName.length() > 0) {
                System.out.println("Cluster : " + targetClusterName);
            }
            if (fsUrl != null && fsUrl.length() > 0) {
                System.out.println("HDFS url : " + fsUrl);
            }
            if (jtUrl != null && jtUrl.length() > 0) {
                System.out.println("Job Tracker url : " + jtUrl);
            }
            if (hiveInfo != null && hiveInfo.length() > 0) {
                System.out.println("Hive server info: " + hiveInfo);
            }
        } else {
            if (name == null) {
                // No name given: default to the first cluster, if any.
                ClusterRead[] clusters = restClient.getAll();
                if (clusters != null && clusters.length > 0) {
                    cluster = clusters[0];
                }
            } else {
                cluster = restClient.get(name);
            }
            if (cluster == null) {
                System.out.println("Failed to target cluster: The cluster " + name + " is not found");
                setFsURL("");
                setJobTrackerURL("");
                this.setHiveServer("");
            } else {
                targetClusterName = cluster.getName();
                // Locate the service endpoints among the cluster's node groups.
                for (NodeGroupRead nodeGroup : cluster.getNodeGroups()) {
                    for (String role : nodeGroup.getRoles()) {
                        if (role.equals("hadoop_namenode")) {
                            List<NodeRead> nodes = nodeGroup.getInstances();
                            if (nodes != null && nodes.size() > 0) {
                                String nameNodeIP = nodes.get(0).getIp();
                                setNameNode(nameNodeIP);
                            } else {
                                throw new CliRestException("no name node available");
                            }
                        }
                        if (role.equals("hadoop_jobtracker")) {
                            List<NodeRead> nodes = nodeGroup.getInstances();
                            if (nodes != null && nodes.size() > 0) {
                                String jobTrackerIP = nodes.get(0).getIp();
                                setJobTracker(jobTrackerIP);
                            } else {
                                throw new CliRestException("no job tracker available");
                            }
                        }
                        if (role.equals("hive_server")) {
                            List<NodeRead> nodes = nodeGroup.getInstances();
                            if (nodes != null && nodes.size() > 0) {
                                String hiveServerIP = nodes.get(0).getIp();
                                setHiveServer(hiveServerIP);
                            } else {
                                throw new CliRestException("no hive server available");
                            }
                        }
                    }
                }
                // An external HDFS overrides the name-node derived fs url.
                if (cluster.getExternalHDFS() != null && !cluster.getExternalHDFS().isEmpty()) {
                    setFsURL(cluster.getExternalHDFS());
                }
            }
        }
    } catch (CliRestException e) {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name, Constants.OUTPUT_OP_TARGET,
                Constants.OUTPUT_OP_RESULT_FAIL, e.getMessage());
        // Clear any partially applied target settings.
        setFsURL("");
        setJobTrackerURL("");
        this.setHiveServer("");
    }
}
// Target HDFS at the given name node on the standard name-node port 8020.
private void setNameNode(String nameNodeAddress) {
    setFsURL("hdfs://" + nameNodeAddress + ":8020");
}
// Write the HDFS url into the hadoop configuration ("" clears the target).
private void setFsURL(String fsURL) {
hadoopConfiguration.set("fs.default.name", fsURL);
}
// Target map-reduce at the given job tracker on the conventional port 8021.
private void setJobTracker(String jobTrackerAddress) {
    setJobTrackerURL(jobTrackerAddress + ":8021");
}
// Write the job tracker url into the hadoop configuration ("" clears the target).
private void setJobTrackerURL(String jobTrackerUrl){
hadoopConfiguration.set("mapred.job.tracker", jobTrackerUrl);
}
/**
 * Configure the hive client against the given server address (port 10000)
 * and remember the resulting info for "cluster target --info".
 * Throws CliRestException when configuration fails.
 * BUGFIX: corrected the user-facing message typo "faild" -> "failed".
 */
private void setHiveServer(String hiveServerAddress) {
    try {
        hiveInfo = hiveCommands.config(hiveServerAddress, 10000, null);
    } catch (Exception e) {
        throw new CliRestException("failed to set hive server address");
    }
}
/**
 * Reconfigure an existing cluster from a spec file ("cluster config").
 * Copies node groups, configuration and external HDFS from the spec,
 * validates them, confirms warnings with the user, then submits.
 */
@CliCommand(value = "cluster config", help = "Config an existing cluster")
public void configCluster(
        @CliOption(key = { "name" }, mandatory = true, help = "The cluster name") final String name,
        @CliOption(key = { "specFile" }, mandatory = true, help = "The spec file name path") final String specFilePath,
        @CliOption(key = { "skipConfigValidation" }, mandatory = false, unspecifiedDefaultValue = "false", specifiedDefaultValue = "true", help = "Skip cluster configuration validation. ") final boolean skipConfigValidation,
        @CliOption(key = { "yes" }, mandatory = false, unspecifiedDefaultValue = "false", specifiedDefaultValue = "true", help = "Answer 'yes' to all Y/N questions. ") final boolean alwaysAnswerYes) {
    this.alwaysAnswerYes = alwaysAnswerYes;
    // '-' is reserved in cluster names (node-name separator).
    if (name.indexOf("-") != -1) {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name, Constants.OUTPUT_OP_CONFIG,
                Constants.OUTPUT_OP_RESULT_FAIL, Constants.PARAM_CLUSTER + Constants.PARAM_NOT_CONTAIN_HORIZONTAL_LINE);
        return;
    }
    try {
        // Confirm the cluster exists and seed the request with its name.
        ClusterRead existing = restClient.get(name);
        ClusterCreate clusterConfig = new ClusterCreate();
        clusterConfig.setName(existing.getName());
        // Pull node groups / configuration / external HDFS from the spec file.
        ClusterCreate spec =
                CommandsUtils.getObjectByJsonString(ClusterCreate.class, CommandsUtils.dataFromFile(specFilePath));
        clusterConfig.setNodeGroups(spec.getNodeGroups());
        clusterConfig.setConfiguration(spec.getConfiguration());
        clusterConfig.setExternalHDFS(spec.getExternalHDFS());
        List<String> warningMsgList = new ArrayList<String>();
        validateConfiguration(clusterConfig, skipConfigValidation, warningMsgList);
        // Always warn about running jobs before reconfiguring.
        warningMsgList.add("Warning: " + Constants.PARAM_CLUSTER_CONFIG_RUNNING_JOB_WARNING);
        if (!showWarningMsg(clusterConfig.getName(), warningMsgList)) {
            return;
        }
        restClient.configCluster(clusterConfig);
        CommandsUtils.printCmdSuccess(Constants.OUTPUT_OBJECT_CLUSTER, name, Constants.OUTPUT_OP_RESULT_CONFIG);
    } catch (Exception e) {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name, Constants.OUTPUT_OP_CONFIG,
                Constants.OUTPUT_OP_RESULT_FAIL, e.getMessage());
        return;
    }
}
/**
 * Build the REST resource path "<cluster>[/nodegroup/<group>[/node/<node>]]",
 * or null (after printing an error) when a node is given without its group.
 */
private String getClusterResourceName(String cluster, String nodeGroup, String node) {
    assert cluster != null; // Spring shell guarantees this
    // A node can only be addressed through its node group.
    if (node != null && nodeGroup == null) {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_NODES_IN_CLUSTER, cluster,
                Constants.OUTPUT_OP_START, Constants.OUTPUT_OP_RESULT_FAIL,
                Constants.OUTPUT_OP_NODEGROUP_MISSING);
        return null;
    }
    StringBuilder path = new StringBuilder(cluster);
    if (nodeGroup != null) {
        path.append("/nodegroup/").append(nodeGroup);
        if (node != null) {
            path.append("/node/").append(node);
        }
    }
    return path.toString();
}
/**
 * Accept a null node, a bare short name, or a fully qualified
 * "<cluster>-<group>-<index>" whose prefix matches the given options.
 */
private boolean validateNodeName(String cluster, String group, String node) {
    // Null means "no node specified" — nothing to validate.
    if (node == null) {
        return true;
    }
    String[] parts = node.split("-");
    if (parts.length == 1) {
        // Short form: qualified later by autoCompleteNodeName().
        return true;
    }
    if (parts.length != 3) {
        return false;
    }
    // Long form: cluster segment must match; group segment must match
    // when a group option was supplied.
    if (!parts[0].equals(cluster)) {
        return false;
    }
    return group == null || parts[1].equals(group);
}
// Group names may not contain '-' (it is the node-name separator);
// a null group is always acceptable.
private boolean validateNodeGroupName(String group) {
    return group == null || !group.contains("-");
}
/**
 * Expand a bare node name to "<cluster>-<group>-<node>"; names already
 * containing '-' are assumed fully qualified and returned untouched.
 */
private String autoCompleteNodeName(String cluster, String group, String node) {
    assert cluster != null;
    assert group != null;
    assert node != null;
    if (node.contains("-")) {
        return node;
    }
    return cluster + "-" + group + "-" + node;
}
// "<cluster>-<group>-<index>" -> middle segment; any other shape yields null.
private String extractNodeGroupName(String node) {
    String[] segments = node.split("-");
    return segments.length == 3 ? segments[1] : null;
}
/** Resume a previously failed cluster creation via the "resume" action. */
private void resumeCreateCluster(final String name) {
    Map<String, String> queryStrings = new HashMap<String, String>();
    queryStrings.put(Constants.QUERY_ACTION_KEY, Constants.QUERY_ACTION_RESUME);
    try {
        restClient.actionOps(name, queryStrings);
        CommandsUtils.printCmdSuccess(Constants.OUTPUT_OBJECT_CLUSTER, name,
                Constants.OUTPUT_OP_RESULT_RESUME);
    } catch (CliRestException e) {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name,
                Constants.OUTPUT_OP_RESUME, Constants.OUTPUT_OP_RESULT_FAIL,
                e.getMessage());
    }
}
// Snapshot the names of all networks known to the management server
// (empty list when none exist).
private List<String> getNetworkNames() {
    List<String> names = new ArrayList<String>(0);
    NetworkRead[] networks = networkRestClient.getAll(false);
    if (networks == null) {
        return names;
    }
    for (NetworkRead network : networks) {
        names.add(network.getName());
    }
    return names;
}
// Snapshot the names of all distros known to the management server
// (empty list when none exist).
private List<String> getDistroNames() {
    List<String> names = new ArrayList<String>(0);
    DistroRead[] distros = distroRestClient.getAll();
    if (distros == null) {
        return names;
    }
    for (DistroRead distro : distros) {
        names.add(distro.getName());
    }
    return names;
}
/**
 * Whether inputName appears in validNames. Replaces a hand-rolled
 * equals-loop with the equivalent List.contains (same equals() semantics).
 */
private boolean validName(String inputName, List<String> validNames) {
    return validNames.contains(inputName);
}
/**
 * Print one cluster: a summary line, an optional external-HDFS line, then a
 * node-group table; with detail=true, also a per-group node table.
 *
 * Column rendering is reflection-driven: each column label maps to the chain
 * of getter names that CommandsUtils.printInTableFormat invokes on each row
 * object (e.g. "getStorage" then "getType").
 */
private void prettyOutputClusterInfo(ClusterRead cluster, boolean detail) {
// Topology is only shown when it is set and not NONE.
TopologyType topology = cluster.getTopologyPolicy();
if (topology == null || topology == TopologyType.NONE) {
System.out.printf("name: %s, distro: %s, status: %s",
cluster.getName(), cluster.getDistro(), cluster.getStatus());
} else {
System.out.printf("name: %s, distro: %s, topology: %s, status: %s",
cluster.getName(), cluster.getDistro(), topology, cluster.getStatus());
}
System.out.println();
if(cluster.getExternalHDFS() != null && !cluster.getExternalHDFS().isEmpty()) {
System.out.printf("external HDFS: %s\n", cluster.getExternalHDFS());
}
// Node-group table: column label -> getter chain (insertion order = column order).
LinkedHashMap<String, List<String>> ngColumnNamesWithGetMethodNames =
new LinkedHashMap<String, List<String>>();
List<NodeGroupRead> nodegroups = cluster.getNodeGroups();
if (nodegroups != null) {
ngColumnNamesWithGetMethodNames.put(
Constants.FORMAT_TABLE_COLUMN_NAME, Arrays.asList("getName"));
ngColumnNamesWithGetMethodNames.put(
Constants.FORMAT_TABLE_COLUMN_ROLES, Arrays.asList("getRoles"));
ngColumnNamesWithGetMethodNames.put(
Constants.FORMAT_TABLE_COLUMN_INSTANCE,
Arrays.asList("getInstanceNum"));
ngColumnNamesWithGetMethodNames.put(Constants.FORMAT_TABLE_COLUMN_CPU,
Arrays.asList("getCpuNum"));
ngColumnNamesWithGetMethodNames.put(Constants.FORMAT_TABLE_COLUMN_MEM,
Arrays.asList("getMemCapacityMB"));
ngColumnNamesWithGetMethodNames.put(
Constants.FORMAT_TABLE_COLUMN_TYPE,
Arrays.asList("getStorage", "getType"));
ngColumnNamesWithGetMethodNames.put(
Constants.FORMAT_TABLE_COLUMN_SIZE,
Arrays.asList("getStorage", "getSizeGB"));
try {
if (detail) {
// Per-node table columns for the detail view.
LinkedHashMap<String, List<String>> nColumnNamesWithGetMethodNames =
new LinkedHashMap<String, List<String>>();
nColumnNamesWithGetMethodNames.put(
Constants.FORMAT_TABLE_COLUMN_NAME,
Arrays.asList("getName"));
// NOTE(review): this tests RACK_AS_RACK while the CLI help advertises
// RACK_HOST / HOST_AS_RACK — confirm the enum constant is the right one.
if (topology == TopologyType.RACK_AS_RACK || topology == TopologyType.HVE) {
nColumnNamesWithGetMethodNames.put(
Constants.FORMAT_TABLE_COLUMN_RACK,
Arrays.asList("getRack"));
}
nColumnNamesWithGetMethodNames.put(
Constants.FORMAT_TABLE_COLUMN_HOST,
Arrays.asList("getHostName"));
nColumnNamesWithGetMethodNames.put(
Constants.FORMAT_TABLE_COLUMN_IP, Arrays.asList("getIp"));
nColumnNamesWithGetMethodNames.put(
Constants.FORMAT_TABLE_COLUMN_STATUS,
Arrays.asList("getStatus"));
// One group table followed by its (double-indented) node table.
for (NodeGroupRead nodegroup : nodegroups) {
CommandsUtils.printInTableFormat(
ngColumnNamesWithGetMethodNames,
new NodeGroupRead[] { nodegroup },
Constants.OUTPUT_INDENT);
List<NodeRead> nodes = nodegroup.getInstances();
if (nodes != null) {
System.out.println();
CommandsUtils.printInTableFormat(
nColumnNamesWithGetMethodNames, nodes.toArray(),
new StringBuilder().append(Constants.OUTPUT_INDENT)
.append(Constants.OUTPUT_INDENT).toString());
}
System.out.println();
}
} else
// Summary view: a single table covering all node groups.
CommandsUtils.printInTableFormat(
ngColumnNamesWithGetMethodNames, nodegroups.toArray(),
Constants.OUTPUT_INDENT);
} catch (Exception e) {
// Reflection-based printing can fail; report rather than crash the shell.
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER,
cluster.getName(), Constants.OUTPUT_OP_LIST,
Constants.OUTPUT_OP_RESULT_FAIL, e.getMessage());
}
}
}
// Print every cluster's summary, separated by blank lines.
private void prettyOutputClustersInfo(ClusterRead[] clusters, boolean detail) {
    for (ClusterRead oneCluster : clusters) {
        prettyOutputClusterInfo(oneCluster, detail);
        System.out.println();
    }
}
/**
 * Validate nodeGroupCreates member formats and values in the ClusterCreate.
 *
 * Checks instance counts, role names against the distro, placement policies,
 * the external HDFS url and the master/worker/client group mix. Hard
 * failures are printed and abort creation; soft issues are collected as
 * warnings and confirmed with the user.
 *
 * BUGFIX: the warning-prompt condition used "warningMsgList != null", which
 * is always true for this locally-allocated list; it now checks emptiness.
 *
 * @return true when creation should proceed
 */
private boolean validateClusterCreate(ClusterCreate clusterCreate) {
    // overall validation status
    boolean validated = true;
    // whether any soft issue should prompt the user
    boolean warning = false;
    // role counts across all node groups
    int masterCount = 0, workerCount = 0, clientCount = 0;
    NodeGroupCreate[] nodeGroupCreates = clusterCreate.getNodeGroups();
    if (nodeGroupCreates == null || nodeGroupCreates.length == 0) {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER,
                clusterCreate.getName(), Constants.OUTPUT_OP_CREATE,
                Constants.OUTPUT_OP_RESULT_FAIL, Constants.MULTI_INPUTS_CHECK);
        return false;
    }
    // Collected hard-failure and warning messages.
    List<String> failedMsgList = new LinkedList<String>();
    List<String> warningMsgList = new LinkedList<String>();
    // Roles offered by the selected (or default) distro.
    List<String> distroRoles = findDistroRoles(clusterCreate);
    if (distroRoles == null) {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER,
                clusterCreate.getName(), Constants.OUTPUT_OP_CREATE,
                Constants.OUTPUT_OP_RESULT_FAIL,
                Constants.PARAM_NO_DISTRO_AVAILABLE);
        return false;
    }
    // 2..5 node groups is the expected layout; anything else only warns.
    if (nodeGroupCreates.length < 2 || nodeGroupCreates.length > 5) {
        warningMsgList.add(Constants.PARAM_CLUSTER_WARNING);
        warning = true;
    }
    // Check the external HDFS url, when one is configured.
    if (clusterCreate.hasHDFSUrlConfigured() && !clusterCreate.validateHDFSUrl()) {
        failedMsgList.add("externalHDFS=" + clusterCreate.getExternalHDFS());
        validated = false;
    }
    // Check placement policies and cross-group role constraints.
    if (!clusterCreate.validateNodeGroupPlacementPolicies(failedMsgList, warningMsgList)) {
        validated = false;
    }
    if (!clusterCreate.validateNodeGroupRoles(failedMsgList)) {
        validated = false;
    }
    for (NodeGroupCreate nodeGroupCreate : nodeGroupCreates) {
        // Instance count must be non-negative.
        if (!checkInstanceNum(nodeGroupCreate, failedMsgList)) {
            validated = false;
        }
        // Every role must be known to the distro.
        if (!checkNodeGroupRoles(nodeGroupCreate, distroRoles, failedMsgList)) {
            validated = false;
        }
        // Classify the group and apply role-specific instance rules.
        NodeGroupRole role = getNodeGroupRole(nodeGroupCreate);
        switch (role) {
        case MASTER:
            masterCount++;
            // A master group must have exactly one instance.
            if (nodeGroupCreate.getInstanceNum() >= 0
                    && nodeGroupCreate.getInstanceNum() != 1) {
                validated = false;
                collectInstanceNumInvalidateMsg(nodeGroupCreate, failedMsgList);
            }
            break;
        case WORKER:
            workerCount++;
            if (nodeGroupCreate.getInstanceNum() == 0) {
                validated = false;
                collectInstanceNumInvalidateMsg(nodeGroupCreate, failedMsgList);
            } else if (isHAFlag(nodeGroupCreate)) {
                // HA on a worker group is unusual; warn but allow.
                warning = true;
            }
            break;
        case CLIENT:
            clientCount++;
            if (isHAFlag(nodeGroupCreate)) {
                warning = true;
            }
            break;
        case NONE:
            // Unrecognized role mix — warn only.
            warning = true;
            break;
        default:
        }
    }
    // Warn on unusual master/worker/client group counts.
    if ((masterCount < 1 || masterCount > 2) || (workerCount < 1 || workerCount > 2)
            || clientCount > 1) {
        warning = true;
    }
    if (!validated) {
        showFailedMsg(clusterCreate.getName(), failedMsgList);
    } else if (warning || !warningMsgList.isEmpty()) {
        // Prompt the user; declining downgrades the result to "not validated".
        if (!showWarningMsg(clusterCreate.getName(), warningMsgList)) {
            validated = false;
        }
    }
    return validated;
}
/**
 * Interactively confirm an operation. Returns true on "yes"/"y", false on
 * "no"/"n", and gives up (false) after three unrecognized answers or any
 * console error. The --yes flag bypasses the prompt entirely.
 */
private boolean isContinue(String clusterName, String operateType, String promptMsg) {
    if (this.alwaysAnswerYes) {
        return true;
    }
    boolean proceed = false;
    try {
        ConsoleReader reader = new ConsoleReader();
        reader.setDefaultPrompt(promptMsg);
        // Up to three chances to type a recognizable answer.
        for (int invalidAnswers = 0; invalidAnswers < 3; ) {
            String answer = reader.readLine().trim();
            if (answer.equalsIgnoreCase("yes") || answer.equalsIgnoreCase("y")) {
                proceed = true;
                break;
            }
            if (answer.equalsIgnoreCase("no") || answer.equalsIgnoreCase("n")) {
                proceed = false;
                break;
            }
            invalidAnswers++;
        }
    } catch (Exception e) {
        // Console failure (including a null readLine) aborts the operation.
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER,
                clusterName, operateType,
                Constants.OUTPUT_OP_RESULT_FAIL, e.getMessage());
        proceed = false;
    }
    return proceed;
}
// Classify a node group by probing each well-known role profile against its
// role list; NONE when nothing matches.
private NodeGroupRole getNodeGroupRole(NodeGroupCreate nodeGroupCreate) {
    final List<String> groupRoles = nodeGroupCreate.getRoles();
    for (NodeGroupRole candidate : NodeGroupRole.values()) {
        if (matchRole(candidate, groupRoles)) {
            return candidate;
        }
    }
    return NodeGroupRole.NONE;
}
/**
 * Check whether the given role list matches the system's specialized
 * profile for {@code role} (MASTER, WORKER or CLIENT); NONE never matches.
 *
 * Cleaned up the "cond ? true : false" anti-idiom in favor of returning the
 * boolean expressions directly (behavior unchanged).
 */
private boolean matchRole(NodeGroupRole role, List<String> roles) {
    List<String> matchRoles = new LinkedList<String>();
    switch (role) {
    case MASTER:
        if (roles.size() == 1) {
            // A lone name node or job tracker still counts as a master group.
            String r = roles.get(0);
            return Constants.ROLE_HADOOP_NAME_NODE.equals(r) ||
                    Constants.ROLE_HADOOP_JOB_TRACKER.equals(r);
        } else if (roles.size() == 2) {
            // With two roles, both master roles must be present.
            matchRoles.add(Constants.ROLE_HADOOP_NAME_NODE);
            matchRoles.add(Constants.ROLE_HADOOP_JOB_TRACKER);
            matchRoles.removeAll(roles);
            return matchRoles.isEmpty();
        }
        return false;
    case WORKER:
        if (roles.size() == 1) {
            return Constants.ROLE_HADOOP_DATANODE.equals(roles.get(0)) ||
                    Constants.ROLE_HADOOP_TASKTRACKER.equals(roles.get(0));
        } else {
            // Multiple roles: both worker roles must be present.
            matchRoles.add(Constants.ROLE_HADOOP_DATANODE);
            matchRoles.add(Constants.ROLE_HADOOP_TASKTRACKER);
            matchRoles.removeAll(roles);
            return matchRoles.isEmpty();
        }
    case CLIENT:
        if (roles.size() < 1 || roles.size() > 4) {
            return false;
        }
        // hadoop_client is mandatory and only known client roles may appear.
        matchRoles.add(Constants.ROLE_HADOOP_CLIENT);
        matchRoles.add(Constants.ROLE_HIVE);
        matchRoles.add(Constants.ROLE_HIVE_SERVER);
        matchRoles.add(Constants.ROLE_PIG);
        int diffNum = matchRoles.size() - roles.size();
        matchRoles.removeAll(roles);
        return roles.contains(Constants.ROLE_HADOOP_CLIENT)
                && diffNum >= 0 && diffNum == matchRoles.size();
    }
    return false;
}
// Print the generic cluster-layout warning to the console.
private void showWarningMsg() {
System.out.println(Constants.PARAM_CLUSTER_WARNING);
}
// A node group's instance count must be non-negative; on violation the
// failure message is appended and false is returned.
private boolean checkInstanceNum(NodeGroupCreate nodeGroup,
        List<String> failedMsgList) {
    if (nodeGroup.getInstanceNum() >= 0) {
        return true;
    }
    collectInstanceNumInvalidateMsg(nodeGroup, failedMsgList);
    return false;
}
// Record the offending value as "<group>.instanceNum=<n>" for the failure summary.
private void collectInstanceNumInvalidateMsg(NodeGroupCreate nodeGroup,
        List<String> failedMsgList) {
    failedMsgList.add(nodeGroup.getName() + "." + "instanceNum="
            + nodeGroup.getInstanceNum());
}
/**
 * Verify every role of the node group is offered by the distro; unknown
 * roles are collected into a "<group>.roles=\"a,b\"" failure entry.
 */
private boolean checkNodeGroupRoles(NodeGroupCreate nodeGroup,
        List<String> distroRoles, List<String> failedMsgList) {
    StringBuilder unknownRoles = new StringBuilder();
    boolean validated = true;
    for (String role : nodeGroup.getRoles()) {
        if (!distroRoles.contains(role)) {
            validated = false;
            unknownRoles.append(",").append(role);
        }
    }
    if (!validated) {
        // Drop the leading comma before formatting the message.
        unknownRoles.deleteCharAt(0);
        failedMsgList.add(nodeGroup.getName() + "." + "roles=" + "\""
                + unknownRoles.toString() + "\"");
    }
    return validated;
}
// Fetch the role list of the cluster's distro (or the default distro when
// none is named); null when the distro is unknown to the server.
private List<String> findDistroRoles(ClusterCreate clusterCreate) {
    String distroName = clusterCreate.getDistro();
    if (distroName == null) {
        distroName = Constants.DEFAULT_DISTRO;
    }
    DistroRead distroRead = distroRestClient.get(distroName);
    return distroRead == null ? null : distroRead.getRoles();
}
// Print a creation-failure message of the form "Invalid value(s) v1,v2,...".
private void showFailedMsg(String name, List<String> failedMsgList) {
    StringBuilder failedMsg = new StringBuilder();
    failedMsg.append(Constants.INVALID_VALUE);
    // Pluralize when more than one value failed.
    if (failedMsgList.size() > 1) {
        failedMsg.append("s");
    }
    failedMsg.append(" ");
    boolean first = true;
    for (String msg : failedMsgList) {
        if (!first) {
            failedMsg.append(",");
        }
        failedMsg.append(msg);
        first = false;
    }
    CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name,
            Constants.OUTPUT_OP_CREATE, Constants.OUTPUT_OP_RESULT_FAIL,
            failedMsg.toString());
}
/**
 * Run configuration validation for a cluster request. Black-listed settings
 * always produce warnings; white-list validation runs unless the caller
 * skipped it, in which case the server is told not to validate either.
 */
private void validateConfiguration(ClusterCreate cluster, boolean skipConfigValidation, List<String> warningMsgList) {
    ValidateResult blackListResult = validateBlackList(cluster);
    if (blackListResult != null) {
        addBlackListWarning(blackListResult, warningMsgList);
    }
    if (skipConfigValidation) {
        cluster.setValidateConfig(false);
    } else {
        ValidateResult whiteListResult = validateWhiteList(cluster);
        addWhiteListWarning(cluster.getName(), whiteListResult, warningMsgList);
    }
}
// Convenience wrapper: validate the cluster configuration against the black list.
private ValidateResult validateBlackList(ClusterCreate cluster) {
return validateConfiguration(cluster, ValidationType.BLACK_LIST);
}
// Validates the cluster's configuration entries against the white list.
private ValidateResult validateWhiteList(ClusterCreate cluster) {
return validateConfiguration(cluster, ValidationType.WHITE_LIST);
}
/**
 * Validates the cluster-level and every node-group-level configuration
 * against the given validation type, aggregating all failure names.
 *
 * @param cluster        spec whose configurations are checked
 * @param validationType BLACK_LIST or WHITE_LIST
 * @return aggregated result; type stays VALID unless some entry failed,
 *         and failure names from all levels are merged without duplicates
 */
private ValidateResult validateConfiguration(ClusterCreate cluster, ValidationType validationType) {
    ValidateResult validateResult = new ValidateResult();
    // validate cluster level configuration
    ValidateResult vr = null;
    if (cluster.getConfiguration() != null && !cluster.getConfiguration().isEmpty()) {
        vr = AppConfigValidationUtils.validateConfig(validationType, cluster.getConfiguration());
        if (vr.getType() != ValidateResult.Type.VALID) {
            validateResult.setType(vr.getType());
            validateResult.setFailureNames(vr.getFailureNames());
        }
    }
    // validate node-group level configuration
    for (NodeGroupCreate nodeGroup : cluster.getNodeGroups()) {
        if (nodeGroup.getConfiguration() != null && !nodeGroup.getConfiguration().isEmpty()) {
            vr = AppConfigValidationUtils.validateConfig(validationType, nodeGroup.getConfiguration());
            if (vr.getType() != ValidateResult.Type.VALID) {
                // NOTE: the last non-valid type wins, matching previous behavior.
                validateResult.setType(vr.getType());
                // Merge this group's failures with what was collected so far,
                // skipping duplicates.
                List<String> failureNames = new LinkedList<String>();
                failureNames.addAll(validateResult.getFailureNames());
                for (String name : vr.getFailureNames()) {
                    if (!failureNames.contains(name)) {
                        failureNames.add(name);
                    }
                }
                // BUG FIX: previously this stored vr.getFailureNames(),
                // discarding the merged list and dropping earlier failures.
                validateResult.setFailureNames(failureNames);
            }
        }
    }
    return validateResult;
}
/**
 * Appends a "not in white list" warning to the warning list when the
 * white-list validation reported invalid names.
 *
 * @param clusterName     cluster being validated (currently unused here)
 * @param whiteListResult result of the white-list validation
 * @param warningMsgList  sink for the warning; may be null
 */
private void addWhiteListWarning(final String clusterName, ValidateResult whiteListResult,
        List<String> warningMsgList) {
    if (whiteListResult.getType() != ValidateResult.Type.WHITE_LIST_INVALID_NAME) {
        return;
    }
    String warningMsg =
            getValidateWarningMsg(whiteListResult.getFailureNames(),
                    Constants.PARAM_CLUSTER_NOT_IN_WHITE_LIST_WARNING);
    if (warningMsgList != null) {
        warningMsgList.add(warningMsg);
    }
}
/**
 * Appends an "in black list" warning to the warning list when the
 * black-list validation flagged configuration names.
 *
 * @param blackListResult result of the black-list validation
 * @param warningList     sink for the warning; may be null
 */
private void addBlackListWarning(ValidateResult blackListResult, List<String> warningList) {
    if (blackListResult.getType() != ValidateResult.Type.NAME_IN_BLACK_LIST) {
        return;
    }
    String warningMsg =
            getValidateWarningMsg(blackListResult.getFailureNames(),
                    Constants.PARAM_CLUSTER_IN_BLACK_LIST_WARNING);
    if (warningList != null) {
        warningList.add(warningMsg);
    }
}
/**
 * Formats "Warning: a, b are <msg>" (or "... a is <msg>" for one name).
 *
 * @param failureNames names that failed validation; may be null or empty
 * @param warningMsg   trailing explanation text
 * @return formatted warning, or the empty string when there are no names
 */
private String getValidateWarningMsg(List<String> failureNames, String warningMsg) {
    if (failureNames == null || failureNames.isEmpty()) {
        return "";
    }
    StringBuilder sb = new StringBuilder("Warning: ");
    for (int i = 0; i < failureNames.size(); i++) {
        if (i > 0) {
            sb.append(", ");
        }
        sb.append(failureNames.get(i));
    }
    sb.append(failureNames.size() > 1 ? " are " : " is ");
    sb.append(warningMsg);
    return sb.toString();
}
/**
 * Prints all collected warnings and, if there were any, asks the user
 * whether to proceed.
 *
 * @param clusterName    cluster the warnings belong to
 * @param warningMsgList warnings to display; may be null or empty
 * @return true to proceed (no warnings, or the user confirmed)
 */
private boolean showWarningMsg(String clusterName, List<String> warningMsgList) {
    if (warningMsgList == null || warningMsgList.isEmpty()) {
        return true; // nothing to confirm
    }
    for (String message : warningMsgList) {
        System.out.println(message);
    }
    return isContinue(clusterName, Constants.OUTPUT_OP_CREATE,
            Constants.PARAM_PROMPT_CONTINUE_MESSAGE);
}
// True when the node group requests HA: its haFlag is non-blank and is not
// "off" (case-insensitive).
private boolean isHAFlag(NodeGroupCreate nodeGroupCreate) {
return !CommandsUtils.isBlank(nodeGroupCreate.getHaFlag())
&& !nodeGroupCreate.getHaFlag().equalsIgnoreCase("off");
}
/**
 * Checks that every node group's haFlag is one of "off", "on" or "ft"
 * (case-insensitive).
 *
 * A missing (null) haFlag is now accepted instead of throwing a
 * NullPointerException; sibling isHAFlag() already treats a blank flag as
 * "off", so null is taken to mean "not set / default".
 * TODO(review): confirm a null haFlag is indeed valid on the server side.
 *
 * @param nodeGroups groups from the spec file; may be null
 * @return true when all present flags are recognized
 */
private boolean validateHAInfo(NodeGroupCreate[] nodeGroups) {
    List<String> haFlagList = Arrays.asList("off", "on", "ft");
    if (nodeGroups != null) {
        for (NodeGroupCreate group : nodeGroups) {
            String haFlag = group.getHaFlag();
            if (haFlag == null) {
                // no flag in the spec: skip instead of NPE on toLowerCase()
                continue;
            }
            if (!haFlagList.contains(haFlag.toLowerCase())) {
                return false;
            }
        }
    }
    return true;
}
}
| true
| true
|
/**
 * "cluster create" command: builds a ClusterCreate from the CLI options and
 * an optional JSON spec file, validates it, and submits it via the REST
 * client. Returns early (after printing a failure) on any invalid input.
 *
 * NOTE(review): no @CliCommand annotation is visible on this method in this
 * chunk — confirm it is present above this line in the real file.
 */
public void createCluster(
@CliOption(key = { "name" }, mandatory = true, help = "The cluster name") final String name,
@CliOption(key = { "distro" }, mandatory = false, help = "Hadoop Distro") final String distro,
@CliOption(key = { "specFile" }, mandatory = false, help = "The spec file name path") final String specFilePath,
@CliOption(key = { "rpNames" }, mandatory = false, help = "Resource Pools for the cluster: use \",\" among names.") final String rpNames,
@CliOption(key = { "dsNames" }, mandatory = false, help = "Datastores for the cluster: use \",\" among names.") final String dsNames,
@CliOption(key = { "networkName" }, mandatory = false, help = "Network Name") final String networkName,
@CliOption(key = { "topology" }, mandatory = false, help = "Please specify the topology type: HVE or RACK_HOST or HOST_AS_RACK") final String topology,
@CliOption(key = { "resume" }, mandatory = false, specifiedDefaultValue = "true", unspecifiedDefaultValue = "false", help = "flag to resume cluster creation") final boolean resume,
@CliOption(key = { "skipConfigValidation" }, mandatory = false, unspecifiedDefaultValue = "false", specifiedDefaultValue = "true", help = "Skip cluster configuration validation. ") final boolean skipConfigValidation,
@CliOption(key = { "yes" }, mandatory = false, unspecifiedDefaultValue = "false", specifiedDefaultValue = "true", help = "Answer 'yes' to all Y/N questions. ") final boolean alwaysAnswerYes) {
this.alwaysAnswerYes = alwaysAnswerYes;
//validate the name: '-' is reserved as the node-name separator
if (name.indexOf("-") != -1) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name,
Constants.OUTPUT_OP_CREATE, Constants.OUTPUT_OP_RESULT_FAIL,
Constants.PARAM_CLUSTER
+ Constants.PARAM_NOT_CONTAIN_HORIZONTAL_LINE);
return;
}
//process resume: delegates entirely to resumeCreateCluster and stops here
if (resume) {
resumeCreateCluster(name);
return;
}
// build ClusterCreate object
ClusterCreate clusterCreate = new ClusterCreate();
clusterCreate.setName(name);
// First topology parse: rejects an unknown type and aborts the command.
// NOTE(review): topology is parsed AGAIN further below (see "process
// topology option"); since this first parse already returned on a bad
// value, the later catch branch appears unreachable — consider removing
// one of the two.
if (topology != null) {
try {
clusterCreate.setTopologyPolicy(TopologyType.valueOf(topology));
} catch (IllegalArgumentException ex) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name,
Constants.OUTPUT_OP_CREATE, Constants.OUTPUT_OP_RESULT_FAIL,
Constants.INVALID_VALUE + " " + "topologyType=" + topology);
return;
}
} else {
clusterCreate.setTopologyPolicy(null);
}
// Distro must be one of the server-known distros when specified.
if (distro != null) {
List<String> distroNames = getDistroNames();
if (validName(distro, distroNames)) {
clusterCreate.setDistro(distro);
} else {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER,
name, Constants.OUTPUT_OP_CREATE,
Constants.OUTPUT_OP_RESULT_FAIL, Constants.PARAM_DISTRO
+ Constants.PARAM_NOT_SUPPORTED + distroNames);
return;
}
}
clusterCreate.setType(Enum.valueOf(ClusterType.class, "HADOOP"));
// Resource pools: comma-separated; an empty parse result is an error.
if (rpNames != null) {
List<String> rpNamesList = CommandsUtils.inputsConvert(rpNames);
if (rpNamesList.isEmpty()) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER,
name, Constants.OUTPUT_OP_CREATE,
Constants.OUTPUT_OP_RESULT_FAIL,
Constants.INPUT_RPNAMES_PARAM + Constants.MULTI_INPUTS_CHECK);
return;
} else {
clusterCreate.setRpNames(rpNamesList);
}
}
// Datastores: same convention as resource pools.
if (dsNames != null) {
List<String> dsNamesList = CommandsUtils.inputsConvert(dsNames);
if (dsNamesList.isEmpty()) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER,
name, Constants.OUTPUT_OP_CREATE,
Constants.OUTPUT_OP_RESULT_FAIL,
Constants.INPUT_DSNAMES_PARAM + Constants.MULTI_INPUTS_CHECK);
return;
} else {
clusterCreate.setDsNames(dsNamesList);
}
}
List<String> warningMsgList = new ArrayList<String>();
List<String> networkNames = null;
try {
// Optional spec file: copies node groups / configuration / external
// HDFS into the spec and validates configuration + HA flags.
if (specFilePath != null) {
ClusterCreate clusterSpec =
CommandsUtils.getObjectByJsonString(ClusterCreate.class, CommandsUtils.dataFromFile(specFilePath));
clusterCreate.setExternalHDFS(clusterSpec.getExternalHDFS());
clusterCreate.setNodeGroups(clusterSpec.getNodeGroups());
clusterCreate.setConfiguration(clusterSpec.getConfiguration());
validateConfiguration(clusterCreate, skipConfigValidation, warningMsgList);
if (!validateHAInfo(clusterCreate.getNodeGroups())){
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER,
name, Constants.OUTPUT_OP_CREATE,
Constants.OUTPUT_OP_RESULT_FAIL,
Constants.PARAM_CLUSTER_SPEC_HA_ERROR + specFilePath);
return;
}
}
networkNames = getNetworkNames();
} catch (Exception e) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name, Constants.OUTPUT_OP_CREATE,
Constants.OUTPUT_OP_RESULT_FAIL, e.getMessage());
return;
}
// Network resolution: a named network must exist; with no name, exactly
// one existing network is auto-selected, otherwise it is an error.
if (networkNames.isEmpty()) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name,
Constants.OUTPUT_OP_CREATE, Constants.OUTPUT_OP_RESULT_FAIL,
Constants.PARAM_NETWORK_NAME + Constants.PARAM_NOT_EXISTED);
return;
} else {
if (networkName != null) {
if (validName(networkName, networkNames)) {
clusterCreate.setNetworkName(networkName);
} else {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER,
name, Constants.OUTPUT_OP_CREATE,
Constants.OUTPUT_OP_RESULT_FAIL,
Constants.PARAM_NETWORK_NAME
+ Constants.PARAM_NOT_SUPPORTED + networkNames);
return;
}
} else {
if (networkNames.size() == 1) {
clusterCreate.setNetworkName(networkNames.get(0));
} else {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER,
name, Constants.OUTPUT_OP_CREATE,
Constants.OUTPUT_OP_RESULT_FAIL,
Constants.PARAM_NETWORK_NAME
+ Constants.PARAM_NOT_SPECIFIED);
return;
}
}
}
// Validate that the specified file is correct json format and proper value.
if (specFilePath != null) {
if (!validateClusterCreate(clusterCreate)) {
return;
}
}
// process topology option (second parse — see NOTE above; unlike the
// first, a failure here does NOT abort the command)
if (topology == null) {
clusterCreate.setTopologyPolicy(TopologyType.NONE);
} else {
try {
clusterCreate.setTopologyPolicy(TopologyType.valueOf(topology));
} catch (IllegalArgumentException e) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER,
name, Constants.OUTPUT_OP_CREATE,
Constants.OUTPUT_OP_RESULT_FAIL, Constants.INPUT_TOPOLOGY_INVALID_VALUE);
System.out.println("Please specify the topology type: HVE or RACK_HOST or HOST_AS_RACK");
}
}
// rest invocation: last chance for the user to bail out on warnings
try {
if (!showWarningMsg(clusterCreate.getName(), warningMsgList)) {
return;
}
restClient.create(clusterCreate);
CommandsUtils.printCmdSuccess(Constants.OUTPUT_OBJECT_CLUSTER, name, Constants.OUTPUT_OP_RESULT_CREAT);
} catch (CliRestException e) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name, Constants.OUTPUT_OP_CREATE,
Constants.OUTPUT_OP_RESULT_FAIL, e.getMessage());
}
}
/**
 * "cluster list" command: prints one cluster (when a name is given) or all
 * clusters, optionally with per-node detail.
 */
@CliCommand(value = "cluster list", help = "Get cluster information")
public void getCluster(
@CliOption(key = { "name" }, mandatory = false, help = "The cluster name") final String name,
@CliOption(key = { "detail" }, mandatory = false, specifiedDefaultValue = "true", unspecifiedDefaultValue = "false", help = "flag to show node information") final boolean detail) {
    try {
        if (name != null) {
            ClusterRead cluster = restClient.get(name);
            if (cluster != null) {
                prettyOutputClusterInfo(cluster, detail);
            }
        } else {
            ClusterRead[] clusters = restClient.getAll();
            if (clusters != null) {
                prettyOutputClustersInfo(clusters, detail);
            }
        }
    } catch (CliRestException e) {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name,
                Constants.OUTPUT_OP_LIST, Constants.OUTPUT_OP_RESULT_FAIL,
                e.getMessage());
    }
}
/**
 * "cluster export --spec" command: fetches the cluster's spec from the
 * server and writes it as pretty-printed JSON (to stdout when no output
 * file is given).
 */
@CliCommand(value = "cluster export --spec", help = "Export cluster specification")
public void exportClusterSpec(
@CliOption(key = { "name" }, mandatory = true, help = "The cluster name") final String name,
@CliOption(key = { "output" }, mandatory = false, help = "The output file name") final String fileName) {
    try {
        ClusterCreate spec = restClient.getSpec(name);
        if (spec == null) {
            return; // nothing to export
        }
        CommandsUtils.prettyJsonOutput(spec, fileName);
    } catch (Exception e) {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name,
                Constants.OUTPUT_OP_EXPORT, Constants.OUTPUT_OP_RESULT_FAIL,
                e.getMessage());
    }
}
/**
 * "cluster delete" command: deletes the named cluster via REST and reports
 * success or failure on the console.
 */
@CliCommand(value = "cluster delete", help = "Delete a cluster")
public void deleteCluster(
@CliOption(key = { "name" }, mandatory = true, help = "The cluster name") final String name) {
    try {
        restClient.delete(name);
        CommandsUtils.printCmdSuccess(Constants.OUTPUT_OBJECT_CLUSTER, name,
                Constants.OUTPUT_OP_RESULT_DELETE);
    } catch (CliRestException e) {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name,
                Constants.OUTPUT_OP_DELETE, Constants.OUTPUT_OP_RESULT_FAIL,
                e.getMessage());
    }
}
/**
 * "cluster start" command: starts a whole cluster, one node group, or a
 * single node. Node names may be short ("node0") or fully qualified
 * ("cluster-group-node0"); short names are auto-completed when the group
 * is known, and the group can be extracted from a fully qualified name.
 */
@CliCommand(value = "cluster start", help = "Start a cluster")
public void startCluster(
@CliOption(key = { "name" }, mandatory = true, help = "The cluster name") final String clusterName,
@CliOption(key = { "nodeGroupName" }, mandatory = false, help = "The node group name") final String nodeGroupName,
@CliOption(key = { "nodeName" }, mandatory = false, help = "The node name") final String nodeName) {
Map<String, String> queryStrings = new HashMap<String, String>();
queryStrings
.put(Constants.QUERY_ACTION_KEY, Constants.QUERY_ACTION_START);
//rest invocation
try {
// group names may not contain '-'
if (!validateNodeGroupName(nodeGroupName)) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_NODES_IN_CLUSTER, clusterName,
Constants.OUTPUT_OP_START, Constants.OUTPUT_OP_RESULT_FAIL,
"invalid node group name");
return;
}
// node name must be short or match <cluster>-<group>-<n>
if (!validateNodeName(clusterName, nodeGroupName, nodeName)) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_NODES_IN_CLUSTER, clusterName,
Constants.OUTPUT_OP_START, Constants.OUTPUT_OP_RESULT_FAIL,
"invalid node name");
return;
}
String groupName = nodeGroupName;
String fullNodeName = nodeName;
if (nodeName != null) {
if (nodeGroupName == null) {
// derive the group from a fully qualified node name
groupName = extractNodeGroupName(nodeName);
if (groupName == null) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_NODES_IN_CLUSTER, clusterName,
Constants.OUTPUT_OP_START, Constants.OUTPUT_OP_RESULT_FAIL,
"missing node group name");
return;
}
} else {
// qualify a short node name with cluster and group
fullNodeName = autoCompleteNodeName(clusterName, nodeGroupName, nodeName);
}
}
// resource is "<cluster>[/nodegroup/<group>[/node/<node>]]"
String resource = getClusterResourceName(clusterName, groupName, fullNodeName);
if (resource != null) {
restClient.actionOps(resource, clusterName, queryStrings);
CommandsUtils.printCmdSuccess(Constants.OUTPUT_OBJECT_NODES_IN_CLUSTER, clusterName,
Constants.OUTPUT_OP_RESULT_START);
}
} catch (CliRestException e) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_NODES_IN_CLUSTER, clusterName,
Constants.OUTPUT_OP_START, Constants.OUTPUT_OP_RESULT_FAIL,
e.getMessage());
}
}
/**
 * "cluster stop" command: stops a whole cluster, one node group, or a
 * single node. Mirrors startCluster's name validation / auto-completion
 * logic with the STOP action.
 */
@CliCommand(value = "cluster stop", help = "Stop a cluster")
public void stopCluster(
@CliOption(key = { "name" }, mandatory = true, help = "The cluster name") final String clusterName,
@CliOption(key = { "nodeGroupName" }, mandatory = false, help = "The node group name") final String nodeGroupName,
@CliOption(key = { "nodeName" }, mandatory = false, help = "The node name") final String nodeName) {
Map<String, String> queryStrings = new HashMap<String, String>();
queryStrings.put(Constants.QUERY_ACTION_KEY, Constants.QUERY_ACTION_STOP);
//rest invocation
try {
// group names may not contain '-'
if (!validateNodeGroupName(nodeGroupName)) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_NODES_IN_CLUSTER, clusterName,
Constants.OUTPUT_OP_STOP, Constants.OUTPUT_OP_RESULT_FAIL,
"invalid node group name");
return;
}
// node name must be short or match <cluster>-<group>-<n>
if (!validateNodeName(clusterName, nodeGroupName, nodeName)) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_NODES_IN_CLUSTER, clusterName,
Constants.OUTPUT_OP_STOP, Constants.OUTPUT_OP_RESULT_FAIL,
"invalid node name");
return;
}
String groupName = nodeGroupName;
String fullNodeName = nodeName;
if (nodeName != null) {
if (nodeGroupName == null) {
// derive the group from a fully qualified node name
groupName = extractNodeGroupName(nodeName);
if (groupName == null) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_NODES_IN_CLUSTER, clusterName,
Constants.OUTPUT_OP_STOP, Constants.OUTPUT_OP_RESULT_FAIL,
"missing node group name");
return;
}
} else {
// qualify a short node name with cluster and group
fullNodeName = autoCompleteNodeName(clusterName, nodeGroupName, nodeName);
}
}
// resource is "<cluster>[/nodegroup/<group>[/node/<node>]]"
String resource = getClusterResourceName(clusterName, groupName, fullNodeName);
if (resource != null) {
restClient.actionOps(resource, clusterName, queryStrings);
CommandsUtils.printCmdSuccess(Constants.OUTPUT_OBJECT_NODES_IN_CLUSTER, clusterName,
Constants.OUTPUT_OP_RESULT_STOP);
}
} catch (CliRestException e) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_NODES_IN_CLUSTER, clusterName,
Constants.OUTPUT_OP_STOP, Constants.OUTPUT_OP_RESULT_FAIL,
e.getMessage());
}
}
/**
 * "cluster resize" command: resizes a node group to the given instance
 * count. Counts of 1 or less are rejected client-side.
 */
@CliCommand(value = "cluster resize", help = "Resize a cluster")
public void resizeCluster(
@CliOption(key = { "name" }, mandatory = true, help = "The cluster name") final String name,
@CliOption(key = { "nodeGroup" }, mandatory = true, help = "The node group name") final String nodeGroup,
@CliOption(key = { "instanceNum" }, mandatory = true, help = "The resized number of instances. It should be larger that existing one") final int instanceNum) {
    // guard clause: resize target must be at least 2 instances
    if (instanceNum <= 1) {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name,
                Constants.OUTPUT_OP_RESIZE, Constants.OUTPUT_OP_RESULT_FAIL,
                Constants.INVALID_VALUE + " instanceNum=" + instanceNum);
        return;
    }
    try {
        restClient.resize(name, nodeGroup, instanceNum);
        CommandsUtils.printCmdSuccess(Constants.OUTPUT_OBJECT_CLUSTER,
                name, Constants.OUTPUT_OP_RESULT_RESIZE);
    } catch (CliRestException e) {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER,
                name, Constants.OUTPUT_OP_RESIZE,
                Constants.OUTPUT_OP_RESULT_FAIL, e.getMessage());
    }
}
/**
 * "cluster target" command: either shows the currently targeted cluster
 * (--info) or targets a cluster, wiring its namenode / jobtracker / hive
 * server addresses into the local hadoop configuration and hive shell.
 * With no name, the first cluster returned by the server is targeted.
 *
 * Fix: the "cluster not found" message previously read
 * "The cluster <name>is not found" (missing space before "is").
 */
@CliCommand(value = "cluster target", help = "Set or query target cluster to run commands")
public void targetCluster(
@CliOption(key = { "name" }, mandatory = false, help = "The cluster name") final String name,
@CliOption(key = { "info" }, mandatory = false, specifiedDefaultValue = "true", unspecifiedDefaultValue = "false", help = "flag to show target information") final boolean info) {
ClusterRead cluster = null;
try {
if (info) {
// --info and --name are mutually exclusive
if (name != null) {
System.out.println("Warning: can't specify option --name and --info at the same time");
return;
}
String fsUrl = hadoopConfiguration.get("fs.default.name");
String jtUrl = hadoopConfiguration.get("mapred.job.tracker");
if ((fsUrl == null || fsUrl.length() == 0) && (jtUrl == null || jtUrl.length() == 0)) {
System.out.println("There is no targeted cluster. Please use \"cluster target --name\" to target first");
return;
}
if(targetClusterName != null && targetClusterName.length() > 0){
System.out.println("Cluster : " + targetClusterName);
}
if (fsUrl != null && fsUrl.length() > 0) {
System.out.println("HDFS url : " + fsUrl);
}
if (jtUrl != null && jtUrl.length() > 0) {
System.out.println("Job Tracker url : " + jtUrl);
}
if (hiveInfo != null && hiveInfo.length() > 0) {
System.out.println("Hive server info: " + hiveInfo);
}
} else {
if (name == null) {
// no name given: default to the first cluster on the server
ClusterRead[] clusters = restClient.getAll();
if (clusters != null && clusters.length > 0) {
cluster = clusters[0];
}
} else {
cluster = restClient.get(name);
}
if (cluster == null) {
// fixed missing space between the name and "is not found"
System.out.println("Failed to target cluster: The cluster " + name + " is not found");
setFsURL("");
setJobTrackerURL("");
this.setHiveServer("");
} else {
targetClusterName = cluster.getName();
// scan all node groups for the service roles we care about
for (NodeGroupRead nodeGroup : cluster.getNodeGroups()) {
for (String role : nodeGroup.getRoles()) {
if (role.equals("hadoop_namenode")) {
List<NodeRead> nodes = nodeGroup.getInstances();
if (nodes != null && nodes.size() > 0) {
String nameNodeIP = nodes.get(0).getIp();
setNameNode(nameNodeIP);
} else {
throw new CliRestException("no name node available");
}
}
if (role.equals("hadoop_jobtracker")) {
List<NodeRead> nodes = nodeGroup.getInstances();
if (nodes != null && nodes.size() > 0) {
String jobTrackerIP = nodes.get(0).getIp();
setJobTracker(jobTrackerIP);
} else {
throw new CliRestException("no job tracker available");
}
}
if (role.equals("hive_server")) {
List<NodeRead> nodes = nodeGroup.getInstances();
if (nodes != null && nodes.size() > 0) {
String hiveServerIP = nodes.get(0).getIp();
setHiveServer(hiveServerIP);
} else {
throw new CliRestException("no hive server available");
}
}
}
}
// an external HDFS overrides the namenode-derived fs url
if (cluster.getExternalHDFS() != null && !cluster.getExternalHDFS().isEmpty()) {
setFsURL(cluster.getExternalHDFS());
}
}
}
} catch (CliRestException e) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name, Constants.OUTPUT_OP_TARGET,
Constants.OUTPUT_OP_RESULT_FAIL, e.getMessage());
// reset any partially applied target state
setFsURL("");
setJobTrackerURL("");
this.setHiveServer("");
}
}
// Points fs.default.name at the given namenode address (HDFS port 8020).
private void setNameNode(String nameNodeAddress) {
    setFsURL(String.format("hdfs://%s:8020", nameNodeAddress));
}
// Stores the HDFS url into the local hadoop configuration.
private void setFsURL(String fsURL) {
hadoopConfiguration.set("fs.default.name", fsURL);
}
// Points mapred.job.tracker at the given address (job tracker port 8021).
private void setJobTracker(String jobTrackerAddress) {
    setJobTrackerURL(String.format("%s:8021", jobTrackerAddress));
}
// Stores the job tracker url into the local hadoop configuration.
private void setJobTrackerURL(String jobTrackerUrl){
hadoopConfiguration.set("mapred.job.tracker", jobTrackerUrl);
}
/**
 * Configures the hive command shell against the given hive server address
 * (fixed port 10000) and caches the returned info string.
 *
 * Fix: corrected the "faild" typo in the user-visible error message.
 * NOTE(review): the original cause is still dropped here; attach it if
 * CliRestException has a (String, Throwable) constructor.
 *
 * @param hiveServerAddress hive server IP or host name
 * @throws CliRestException when hive configuration fails
 */
private void setHiveServer(String hiveServerAddress) {
    try {
        hiveInfo = hiveCommands.config(hiveServerAddress, 10000, null);
    } catch (Exception e) {
        throw new CliRestException("failed to set hive server address");
    }
}
/**
 * "cluster config" command: re-applies configuration to an existing
 * cluster from a JSON spec file, after client-side validation and a
 * running-job warning the user must confirm.
 */
@CliCommand(value = "cluster config", help = "Config an existing cluster")
public void configCluster(
@CliOption(key = { "name" }, mandatory = true, help = "The cluster name") final String name,
@CliOption(key = { "specFile" }, mandatory = true, help = "The spec file name path") final String specFilePath,
@CliOption(key = { "skipConfigValidation" }, mandatory = false, unspecifiedDefaultValue = "false", specifiedDefaultValue = "true", help = "Skip cluster configuration validation. ") final boolean skipConfigValidation,
@CliOption(key = { "yes" }, mandatory = false, unspecifiedDefaultValue = "false", specifiedDefaultValue = "true", help = "Answer 'yes' to all Y/N questions. ") final boolean alwaysAnswerYes) {
    this.alwaysAnswerYes = alwaysAnswerYes;
    // cluster names may not contain '-': it is the node-name separator
    if (name.contains("-")) {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name, Constants.OUTPUT_OP_CONFIG,
                Constants.OUTPUT_OP_RESULT_FAIL, Constants.PARAM_CLUSTER + Constants.PARAM_NOT_CONTAIN_HORIZONTAL_LINE);
        return;
    }
    try {
        // confirm the cluster exists (throws otherwise) and reuse its name
        ClusterRead existing = restClient.get(name);
        ClusterCreate spec =
                CommandsUtils.getObjectByJsonString(ClusterCreate.class, CommandsUtils.dataFromFile(specFilePath));
        // build the config payload from the spec file
        ClusterCreate clusterConfig = new ClusterCreate();
        clusterConfig.setName(existing.getName());
        clusterConfig.setNodeGroups(spec.getNodeGroups());
        clusterConfig.setConfiguration(spec.getConfiguration());
        clusterConfig.setExternalHDFS(spec.getExternalHDFS());
        List<String> warnings = new ArrayList<String>();
        validateConfiguration(clusterConfig, skipConfigValidation, warnings);
        // always warn that running jobs may be affected
        warnings.add("Warning: " + Constants.PARAM_CLUSTER_CONFIG_RUNNING_JOB_WARNING);
        if (!showWarningMsg(clusterConfig.getName(), warnings)) {
            return;
        }
        restClient.configCluster(clusterConfig);
        CommandsUtils.printCmdSuccess(Constants.OUTPUT_OBJECT_CLUSTER, name, Constants.OUTPUT_OP_RESULT_CONFIG);
    } catch (Exception e) {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name, Constants.OUTPUT_OP_CONFIG,
                Constants.OUTPUT_OP_RESULT_FAIL, e.getMessage());
    }
}
/**
 * Builds the REST resource path "<cluster>[/nodegroup/<group>[/node/<node>]]".
 * A node without a group is an error (failure is printed, null returned).
 *
 * NOTE(review): the failure output always uses OUTPUT_OP_START even when
 * reached from "cluster stop" — confirm that is intended.
 */
private String getClusterResourceName(String cluster, String nodeGroup, String node) {
    assert cluster != null; // Spring shell guarantees this
    if (node != null && nodeGroup == null) {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_NODES_IN_CLUSTER, cluster,
                Constants.OUTPUT_OP_START, Constants.OUTPUT_OP_RESULT_FAIL,
                Constants.OUTPUT_OP_NODEGROUP_MISSING);
        return null;
    }
    String resource = cluster;
    if (nodeGroup != null) {
        resource += "/nodegroup/" + nodeGroup;
        if (node != null) {
            resource += "/node/" + node;
        }
    }
    return resource;
}
/**
 * Validates a node name: null or a short (dash-free) name is accepted; a
 * fully qualified name must have exactly three dash-separated parts whose
 * cluster (and group, when given) match.
 */
private boolean validateNodeName(String cluster, String group, String node) {
    if (node == null) {
        return true; // nothing to validate
    }
    String[] parts = node.split("-");
    switch (parts.length) {
    case 1:
        // short name, qualified later by autoCompleteNodeName
        return true;
    case 3:
        // full name: <cluster>-<group>-<index>
        boolean clusterMatches = parts[0].equals(cluster);
        boolean groupMatches = group == null || parts[1].equals(group);
        return clusterMatches && groupMatches;
    default:
        return false;
    }
}
// A node group name is valid when absent or free of '-', which is reserved
// as the node-name separator.
private boolean validateNodeGroupName(String group) {
    return group == null || !group.contains("-");
}
/**
 * Qualifies a short node name as "<cluster>-<group>-<node>"; names that
 * already contain a '-' are returned unchanged.
 */
private String autoCompleteNodeName(String cluster, String group, String node) {
    assert cluster != null;
    assert group != null;
    assert node != null;
    if (node.contains("-")) {
        return node; // already fully qualified
    }
    return cluster + "-" + group + "-" + node;
}
// Pulls the group out of a fully qualified "<cluster>-<group>-<index>"
// node name; null when the name is not in that form.
private String extractNodeGroupName(String node) {
    String[] parts = node.split("-");
    return parts.length == 3 ? parts[1] : null;
}
/**
 * Resumes a previously failed cluster creation by issuing the RESUME
 * action against the named cluster.
 */
private void resumeCreateCluster(final String name) {
    Map<String, String> query = new HashMap<String, String>();
    query.put(Constants.QUERY_ACTION_KEY, Constants.QUERY_ACTION_RESUME);
    try {
        restClient.actionOps(name, query);
        CommandsUtils.printCmdSuccess(Constants.OUTPUT_OBJECT_CLUSTER, name,
                Constants.OUTPUT_OP_RESULT_RESUME);
    } catch (CliRestException e) {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name,
                Constants.OUTPUT_OP_RESUME, Constants.OUTPUT_OP_RESULT_FAIL,
                e.getMessage());
    }
}
// Fetches the names of all networks known to the server (never null).
private List<String> getNetworkNames() {
    List<String> names = new ArrayList<String>(0);
    NetworkRead[] networks = networkRestClient.getAll(false);
    if (networks != null) {
        for (NetworkRead network : networks) {
            names.add(network.getName());
        }
    }
    return names;
}
// Fetches the names of all distros known to the server (never null).
private List<String> getDistroNames() {
    List<String> names = new ArrayList<String>(0);
    DistroRead[] distros = distroRestClient.getAll();
    if (distros != null) {
        for (DistroRead distro : distros) {
            names.add(distro.getName());
        }
    }
    return names;
}
// True when inputName exactly matches one of the valid names.
private boolean validName(String inputName, List<String> validNames) {
    boolean found = false;
    for (String candidate : validNames) {
        if (candidate.equals(inputName)) {
            found = true;
            break;
        }
    }
    return found;
}
/**
 * Prints one cluster: a summary line, optional external HDFS line, then a
 * node-group table (and per-node tables when detail is set). Tables are
 * driven by column-name -> getter-method-name maps consumed reflectively
 * by CommandsUtils.printInTableFormat.
 */
private void prettyOutputClusterInfo(ClusterRead cluster, boolean detail) {
TopologyType topology = cluster.getTopologyPolicy();
// topology is only shown when meaningfully set
if (topology == null || topology == TopologyType.NONE) {
System.out.printf("name: %s, distro: %s, status: %s",
cluster.getName(), cluster.getDistro(), cluster.getStatus());
} else {
System.out.printf("name: %s, distro: %s, topology: %s, status: %s",
cluster.getName(), cluster.getDistro(), topology, cluster.getStatus());
}
System.out.println();
if(cluster.getExternalHDFS() != null && !cluster.getExternalHDFS().isEmpty()) {
System.out.printf("external HDFS: %s\n", cluster.getExternalHDFS());
}
// column order of the node-group table (LinkedHashMap preserves it)
LinkedHashMap<String, List<String>> ngColumnNamesWithGetMethodNames =
new LinkedHashMap<String, List<String>>();
List<NodeGroupRead> nodegroups = cluster.getNodeGroups();
if (nodegroups != null) {
ngColumnNamesWithGetMethodNames.put(
Constants.FORMAT_TABLE_COLUMN_NAME, Arrays.asList("getName"));
ngColumnNamesWithGetMethodNames.put(
Constants.FORMAT_TABLE_COLUMN_ROLES, Arrays.asList("getRoles"));
ngColumnNamesWithGetMethodNames.put(
Constants.FORMAT_TABLE_COLUMN_INSTANCE,
Arrays.asList("getInstanceNum"));
ngColumnNamesWithGetMethodNames.put(Constants.FORMAT_TABLE_COLUMN_CPU,
Arrays.asList("getCpuNum"));
ngColumnNamesWithGetMethodNames.put(Constants.FORMAT_TABLE_COLUMN_MEM,
Arrays.asList("getMemCapacityMB"));
// two-element lists mean a chained getter call: getStorage().getType()
ngColumnNamesWithGetMethodNames.put(
Constants.FORMAT_TABLE_COLUMN_TYPE,
Arrays.asList("getStorage", "getType"));
ngColumnNamesWithGetMethodNames.put(
Constants.FORMAT_TABLE_COLUMN_SIZE,
Arrays.asList("getStorage", "getSizeGB"));
try {
if (detail) {
// per-node table columns
LinkedHashMap<String, List<String>> nColumnNamesWithGetMethodNames =
new LinkedHashMap<String, List<String>>();
nColumnNamesWithGetMethodNames.put(
Constants.FORMAT_TABLE_COLUMN_NAME,
Arrays.asList("getName"));
// NOTE(review): the help text elsewhere mentions RACK_HOST /
// HOST_AS_RACK, but this checks RACK_AS_RACK — confirm the
// intended TopologyType constant.
if (topology == TopologyType.RACK_AS_RACK || topology == TopologyType.HVE) {
nColumnNamesWithGetMethodNames.put(
Constants.FORMAT_TABLE_COLUMN_RACK,
Arrays.asList("getRack"));
}
nColumnNamesWithGetMethodNames.put(
Constants.FORMAT_TABLE_COLUMN_HOST,
Arrays.asList("getHostName"));
nColumnNamesWithGetMethodNames.put(
Constants.FORMAT_TABLE_COLUMN_IP, Arrays.asList("getIp"));
nColumnNamesWithGetMethodNames.put(
Constants.FORMAT_TABLE_COLUMN_STATUS,
Arrays.asList("getStatus"));
// one group table followed by its node table, group by group
for (NodeGroupRead nodegroup : nodegroups) {
CommandsUtils.printInTableFormat(
ngColumnNamesWithGetMethodNames,
new NodeGroupRead[] { nodegroup },
Constants.OUTPUT_INDENT);
List<NodeRead> nodes = nodegroup.getInstances();
if (nodes != null) {
System.out.println();
CommandsUtils.printInTableFormat(
nColumnNamesWithGetMethodNames, nodes.toArray(),
new StringBuilder().append(Constants.OUTPUT_INDENT)
.append(Constants.OUTPUT_INDENT).toString());
}
System.out.println();
}
} else
CommandsUtils.printInTableFormat(
ngColumnNamesWithGetMethodNames, nodegroups.toArray(),
Constants.OUTPUT_INDENT);
} catch (Exception e) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER,
cluster.getName(), Constants.OUTPUT_OP_LIST,
Constants.OUTPUT_OP_RESULT_FAIL, e.getMessage());
}
}
}
// Prints every cluster in turn, with a blank line between entries.
private void prettyOutputClustersInfo(ClusterRead[] clusters, boolean detail) {
for (ClusterRead cluster : clusters) {
prettyOutputClusterInfo(cluster, detail);
System.out.println();
}
}
/**
 * Validates node group formats and values in the ClusterCreate: external
 * HDFS url, placement policies, role assignments, per-group instance
 * counts, and master/worker/client composition. Hard failures are printed
 * via showFailedMsg; soft issues are collected as warnings the user must
 * confirm.
 *
 * @return true when validation passed (and the user confirmed any warnings)
 */
private boolean validateClusterCreate(ClusterCreate clusterCreate) {
// validation status
boolean validated = true;
// show warning message
boolean warning = false;
//role count
int masterCount = 0, workerCount = 0, clientCount = 0;
//Find NodeGroupCreate array from current ClusterCreate instance.
NodeGroupCreate[] nodeGroupCreates = clusterCreate.getNodeGroups();
if (nodeGroupCreates == null || nodeGroupCreates.length == 0) {
// a spec with no node groups is rejected outright
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER,
clusterCreate.getName(), Constants.OUTPUT_OP_CREATE,
Constants.OUTPUT_OP_RESULT_FAIL, Constants.MULTI_INPUTS_CHECK);
return !validated;
} else {
//used for collecting failed message.
List<String> failedMsgList = new LinkedList<String>();
List<String> warningMsgList = new LinkedList<String>();
//find distro roles.
List<String> distroRoles = findDistroRoles(clusterCreate);
if (distroRoles == null) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER,
clusterCreate.getName(), Constants.OUTPUT_OP_CREATE,
Constants.OUTPUT_OP_RESULT_FAIL,
Constants.PARAM_NO_DISTRO_AVAILABLE);
return !validated;
}
// unusual group counts are allowed but warned about
if (nodeGroupCreates.length < 2 || nodeGroupCreates.length > 5) {
warningMsgList.add(Constants.PARAM_CLUSTER_WARNING);
warning = true;
}
// check external HDFS
if (clusterCreate.hasHDFSUrlConfigured() && !clusterCreate.validateHDFSUrl()) {
failedMsgList.add(new StringBuilder()
.append("externalHDFS=")
.append(clusterCreate.getExternalHDFS()).toString());
validated = false;
}
// check placement policies
if (!clusterCreate.validateNodeGroupPlacementPolicies(failedMsgList, warningMsgList)) {
validated = false;
}
if (!clusterCreate.validateNodeGroupRoles(failedMsgList)) {
validated = false;
}
for (NodeGroupCreate nodeGroupCreate : nodeGroupCreates) {
// check node group's instanceNum
if (!checkInstanceNum(nodeGroupCreate, failedMsgList)) {
validated = false;
}
// check node group's roles
if (!checkNodeGroupRoles(nodeGroupCreate, distroRoles,
failedMsgList)) {
validated = false;
}
// get node group role .
NodeGroupRole role = getNodeGroupRole(nodeGroupCreate);
switch (role) {
case MASTER:
masterCount++;
// a master group must have exactly one instance
if (nodeGroupCreate.getInstanceNum() >= 0
&& nodeGroupCreate.getInstanceNum() != 1) {
validated = false;
collectInstanceNumInvalidateMsg(nodeGroupCreate,
failedMsgList);
}
break;
case WORKER:
workerCount++;
if (nodeGroupCreate.getInstanceNum() == 0) {
validated = false;
collectInstanceNumInvalidateMsg(nodeGroupCreate,
failedMsgList);
} else if (isHAFlag(nodeGroupCreate)) {
// HA on workers is unusual: warn only
warning = true;
}
break;
case CLIENT:
clientCount++;
if (isHAFlag(nodeGroupCreate)) {
warning = true;
}
break;
case NONE:
warning = true;
break;
default:
}
}
// unusual cluster composition: warn, do not fail
if ((masterCount < 1 || masterCount > 2) || (workerCount < 1 || workerCount > 2) ||
clientCount > 1) {
warning = true;
}
if (!validated) {
showFailedMsg(clusterCreate.getName(), failedMsgList);
// NOTE(review): warningMsgList is allocated above, so the
// "warningMsgList != null" below is always true; showWarningMsg
// handles the empty case itself.
} else if (warning || warningMsgList != null) {
// If warning is true,show waring message.
if (!showWarningMsg(clusterCreate.getName(), warningMsgList)) {
// When exist warning message,whether to proceed
validated = false;
}
}
return validated;
}
}
/**
 * Interactively asks the user whether to proceed with the given operation.
 * Accepts yes/y or no/n (case-insensitive); gives up after three
 * unrecognized answers. Always returns true when --yes was specified.
 *
 * @param clusterName cluster name used in the failure report on I/O errors
 * @param operateType operation label used in the failure report
 * @param promptMsg   prompt shown before each read
 * @return true to proceed, false to abort
 */
private boolean isContinue(String clusterName, String operateType, String promptMsg) {
    if (this.alwaysAnswerYes) {
        return true;
    }
    boolean proceed = true;
    try {
        ConsoleReader reader = new ConsoleReader();
        reader.setDefaultPrompt(promptMsg);
        int invalidAttempts = 0;
        while (true) {
            // Abort after three unrecognized answers.
            if (invalidAttempts >= 3) {
                proceed = false;
                break;
            }
            String answer = reader.readLine().trim();
            if (answer.equalsIgnoreCase("yes") || answer.equalsIgnoreCase("y")) {
                break;
            } else if (answer.equalsIgnoreCase("no") || answer.equalsIgnoreCase("n")) {
                proceed = false;
                break;
            } else {
                invalidAttempts++;
            }
        }
    } catch (Exception e) {
        // Any console failure (including EOF) aborts the operation.
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER,
                clusterName, operateType,
                Constants.OUTPUT_OP_RESULT_FAIL, e.getMessage());
        proceed = false;
    }
    return proceed;
}
/**
 * Classifies a node group by matching its role list against each known
 * {@link NodeGroupRole}; falls back to NONE when nothing matches.
 */
private NodeGroupRole getNodeGroupRole(NodeGroupCreate nodeGroupCreate) {
    List<String> roles = nodeGroupCreate.getRoles();
    NodeGroupRole matched = NodeGroupRole.NONE;
    for (NodeGroupRole candidate : NodeGroupRole.values()) {
        if (matchRole(candidate, roles)) {
            matched = candidate;
            break;
        }
    }
    return matched;
}
/**
 * Checks whether the supplied role list qualifies the node group as the
 * given specialized role.
 *
 * MASTER: exactly the name-node role, the job-tracker role, or both.
 * WORKER: exactly the datanode role, the tasktracker role, or both.
 * CLIENT: 1..4 roles drawn from {client, hive, hive_server, pig}, and the
 * client role must be present.
 *
 * @return true when the roles match the specialized role's definition
 */
private boolean matchRole(NodeGroupRole role, List<String> roles) {
    List<String> expected = new LinkedList<String>();
    switch (role) {
    case MASTER:
        if (roles.size() == 1) {
            String only = roles.get(0);
            return Constants.ROLE_HADOOP_NAME_NODE.equals(only)
                    || Constants.ROLE_HADOOP_JOB_TRACKER.equals(only);
        }
        if (roles.size() == 2) {
            expected.add(Constants.ROLE_HADOOP_NAME_NODE);
            expected.add(Constants.ROLE_HADOOP_JOB_TRACKER);
            expected.removeAll(roles);
            return expected.isEmpty();
        }
        return false;
    case WORKER:
        if (roles.size() == 1) {
            return Constants.ROLE_HADOOP_DATANODE.equals(roles.get(0))
                    || Constants.ROLE_HADOOP_TASKTRACKER.equals(roles.get(0));
        }
        expected.add(Constants.ROLE_HADOOP_DATANODE);
        expected.add(Constants.ROLE_HADOOP_TASKTRACKER);
        expected.removeAll(roles);
        return expected.isEmpty();
    case CLIENT:
        if (roles.size() < 1 || roles.size() > 4) {
            return false;
        }
        expected.add(Constants.ROLE_HADOOP_CLIENT);
        expected.add(Constants.ROLE_HIVE);
        expected.add(Constants.ROLE_HIVE_SERVER);
        expected.add(Constants.ROLE_PIG);
        // diffNum == remaining expected size means every supplied role was
        // one of the allowed client roles (no unknowns, no duplicates).
        int diffNum = expected.size() - roles.size();
        expected.removeAll(roles);
        return roles.contains(Constants.ROLE_HADOOP_CLIENT)
                && diffNum >= 0 && diffNum == expected.size();
    }
    return false;
}
// Prints the generic cluster-parameter warning to stdout.
private void showWarningMsg() {
System.out.println(Constants.PARAM_CLUSTER_WARNING);
}
/**
 * Validates that the node group's instance count is non-negative; on
 * failure appends a formatted message to failedMsgList.
 *
 * @return true when the instance count is valid
 */
private boolean checkInstanceNum(NodeGroupCreate nodeGroup,
        List<String> failedMsgList) {
    if (nodeGroup.getInstanceNum() >= 0) {
        return true;
    }
    collectInstanceNumInvalidateMsg(nodeGroup, failedMsgList);
    return false;
}
/**
 * Appends a "&lt;group&gt;.instanceNum=&lt;n&gt;" failure entry for an
 * invalid instance count.
 */
private void collectInstanceNumInvalidateMsg(NodeGroupCreate nodeGroup,
        List<String> failedMsgList) {
    failedMsgList.add(nodeGroup.getName() + "." + "instanceNum="
            + nodeGroup.getInstanceNum());
}
/**
 * Verifies every role of the node group is supported by the distro; on
 * failure appends one entry listing the unsupported roles, e.g.
 * {@code group.roles="r1,r2"}.
 *
 * @return true when all roles are known to the distro
 */
private boolean checkNodeGroupRoles(NodeGroupCreate nodeGroup,
        List<String> distroRoles, List<String> failedMsgList) {
    StringBuilder unknownRoles = new StringBuilder();
    boolean allKnown = true;
    for (String role : nodeGroup.getRoles()) {
        if (!distroRoles.contains(role)) {
            if (!allKnown) {
                unknownRoles.append(",");
            }
            unknownRoles.append(role);
            allKnown = false;
        }
    }
    if (allKnown) {
        return true;
    }
    failedMsgList.add(nodeGroup.getName() + "." + "roles=" + "\""
            + unknownRoles.toString() + "\"");
    return false;
}
/**
 * Fetches the role list for the cluster's distro (or the default distro
 * when none is set).
 *
 * @return the distro's roles, or null when the distro cannot be resolved
 */
private List<String> findDistroRoles(ClusterCreate clusterCreate) {
    String distroName = clusterCreate.getDistro() == null
            ? Constants.DEFAULT_DISTRO : clusterCreate.getDistro();
    DistroRead distroRead = distroRestClient.get(distroName);
    return distroRead == null ? null : distroRead.getRoles();
}
/**
 * Reports cluster-creation validation failures as one comma-separated
 * "Invalid value(s) ..." message via CommandsUtils.
 */
private void showFailedMsg(String name, List<String> failedMsgList) {
    StringBuilder message = new StringBuilder(Constants.INVALID_VALUE);
    if (failedMsgList.size() > 1) {
        // Pluralize: "Invalid values".
        message.append("s");
    }
    message.append(" ");
    boolean first = true;
    for (String msg : failedMsgList) {
        if (!first) {
            message.append(",");
        }
        message.append(msg);
        first = false;
    }
    CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name,
            Constants.OUTPUT_OP_CREATE, Constants.OUTPUT_OP_RESULT_FAIL,
            message.toString());
}
/**
 * Runs black-list validation always, and white-list validation unless the
 * user asked to skip it (in which case server-side config validation is
 * disabled on the cluster). Warnings are appended to warningMsgList.
 */
private void validateConfiguration(ClusterCreate cluster, boolean skipConfigValidation, List<String> warningMsgList) {
    ValidateResult blackListResult = validateBlackList(cluster);
    if (blackListResult != null) {
        addBlackListWarning(blackListResult, warningMsgList);
    }
    if (skipConfigValidation) {
        // Skipping client-side checks: tell the server not to validate either.
        cluster.setValidateConfig(false);
        return;
    }
    ValidateResult whiteListResult = validateWhiteList(cluster);
    addWhiteListWarning(cluster.getName(), whiteListResult, warningMsgList);
}
// Convenience wrapper: validate the cluster's configuration against the black list.
private ValidateResult validateBlackList(ClusterCreate cluster) {
return validateConfiguration(cluster, ValidationType.BLACK_LIST);
}
// Convenience wrapper: validate the cluster's configuration against the white list.
private ValidateResult validateWhiteList(ClusterCreate cluster) {
return validateConfiguration(cluster, ValidationType.WHITE_LIST);
}
/**
 * Validates the cluster-level and every node-group-level configuration
 * against the given validation type, aggregating all failure names into a
 * single result (de-duplicated across groups).
 *
 * Bug fix: the merged, de-duplicated {@code failureNames} list was built
 * and then discarded — {@code vr.getFailureNames()} was stored instead,
 * so failures collected from earlier groups (or the cluster level) were
 * lost. The merged list is now stored.
 *
 * @return aggregate validation result; type is VALID unless any level failed
 */
private ValidateResult validateConfiguration(ClusterCreate cluster, ValidationType validationType) {
    ValidateResult validateResult = new ValidateResult();
    // validate cluster level configuration
    ValidateResult vr = null;
    if (cluster.getConfiguration() != null && !cluster.getConfiguration().isEmpty()) {
        vr = AppConfigValidationUtils.validateConfig(validationType, cluster.getConfiguration());
        if (vr.getType() != ValidateResult.Type.VALID) {
            validateResult.setType(vr.getType());
            validateResult.setFailureNames(vr.getFailureNames());
        }
    }
    // validate node-group level configuration
    for (NodeGroupCreate nodeGroup : cluster.getNodeGroups()) {
        if (nodeGroup.getConfiguration() != null && !nodeGroup.getConfiguration().isEmpty()) {
            vr = AppConfigValidationUtils.validateConfig(validationType, nodeGroup.getConfiguration());
            if (vr.getType() != ValidateResult.Type.VALID) {
                // NOTE: later failures overwrite the result type; only the
                // failure-name union is preserved across levels.
                validateResult.setType(vr.getType());
                List<String> failureNames = new LinkedList<String>();
                failureNames.addAll(validateResult.getFailureNames());
                for (String name : vr.getFailureNames()) {
                    if (!failureNames.contains(name)) {
                        failureNames.add(name);
                    }
                }
                // FIX: store the merged list, not just this group's failures.
                validateResult.setFailureNames(failureNames);
            }
        }
    }
    return validateResult;
}
/**
 * Appends a warning to warningMsgList when the white-list validation found
 * configuration names outside the white list. No-op for any other result
 * type or when the list is null.
 */
private void addWhiteListWarning(final String clusterName, ValidateResult whiteListResult,
        List<String> warningMsgList) {
    if (whiteListResult.getType() != ValidateResult.Type.WHITE_LIST_INVALID_NAME) {
        return;
    }
    String warningMsg = getValidateWarningMsg(whiteListResult.getFailureNames(),
            Constants.PARAM_CLUSTER_NOT_IN_WHITE_LIST_WARNING);
    if (warningMsgList != null) {
        warningMsgList.add(warningMsg);
    }
}
/**
 * Appends a warning to warningList when the black-list validation found
 * black-listed configuration names. No-op otherwise or when the list is null.
 */
private void addBlackListWarning(ValidateResult blackListResult, List<String> warningList) {
    if (blackListResult.getType() != ValidateResult.Type.NAME_IN_BLACK_LIST) {
        return;
    }
    String warningMsg = getValidateWarningMsg(blackListResult.getFailureNames(),
            Constants.PARAM_CLUSTER_IN_BLACK_LIST_WARNING);
    if (warningList != null) {
        warningList.add(warningMsg);
    }
}
/**
 * Formats "Warning: a, b, c are &lt;warningMsg&gt;" (or "... is ..." for a
 * single name). Returns the empty string when there are no failure names.
 */
private String getValidateWarningMsg(List<String> failureNames, String warningMsg) {
    if (failureNames == null || failureNames.isEmpty()) {
        return "";
    }
    StringBuilder sb = new StringBuilder("Warning: ");
    boolean first = true;
    for (String failureName : failureNames) {
        if (!first) {
            sb.append(", ");
        }
        sb.append(failureName);
        first = false;
    }
    sb.append(failureNames.size() > 1 ? " are " : " is ");
    sb.append(warningMsg);
    return sb.toString();
}
/**
 * Prints each warning and asks the user whether to continue. Returns true
 * immediately when there is nothing to warn about.
 *
 * @return true to proceed with the operation
 */
private boolean showWarningMsg(String clusterName, List<String> warningMsgList) {
    if (warningMsgList == null || warningMsgList.isEmpty()) {
        return true;
    }
    for (String message : warningMsgList) {
        System.out.println(message);
    }
    return isContinue(clusterName, Constants.OUTPUT_OP_CREATE,
            Constants.PARAM_PROMPT_CONTINUE_MESSAGE);
}
/**
 * True when the node group requests HA, i.e. its haFlag is non-blank and
 * not "off" (case-insensitive).
 */
private boolean isHAFlag(NodeGroupCreate nodeGroupCreate) {
    String haFlag = nodeGroupCreate.getHaFlag();
    return !CommandsUtils.isBlank(haFlag) && !"off".equalsIgnoreCase(haFlag);
}
/**
 * Validates that every node group's haFlag, when present, is one of
 * "off", "on" or "ft" (case-insensitive).
 *
 * Bug fix: the original dereferenced {@code group.getHaFlag()} without a
 * null check and threw a NullPointerException for spec files that omit
 * haFlag. A null flag is now treated as "not specified" and accepted
 * (server-side defaults apply) — NOTE(review): confirm that an omitted
 * haFlag is indeed meant to be legal rather than rejected.
 *
 * @param nodeGroups node groups from the spec file; may be null
 * @return true when all specified haFlag values are recognized
 */
private boolean validateHAInfo(NodeGroupCreate[] nodeGroups) {
    List<String> haFlagList = Arrays.asList("off", "on", "ft");
    if (nodeGroups != null) {
        for (NodeGroupCreate group : nodeGroups) {
            String haFlag = group.getHaFlag();
            if (haFlag != null && !haFlagList.contains(haFlag.toLowerCase())) {
                return false;
            }
        }
    }
    return true;
}
}
|
/**
 * CLI handler that builds a ClusterCreate request from the command options
 * (optionally merged with a JSON spec file), validates it, confirms any
 * warnings with the user, and submits it through the REST client.
 * All outcomes are reported via CommandsUtils; nothing is returned.
 */
public void createCluster(
@CliOption(key = { "name" }, mandatory = true, help = "The cluster name") final String name,
@CliOption(key = { "distro" }, mandatory = false, help = "Hadoop Distro") final String distro,
@CliOption(key = { "specFile" }, mandatory = false, help = "The spec file name path") final String specFilePath,
@CliOption(key = { "rpNames" }, mandatory = false, help = "Resource Pools for the cluster: use \",\" among names.") final String rpNames,
@CliOption(key = { "dsNames" }, mandatory = false, help = "Datastores for the cluster: use \",\" among names.") final String dsNames,
@CliOption(key = { "networkName" }, mandatory = false, help = "Network Name") final String networkName,
@CliOption(key = { "topology" }, mandatory = false, help = "Please specify the topology type: HVE or RACK_HOST or HOST_AS_RACK") final String topology,
@CliOption(key = { "resume" }, mandatory = false, specifiedDefaultValue = "true", unspecifiedDefaultValue = "false", help = "flag to resume cluster creation") final boolean resume,
@CliOption(key = { "skipConfigValidation" }, mandatory = false, unspecifiedDefaultValue = "false", specifiedDefaultValue = "true", help = "Skip cluster configuration validation. ") final boolean skipConfigValidation,
@CliOption(key = { "yes" }, mandatory = false, unspecifiedDefaultValue = "false", specifiedDefaultValue = "true", help = "Answer 'yes' to all Y/N questions. ") final boolean alwaysAnswerYes) {
this.alwaysAnswerYes = alwaysAnswerYes;
//validate the name: hyphens are reserved as separators in node names
if (name.indexOf("-") != -1) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name,
Constants.OUTPUT_OP_CREATE, Constants.OUTPUT_OP_RESULT_FAIL,
Constants.PARAM_CLUSTER
+ Constants.PARAM_NOT_CONTAIN_HORIZONTAL_LINE);
return;
}
//process resume: a resumed creation bypasses all validation below
if (resume) {
resumeCreateCluster(name);
return;
}
// build ClusterCreate object
ClusterCreate clusterCreate = new ClusterCreate();
clusterCreate.setName(name);
// NOTE(review): topology is parsed twice — this block and the
// "process topology option" block further down; the later one overrides
// this result (with a different error message). Consider consolidating.
if (topology != null) {
try {
clusterCreate.setTopologyPolicy(TopologyType.valueOf(topology));
} catch (IllegalArgumentException ex) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name,
Constants.OUTPUT_OP_CREATE, Constants.OUTPUT_OP_RESULT_FAIL,
Constants.INVALID_VALUE + " " + "topologyType=" + topology);
return;
}
} else {
clusterCreate.setTopologyPolicy(null);
}
// distro must be one of the distros known to the server
if (distro != null) {
List<String> distroNames = getDistroNames();
if (validName(distro, distroNames)) {
clusterCreate.setDistro(distro);
} else {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER,
name, Constants.OUTPUT_OP_CREATE,
Constants.OUTPUT_OP_RESULT_FAIL, Constants.PARAM_DISTRO
+ Constants.PARAM_NOT_SUPPORTED + distroNames);
return;
}
}
// only HADOOP clusters are created through this command
clusterCreate.setType(Enum.valueOf(ClusterType.class, "HADOOP"));
// optional comma-separated resource-pool list
if (rpNames != null) {
List<String> rpNamesList = CommandsUtils.inputsConvert(rpNames);
if (rpNamesList.isEmpty()) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER,
name, Constants.OUTPUT_OP_CREATE,
Constants.OUTPUT_OP_RESULT_FAIL,
Constants.INPUT_RPNAMES_PARAM + Constants.MULTI_INPUTS_CHECK);
return;
} else {
clusterCreate.setRpNames(rpNamesList);
}
}
// optional comma-separated datastore list
if (dsNames != null) {
List<String> dsNamesList = CommandsUtils.inputsConvert(dsNames);
if (dsNamesList.isEmpty()) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER,
name, Constants.OUTPUT_OP_CREATE,
Constants.OUTPUT_OP_RESULT_FAIL,
Constants.INPUT_DSNAMES_PARAM + Constants.MULTI_INPUTS_CHECK);
return;
} else {
clusterCreate.setDsNames(dsNamesList);
}
}
List<String> warningMsgList = new ArrayList<String>();
List<String> networkNames = null;
try {
// merge node groups / configuration / external HDFS from the spec file
if (specFilePath != null) {
ClusterCreate clusterSpec =
CommandsUtils.getObjectByJsonString(ClusterCreate.class, CommandsUtils.dataFromFile(specFilePath));
clusterCreate.setExternalHDFS(clusterSpec.getExternalHDFS());
clusterCreate.setNodeGroups(clusterSpec.getNodeGroups());
clusterCreate.setConfiguration(clusterSpec.getConfiguration());
validateConfiguration(clusterCreate, skipConfigValidation, warningMsgList);
if (!validateHAInfo(clusterCreate.getNodeGroups())){
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER,
name, Constants.OUTPUT_OP_CREATE,
Constants.OUTPUT_OP_RESULT_FAIL,
Constants.PARAM_CLUSTER_SPEC_HA_ERROR + specFilePath);
return;
}
}
networkNames = getNetworkNames();
} catch (Exception e) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name, Constants.OUTPUT_OP_CREATE,
Constants.OUTPUT_OP_RESULT_FAIL, e.getMessage());
return;
}
// networkNames is non-null here: the only paths past the try either set
// it or returned early
if (networkNames.isEmpty()) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name,
Constants.OUTPUT_OP_CREATE, Constants.OUTPUT_OP_RESULT_FAIL,
Constants.PARAM_NETWORK_NAME + Constants.PARAM_NOT_EXISTED);
return;
} else {
if (networkName != null) {
if (validName(networkName, networkNames)) {
clusterCreate.setNetworkName(networkName);
} else {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER,
name, Constants.OUTPUT_OP_CREATE,
Constants.OUTPUT_OP_RESULT_FAIL,
Constants.PARAM_NETWORK_NAME
+ Constants.PARAM_NOT_SUPPORTED + networkNames);
return;
}
} else {
// no --networkName: only acceptable when exactly one network exists
if (networkNames.size() == 1) {
clusterCreate.setNetworkName(networkNames.get(0));
} else {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER,
name, Constants.OUTPUT_OP_CREATE,
Constants.OUTPUT_OP_RESULT_FAIL,
Constants.PARAM_NETWORK_NAME
+ Constants.PARAM_NOT_SPECIFIED);
return;
}
}
}
// Validate that the specified file is correct json format and proper value.
if (specFilePath != null) {
if (!validateClusterCreate(clusterCreate)) {
return;
}
}
// process topology option (second parse — see NOTE above); null maps to NONE
if (topology == null) {
clusterCreate.setTopologyPolicy(TopologyType.NONE);
} else {
try {
clusterCreate.setTopologyPolicy(TopologyType.valueOf(topology));
} catch (IllegalArgumentException e) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER,
name, Constants.OUTPUT_OP_CREATE,
Constants.OUTPUT_OP_RESULT_FAIL, Constants.INPUT_TOPOLOGY_INVALID_VALUE);
System.out.println("Please specify the topology type: HVE or RACK_HOST or HOST_AS_RACK");
return;
}
}
// rest invocation: confirm accumulated warnings, then submit
try {
if (!showWarningMsg(clusterCreate.getName(), warningMsgList)) {
return;
}
restClient.create(clusterCreate);
CommandsUtils.printCmdSuccess(Constants.OUTPUT_OBJECT_CLUSTER, name, Constants.OUTPUT_OP_RESULT_CREAT);
} catch (CliRestException e) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name, Constants.OUTPUT_OP_CREATE,
Constants.OUTPUT_OP_RESULT_FAIL, e.getMessage());
}
}
/**
 * CLI handler for "cluster list": prints either one named cluster or all
 * clusters, with per-node detail when requested.
 */
@CliCommand(value = "cluster list", help = "Get cluster information")
public void getCluster(
        @CliOption(key = { "name" }, mandatory = false, help = "The cluster name") final String name,
        @CliOption(key = { "detail" }, mandatory = false, specifiedDefaultValue = "true", unspecifiedDefaultValue = "false", help = "flag to show node information") final boolean detail) {
    try {
        if (name != null) {
            // single-cluster query
            ClusterRead cluster = restClient.get(name);
            if (cluster != null) {
                prettyOutputClusterInfo(cluster, detail);
            }
        } else {
            // no name: list every cluster
            ClusterRead[] clusters = restClient.getAll();
            if (clusters != null) {
                prettyOutputClustersInfo(clusters, detail);
            }
        }
    } catch (CliRestException e) {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name,
                Constants.OUTPUT_OP_LIST, Constants.OUTPUT_OP_RESULT_FAIL,
                e.getMessage());
    }
}
/**
 * CLI handler for "cluster export --spec": fetches the cluster's spec and
 * pretty-prints it as JSON, to a file when an output name is given.
 */
@CliCommand(value = "cluster export --spec", help = "Export cluster specification")
public void exportClusterSpec(
        @CliOption(key = { "name" }, mandatory = true, help = "The cluster name") final String name,
        @CliOption(key = { "output" }, mandatory = false, help = "The output file name") final String fileName) {
    try {
        ClusterCreate spec = restClient.getSpec(name);
        if (spec != null) {
            CommandsUtils.prettyJsonOutput(spec, fileName);
        }
    } catch (Exception e) {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name,
                Constants.OUTPUT_OP_EXPORT, Constants.OUTPUT_OP_RESULT_FAIL,
                e.getMessage());
    }
}
/**
 * CLI handler for "cluster delete": deletes the named cluster and reports
 * the outcome through CommandsUtils.
 */
@CliCommand(value = "cluster delete", help = "Delete a cluster")
public void deleteCluster(
        @CliOption(key = { "name" }, mandatory = true, help = "The cluster name") final String name) {
    try {
        restClient.delete(name);
        CommandsUtils.printCmdSuccess(Constants.OUTPUT_OBJECT_CLUSTER, name,
                Constants.OUTPUT_OP_RESULT_DELETE);
    } catch (CliRestException e) {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name,
                Constants.OUTPUT_OP_DELETE, Constants.OUTPUT_OP_RESULT_FAIL,
                e.getMessage());
    }
}
/**
 * CLI handler for "cluster start": starts a whole cluster, one node group,
 * or one node, depending on which options are supplied. Node names may be
 * short ("node") or fully qualified ("cluster-group-node").
 */
@CliCommand(value = "cluster start", help = "Start a cluster")
public void startCluster(
@CliOption(key = { "name" }, mandatory = true, help = "The cluster name") final String clusterName,
@CliOption(key = { "nodeGroupName" }, mandatory = false, help = "The node group name") final String nodeGroupName,
@CliOption(key = { "nodeName" }, mandatory = false, help = "The node name") final String nodeName) {
Map<String, String> queryStrings = new HashMap<String, String>();
queryStrings
.put(Constants.QUERY_ACTION_KEY, Constants.QUERY_ACTION_START);
//rest invocation
try {
// group names must not contain "-" (reserved as the node-name separator)
if (!validateNodeGroupName(nodeGroupName)) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_NODES_IN_CLUSTER, clusterName,
Constants.OUTPUT_OP_START, Constants.OUTPUT_OP_RESULT_FAIL,
"invalid node group name");
return;
}
// a qualified node name must agree with the cluster/group options
if (!validateNodeName(clusterName, nodeGroupName, nodeName)) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_NODES_IN_CLUSTER, clusterName,
Constants.OUTPUT_OP_START, Constants.OUTPUT_OP_RESULT_FAIL,
"invalid node name");
return;
}
String groupName = nodeGroupName;
String fullNodeName = nodeName;
if (nodeName != null) {
if (nodeGroupName == null) {
// derive the group from a fully qualified node name
groupName = extractNodeGroupName(nodeName);
if (groupName == null) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_NODES_IN_CLUSTER, clusterName,
Constants.OUTPUT_OP_START, Constants.OUTPUT_OP_RESULT_FAIL,
"missing node group name");
return;
}
} else {
// expand a short node name to cluster-group-node
fullNodeName = autoCompleteNodeName(clusterName, nodeGroupName, nodeName);
}
}
// build cluster[/nodegroup/...[/node/...]] resource path and invoke
String resource = getClusterResourceName(clusterName, groupName, fullNodeName);
if (resource != null) {
restClient.actionOps(resource, clusterName, queryStrings);
CommandsUtils.printCmdSuccess(Constants.OUTPUT_OBJECT_NODES_IN_CLUSTER, clusterName,
Constants.OUTPUT_OP_RESULT_START);
}
} catch (CliRestException e) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_NODES_IN_CLUSTER, clusterName,
Constants.OUTPUT_OP_START, Constants.OUTPUT_OP_RESULT_FAIL,
e.getMessage());
}
}
/**
 * CLI handler for "cluster stop": mirror image of startCluster — stops a
 * whole cluster, one node group, or one node.
 */
@CliCommand(value = "cluster stop", help = "Stop a cluster")
public void stopCluster(
@CliOption(key = { "name" }, mandatory = true, help = "The cluster name") final String clusterName,
@CliOption(key = { "nodeGroupName" }, mandatory = false, help = "The node group name") final String nodeGroupName,
@CliOption(key = { "nodeName" }, mandatory = false, help = "The node name") final String nodeName) {
Map<String, String> queryStrings = new HashMap<String, String>();
queryStrings.put(Constants.QUERY_ACTION_KEY, Constants.QUERY_ACTION_STOP);
//rest invocation
try {
// group names must not contain "-" (reserved as the node-name separator)
if (!validateNodeGroupName(nodeGroupName)) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_NODES_IN_CLUSTER, clusterName,
Constants.OUTPUT_OP_STOP, Constants.OUTPUT_OP_RESULT_FAIL,
"invalid node group name");
return;
}
// a qualified node name must agree with the cluster/group options
if (!validateNodeName(clusterName, nodeGroupName, nodeName)) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_NODES_IN_CLUSTER, clusterName,
Constants.OUTPUT_OP_STOP, Constants.OUTPUT_OP_RESULT_FAIL,
"invalid node name");
return;
}
String groupName = nodeGroupName;
String fullNodeName = nodeName;
if (nodeName != null) {
if (nodeGroupName == null) {
// derive the group from a fully qualified node name
groupName = extractNodeGroupName(nodeName);
if (groupName == null) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_NODES_IN_CLUSTER, clusterName,
Constants.OUTPUT_OP_STOP, Constants.OUTPUT_OP_RESULT_FAIL,
"missing node group name");
return;
}
} else {
// expand a short node name to cluster-group-node
fullNodeName = autoCompleteNodeName(clusterName, nodeGroupName, nodeName);
}
}
// build cluster[/nodegroup/...[/node/...]] resource path and invoke
String resource = getClusterResourceName(clusterName, groupName, fullNodeName);
if (resource != null) {
restClient.actionOps(resource, clusterName, queryStrings);
CommandsUtils.printCmdSuccess(Constants.OUTPUT_OBJECT_NODES_IN_CLUSTER, clusterName,
Constants.OUTPUT_OP_RESULT_STOP);
}
} catch (CliRestException e) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_NODES_IN_CLUSTER, clusterName,
Constants.OUTPUT_OP_STOP, Constants.OUTPUT_OP_RESULT_FAIL,
e.getMessage());
}
}
/**
 * CLI handler for "cluster resize": grows the given node group to the
 * requested instance count.
 */
@CliCommand(value = "cluster resize", help = "Resize a cluster")
public void resizeCluster(
@CliOption(key = { "name" }, mandatory = true, help = "The cluster name") final String name,
@CliOption(key = { "nodeGroup" }, mandatory = true, help = "The node group name") final String nodeGroup,
@CliOption(key = { "instanceNum" }, mandatory = true, help = "The resized number of instances. It should be larger that existing one") final int instanceNum) {
// NOTE(review): the client only checks instanceNum > 1, i.e. resizing to
// exactly 1 instance is rejected here even though the help text only
// requires "larger than existing" — confirm against server semantics.
if (instanceNum > 1) {
try {
restClient.resize(name, nodeGroup, instanceNum);
CommandsUtils.printCmdSuccess(Constants.OUTPUT_OBJECT_CLUSTER,
name, Constants.OUTPUT_OP_RESULT_RESIZE);
} catch (CliRestException e) {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER,
name, Constants.OUTPUT_OP_RESIZE,
Constants.OUTPUT_OP_RESULT_FAIL, e.getMessage());
}
} else {
CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name,
Constants.OUTPUT_OP_RESIZE, Constants.OUTPUT_OP_RESULT_FAIL,
Constants.INVALID_VALUE + " instanceNum=" + instanceNum);
}
}
/**
 * CLI handler for "cluster target": with --info, prints the currently
 * targeted cluster's HDFS / JobTracker / Hive endpoints; otherwise targets
 * the named cluster (or the first cluster when no name is given) by
 * pointing the embedded Hadoop configuration and Hive client at its nodes.
 *
 * Bug fix: the "cluster not found" message was missing a space before
 * "is not found".
 */
@CliCommand(value = "cluster target", help = "Set or query target cluster to run commands")
public void targetCluster(
        @CliOption(key = { "name" }, mandatory = false, help = "The cluster name") final String name,
        @CliOption(key = { "info" }, mandatory = false, specifiedDefaultValue = "true", unspecifiedDefaultValue = "false", help = "flag to show target information") final boolean info) {
    ClusterRead cluster = null;
    try {
        if (info) {
            // --info and --name are mutually exclusive
            if (name != null) {
                System.out.println("Warning: can't specify option --name and --info at the same time");
                return;
            }
            String fsUrl = hadoopConfiguration.get("fs.default.name");
            String jtUrl = hadoopConfiguration.get("mapred.job.tracker");
            if ((fsUrl == null || fsUrl.length() == 0) && (jtUrl == null || jtUrl.length() == 0)) {
                System.out.println("There is no targeted cluster. Please use \"cluster target --name\" to target first");
                return;
            }
            if (targetClusterName != null && targetClusterName.length() > 0) {
                System.out.println("Cluster : " + targetClusterName);
            }
            if (fsUrl != null && fsUrl.length() > 0) {
                System.out.println("HDFS url : " + fsUrl);
            }
            if (jtUrl != null && jtUrl.length() > 0) {
                System.out.println("Job Tracker url : " + jtUrl);
            }
            if (hiveInfo != null && hiveInfo.length() > 0) {
                System.out.println("Hive server info: " + hiveInfo);
            }
        } else {
            if (name == null) {
                // no name: default to the first cluster, if any
                ClusterRead[] clusters = restClient.getAll();
                if (clusters != null && clusters.length > 0) {
                    cluster = clusters[0];
                }
            } else {
                cluster = restClient.get(name);
            }
            if (cluster == null) {
                // FIX: added the missing space before "is not found"
                System.out.println("Failed to target cluster: The cluster " + name + " is not found");
                setFsURL("");
                setJobTrackerURL("");
                this.setHiveServer("");
            } else {
                targetClusterName = cluster.getName();
                // scan every node group for the service roles we can target
                for (NodeGroupRead nodeGroup : cluster.getNodeGroups()) {
                    for (String role : nodeGroup.getRoles()) {
                        if (role.equals("hadoop_namenode")) {
                            List<NodeRead> nodes = nodeGroup.getInstances();
                            if (nodes != null && nodes.size() > 0) {
                                String nameNodeIP = nodes.get(0).getIp();
                                setNameNode(nameNodeIP);
                            } else {
                                throw new CliRestException("no name node available");
                            }
                        }
                        if (role.equals("hadoop_jobtracker")) {
                            List<NodeRead> nodes = nodeGroup.getInstances();
                            if (nodes != null && nodes.size() > 0) {
                                String jobTrackerIP = nodes.get(0).getIp();
                                setJobTracker(jobTrackerIP);
                            } else {
                                throw new CliRestException("no job tracker available");
                            }
                        }
                        if (role.equals("hive_server")) {
                            List<NodeRead> nodes = nodeGroup.getInstances();
                            if (nodes != null && nodes.size() > 0) {
                                String hiveServerIP = nodes.get(0).getIp();
                                setHiveServer(hiveServerIP);
                            } else {
                                throw new CliRestException("no hive server available");
                            }
                        }
                    }
                }
                // external HDFS, when configured, overrides the namenode URL
                if (cluster.getExternalHDFS() != null && !cluster.getExternalHDFS().isEmpty()) {
                    setFsURL(cluster.getExternalHDFS());
                }
            }
        }
    } catch (CliRestException e) {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name, Constants.OUTPUT_OP_TARGET,
                Constants.OUTPUT_OP_RESULT_FAIL, e.getMessage());
        // reset any partially applied target state
        setFsURL("");
        setJobTrackerURL("");
        this.setHiveServer("");
    }
}
/**
 * Targets HDFS at the given namenode address using the default
 * namenode RPC port 8020.
 */
private void setNameNode(String nameNodeAddress) {
    setFsURL("hdfs://" + nameNodeAddress + ":8020");
}
// Points the embedded Hadoop configuration at the given default filesystem
// ("fs.default.name" is the classic pre-YARN key).
private void setFsURL(String fsURL) {
hadoopConfiguration.set("fs.default.name", fsURL);
}
/**
 * Targets MapReduce at the given job-tracker address using the default
 * job-tracker port 8021.
 */
private void setJobTracker(String jobTrackerAddress) {
    setJobTrackerURL(jobTrackerAddress + ":8021");
}
// Stores the job-tracker endpoint in the embedded Hadoop configuration.
private void setJobTrackerURL(String jobTrackerUrl){
hadoopConfiguration.set("mapred.job.tracker", jobTrackerUrl);
}
/**
 * Points the Hive shell commands at the given server address on the
 * conventional HiveServer Thrift port 10000.
 *
 * Bug fix: corrected the typo "faild" -> "failed" in the error message.
 *
 * @throws CliRestException when the Hive client cannot be configured
 */
private void setHiveServer(String hiveServerAddress) {
    try {
        hiveInfo = hiveCommands.config(hiveServerAddress, 10000, null);
    } catch (Exception e) {
        throw new CliRestException("failed to set hive server address");
    }
}
/**
 * CLI handler for "cluster config": re-applies the configuration from a
 * spec file to an existing cluster after validating it and confirming the
 * warnings (including the running-job warning) with the user.
 */
@CliCommand(value = "cluster config", help = "Config an existing cluster")
public void configCluster(
        @CliOption(key = { "name" }, mandatory = true, help = "The cluster name") final String name,
        @CliOption(key = { "specFile" }, mandatory = true, help = "The spec file name path") final String specFilePath,
        @CliOption(key = { "skipConfigValidation" }, mandatory = false, unspecifiedDefaultValue = "false", specifiedDefaultValue = "true", help = "Skip cluster configuration validation. ") final boolean skipConfigValidation,
        @CliOption(key = { "yes" }, mandatory = false, unspecifiedDefaultValue = "false", specifiedDefaultValue = "true", help = "Answer 'yes' to all Y/N questions. ") final boolean alwaysAnswerYes) {
    this.alwaysAnswerYes = alwaysAnswerYes;
    // cluster names never contain "-", so reject early
    if (name.indexOf("-") != -1) {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name, Constants.OUTPUT_OP_CONFIG,
                Constants.OUTPUT_OP_RESULT_FAIL, Constants.PARAM_CLUSTER + Constants.PARAM_NOT_CONTAIN_HORIZONTAL_LINE);
        return;
    }
    try {
        ClusterRead clusterRead = restClient.get(name);
        // assemble the config request from the existing cluster + spec file
        ClusterCreate clusterConfig = new ClusterCreate();
        clusterConfig.setName(clusterRead.getName());
        ClusterCreate clusterSpec =
                CommandsUtils.getObjectByJsonString(ClusterCreate.class, CommandsUtils.dataFromFile(specFilePath));
        clusterConfig.setNodeGroups(clusterSpec.getNodeGroups());
        clusterConfig.setConfiguration(clusterSpec.getConfiguration());
        clusterConfig.setExternalHDFS(clusterSpec.getExternalHDFS());
        List<String> warningMsgList = new ArrayList<String>();
        validateConfiguration(clusterConfig, skipConfigValidation, warningMsgList);
        // always warn that reconfiguring may disturb running jobs
        warningMsgList.add("Warning: " + Constants.PARAM_CLUSTER_CONFIG_RUNNING_JOB_WARNING);
        if (!showWarningMsg(clusterConfig.getName(), warningMsgList)) {
            return;
        }
        restClient.configCluster(clusterConfig);
        CommandsUtils.printCmdSuccess(Constants.OUTPUT_OBJECT_CLUSTER, name, Constants.OUTPUT_OP_RESULT_CONFIG);
    } catch (Exception e) {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name, Constants.OUTPUT_OP_CONFIG,
                Constants.OUTPUT_OP_RESULT_FAIL, e.getMessage());
        return;
    }
}
/**
 * Builds the REST resource path "cluster[/nodegroup/G[/node/N]]".
 * A node without a node group is invalid; that case is reported and null
 * is returned.
 */
private String getClusterResourceName(String cluster, String nodeGroup, String node) {
    assert cluster != null; // Spring shell guarantees this
    if (node != null && nodeGroup == null) {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_NODES_IN_CLUSTER, cluster,
                Constants.OUTPUT_OP_START, Constants.OUTPUT_OP_RESULT_FAIL,
                Constants.OUTPUT_OP_NODEGROUP_MISSING);
        return null;
    }
    String resource = cluster;
    if (nodeGroup != null) {
        resource += "/nodegroup/" + nodeGroup;
        if (node != null) {
            resource += "/node/" + node;
        }
    }
    return resource;
}
/**
 * Accepts a node name that is either a bare short name (no "-") or a
 * fully qualified "cluster-group-node" whose cluster segment matches, and
 * whose group segment matches when a group was supplied. A null node is
 * always valid.
 */
private boolean validateNodeName(String cluster, String group, String node) {
    if (node == null) {
        return true;
    }
    String[] parts = node.split("-");
    if (parts.length == 1) {
        // short name: nothing to cross-check
        return true;
    }
    if (parts.length != 3) {
        return false;
    }
    if (!parts[0].equals(cluster)) {
        return false;
    }
    return group == null || parts[1].equals(group);
}
/**
 * A node group name is valid when absent or free of "-", which is
 * reserved as the node-name separator.
 */
private boolean validateNodeGroupName(String group) {
    return group == null || !group.contains("-");
}
/**
 * Expands a short node name to the fully qualified
 * "cluster-group-node" form; names that already contain "-" are returned
 * unchanged.
 */
private String autoCompleteNodeName(String cluster, String group, String node) {
    assert cluster != null;
    assert group != null;
    assert node != null;
    if (node.contains("-")) {
        return node;
    }
    return cluster + "-" + group + "-" + node;
}
/**
 * Pulls the group segment out of a fully qualified
 * "cluster-group-node" name; returns null for any other shape.
 */
private String extractNodeGroupName(String node) {
    String[] parts = node.split("-");
    return parts.length == 3 ? parts[1] : null;
}
/**
 * Resumes a previously failed cluster creation by issuing the "resume"
 * action against the cluster resource and reporting the outcome.
 */
private void resumeCreateCluster(final String name) {
    Map<String, String> queryStrings = new HashMap<String, String>();
    queryStrings.put(Constants.QUERY_ACTION_KEY, Constants.QUERY_ACTION_RESUME);
    try {
        restClient.actionOps(name, queryStrings);
        CommandsUtils.printCmdSuccess(Constants.OUTPUT_OBJECT_CLUSTER, name,
                Constants.OUTPUT_OP_RESULT_RESUME);
    } catch (CliRestException e) {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name,
                Constants.OUTPUT_OP_RESUME, Constants.OUTPUT_OP_RESULT_FAIL,
                e.getMessage());
    }
}
/**
 * Fetches the names of all networks known to the server.
 *
 * @return network names; empty (never null) when none exist
 */
private List<String> getNetworkNames() {
    List<String> names = new ArrayList<String>(0);
    NetworkRead[] networks = networkRestClient.getAll(false);
    if (networks != null) {
        for (NetworkRead network : networks) {
            names.add(network.getName());
        }
    }
    return names;
}
/**
 * Fetches the names of all Hadoop distros known to the server.
 *
 * @return distro names; empty (never null) when none exist
 */
private List<String> getDistroNames() {
    List<String> names = new ArrayList<String>(0);
    DistroRead[] distros = distroRestClient.getAll();
    if (distros != null) {
        for (DistroRead distro : distros) {
            names.add(distro.getName());
        }
    }
    return names;
}
/**
 * True when inputName appears in validNames.
 *
 * Idiom fix: replaced the hand-rolled linear scan with
 * {@link List#contains}, which performs the same equals()-based lookup
 * (and additionally tolerates null entries in the list, where the old
 * loop would have thrown a NullPointerException).
 */
private boolean validName(String inputName, List<String> validNames) {
    return validNames.contains(inputName);
}
/**
 * Prints a human-readable summary of one cluster to stdout: a header
 * line with name/distro/status (plus topology when one is configured),
 * the external HDFS URI when set, and a table of the cluster's node
 * groups. With {@code detail} set, each node group row is followed by
 * a nested, further-indented table of its node instances.
 *
 * @param cluster cluster to print
 * @param detail  whether to also print per-node instance tables
 */
private void prettyOutputClusterInfo(ClusterRead cluster, boolean detail) {
    TopologyType topology = cluster.getTopologyPolicy();
    // Omit the topology column when no topology policy applies.
    if (topology == null || topology == TopologyType.NONE) {
        System.out.printf("name: %s, distro: %s, status: %s",
                cluster.getName(), cluster.getDistro(), cluster.getStatus());
    } else {
        System.out.printf("name: %s, distro: %s, topology: %s, status: %s",
                cluster.getName(), cluster.getDistro(), topology, cluster.getStatus());
    }
    System.out.println();
    if(cluster.getExternalHDFS() != null && !cluster.getExternalHDFS().isEmpty()) {
        System.out.printf("external HDFS: %s\n", cluster.getExternalHDFS());
    }
    // Maps each table column header to the chain of getter names that
    // CommandsUtils.printInTableFormat follows to produce the cell value
    // (e.g. "getStorage" then "getType").
    LinkedHashMap<String, List<String>> ngColumnNamesWithGetMethodNames =
            new LinkedHashMap<String, List<String>>();
    List<NodeGroupRead> nodegroups = cluster.getNodeGroups();
    if (nodegroups != null) {
        ngColumnNamesWithGetMethodNames.put(
                Constants.FORMAT_TABLE_COLUMN_NAME, Arrays.asList("getName"));
        ngColumnNamesWithGetMethodNames.put(
                Constants.FORMAT_TABLE_COLUMN_ROLES, Arrays.asList("getRoles"));
        ngColumnNamesWithGetMethodNames.put(
                Constants.FORMAT_TABLE_COLUMN_INSTANCE,
                Arrays.asList("getInstanceNum"));
        ngColumnNamesWithGetMethodNames.put(Constants.FORMAT_TABLE_COLUMN_CPU,
                Arrays.asList("getCpuNum"));
        ngColumnNamesWithGetMethodNames.put(Constants.FORMAT_TABLE_COLUMN_MEM,
                Arrays.asList("getMemCapacityMB"));
        ngColumnNamesWithGetMethodNames.put(
                Constants.FORMAT_TABLE_COLUMN_TYPE,
                Arrays.asList("getStorage", "getType"));
        ngColumnNamesWithGetMethodNames.put(
                Constants.FORMAT_TABLE_COLUMN_SIZE,
                Arrays.asList("getStorage", "getSizeGB"));
        try {
            if (detail) {
                // Column layout for the nested per-node tables.
                LinkedHashMap<String, List<String>> nColumnNamesWithGetMethodNames =
                        new LinkedHashMap<String, List<String>>();
                nColumnNamesWithGetMethodNames.put(
                        Constants.FORMAT_TABLE_COLUMN_NAME,
                        Arrays.asList("getName"));
                // The rack column is only shown for rack-aware topologies.
                if (topology == TopologyType.RACK_AS_RACK || topology == TopologyType.HVE) {
                    nColumnNamesWithGetMethodNames.put(
                            Constants.FORMAT_TABLE_COLUMN_RACK,
                            Arrays.asList("getRack"));
                }
                nColumnNamesWithGetMethodNames.put(
                        Constants.FORMAT_TABLE_COLUMN_HOST,
                        Arrays.asList("getHostName"));
                nColumnNamesWithGetMethodNames.put(
                        Constants.FORMAT_TABLE_COLUMN_IP, Arrays.asList("getIp"));
                nColumnNamesWithGetMethodNames.put(
                        Constants.FORMAT_TABLE_COLUMN_STATUS,
                        Arrays.asList("getStatus"));
                // Print each node group as its own one-row table, then its
                // node instances doubly indented beneath it.
                for (NodeGroupRead nodegroup : nodegroups) {
                    CommandsUtils.printInTableFormat(
                            ngColumnNamesWithGetMethodNames,
                            new NodeGroupRead[] { nodegroup },
                            Constants.OUTPUT_INDENT);
                    List<NodeRead> nodes = nodegroup.getInstances();
                    if (nodes != null) {
                        System.out.println();
                        CommandsUtils.printInTableFormat(
                                nColumnNamesWithGetMethodNames, nodes.toArray(),
                                new StringBuilder().append(Constants.OUTPUT_INDENT)
                                        .append(Constants.OUTPUT_INDENT).toString());
                    }
                    System.out.println();
                }
            } else
                // Summary mode: all node groups in one table.
                CommandsUtils.printInTableFormat(
                        ngColumnNamesWithGetMethodNames, nodegroups.toArray(),
                        Constants.OUTPUT_INDENT);
        } catch (Exception e) {
            // Report a formatting failure instead of propagating it.
            CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER,
                    cluster.getName(), Constants.OUTPUT_OP_LIST,
                    Constants.OUTPUT_OP_RESULT_FAIL, e.getMessage());
        }
    }
}
/**
 * Prints a summary of every cluster, each followed by a blank line.
 */
private void prettyOutputClustersInfo(ClusterRead[] clusters, boolean detail) {
    for (ClusterRead entry : clusters) {
        prettyOutputClusterInfo(entry, detail);
        System.out.println();
    }
}
/**
 * Validates the nodeGroupCreates member formats and values of a
 * ClusterCreate: presence of node groups, availability of distro
 * roles, the external HDFS URL, placement policies, per-group
 * instance counts and roles, and the expected master/worker/client
 * layout. Collected failures are printed; collected warnings are
 * shown and the user is asked whether to proceed.
 *
 * @param clusterCreate cluster specification to validate
 * @return true when validation passed (and the user confirmed any
 *         warnings); false otherwise
 */
private boolean validateClusterCreate(ClusterCreate clusterCreate) {
    // validation status
    boolean validated = true;
    // show warning message
    boolean warning = false;
    // counts of node groups classified as each specialized role
    int masterCount = 0, workerCount = 0, clientCount = 0;
    // Find NodeGroupCreate array from current ClusterCreate instance.
    NodeGroupCreate[] nodeGroupCreates = clusterCreate.getNodeGroups();
    if (nodeGroupCreates == null || nodeGroupCreates.length == 0) {
        // No node groups at all: fail immediately.
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER,
                clusterCreate.getName(), Constants.OUTPUT_OP_CREATE,
                Constants.OUTPUT_OP_RESULT_FAIL, Constants.MULTI_INPUTS_CHECK);
        return !validated;
    } else {
        // used for collecting failed messages.
        List<String> failedMsgList = new LinkedList<String>();
        List<String> warningMsgList = new LinkedList<String>();
        // find distro roles.
        List<String> distroRoles = findDistroRoles(clusterCreate);
        if (distroRoles == null) {
            CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER,
                    clusterCreate.getName(), Constants.OUTPUT_OP_CREATE,
                    Constants.OUTPUT_OP_RESULT_FAIL,
                    Constants.PARAM_NO_DISTRO_AVAILABLE);
            return !validated;
        }
        // Unusual group counts are a warning, not a failure.
        if (nodeGroupCreates.length < 2 || nodeGroupCreates.length > 5) {
            warningMsgList.add(Constants.PARAM_CLUSTER_WARNING);
            warning = true;
        }
        // check external HDFS
        if (clusterCreate.hasHDFSUrlConfigured() && !clusterCreate.validateHDFSUrl()) {
            failedMsgList.add(new StringBuilder()
                    .append("externalHDFS=")
                    .append(clusterCreate.getExternalHDFS()).toString());
            validated = false;
        }
        // check placement policies
        if (!clusterCreate.validateNodeGroupPlacementPolicies(failedMsgList, warningMsgList)) {
            validated = false;
        }
        if (!clusterCreate.validateNodeGroupRoles(failedMsgList)) {
            validated = false;
        }
        for (NodeGroupCreate nodeGroupCreate : nodeGroupCreates) {
            // check node group's instanceNum
            if (!checkInstanceNum(nodeGroupCreate, failedMsgList)) {
                validated = false;
            }
            // check node group's roles
            if (!checkNodeGroupRoles(nodeGroupCreate, distroRoles,
                    failedMsgList)) {
                validated = false;
            }
            // Classify the group and enforce role-specific instance rules.
            NodeGroupRole role = getNodeGroupRole(nodeGroupCreate);
            switch (role) {
            case MASTER:
                masterCount++;
                // A master group must have exactly one instance
                // (negative counts were already reported above).
                if (nodeGroupCreate.getInstanceNum() >= 0
                        && nodeGroupCreate.getInstanceNum() != 1) {
                    validated = false;
                    collectInstanceNumInvalidateMsg(nodeGroupCreate,
                            failedMsgList);
                }
                break;
            case WORKER:
                workerCount++;
                if (nodeGroupCreate.getInstanceNum() == 0) {
                    validated = false;
                    collectInstanceNumInvalidateMsg(nodeGroupCreate,
                            failedMsgList);
                } else if (isHAFlag(nodeGroupCreate)) {
                    // HA on a worker group is unusual: warn only.
                    warning = true;
                }
                break;
            case CLIENT:
                clientCount++;
                if (isHAFlag(nodeGroupCreate)) {
                    warning = true;
                }
                break;
            case NONE:
                // Unrecognized role combination: warn only.
                warning = true;
                break;
            default:
            }
        }
        // Expected layout: 1-2 masters, 1-2 workers, at most 1 client.
        if ((masterCount < 1 || masterCount > 2) || (workerCount < 1 || workerCount > 2) ||
                clientCount > 1) {
            warning = true;
        }
        if (!validated) {
            showFailedMsg(clusterCreate.getName(), failedMsgList);
        } else if (warning || warningMsgList != null) {
            // NOTE(review): warningMsgList is never null here (allocated
            // above), so this branch always runs when validation passed;
            // showWarningMsg only prompts when the list is non-empty, so
            // a bare `warning` flag with no queued message (e.g. the
            // HA-flag cases) produces no prompt — confirm that is intended.
            if (!showWarningMsg(clusterCreate.getName(), warningMsgList)) {
                // User declined to proceed after seeing the warnings.
                validated = false;
            }
        }
        return validated;
    }
}
/**
 * Prompts the user with {@code promptMsg} and reads a yes/no answer
 * from the console. "yes"/"y" returns true and "no"/"n" returns false
 * (case-insensitive); any other input is re-prompted, and after three
 * unrecognized answers the method gives up and returns false. When the
 * shell was started with "always answer yes", returns true without
 * prompting.
 *
 * @param clusterName cluster the prompt relates to (used in failure output)
 * @param operateType operation name reported if reading the answer fails
 * @param promptMsg   prompt text shown to the user
 * @return true to proceed with the operation, false to abort
 */
private boolean isContinue(String clusterName, String operateType, String promptMsg) {
    if (this.alwaysAnswerYes) {
        return true;
    }
    boolean continueCreate = true;
    boolean continueLoop = true;
    String readMsg = "";
    try {
        ConsoleReader reader = new ConsoleReader();
        // Set prompt message
        reader.setDefaultPrompt(promptMsg);
        int invalidAnswers = 0;
        while (continueLoop) {
            if (invalidAnswers >= 3) {
                // Three unrecognized answers: treat as "no".
                continueCreate = false;
                break;
            }
            // Read user input
            readMsg = reader.readLine();
            if (readMsg == null) {
                // BUG FIX: readLine() returns null at end of input
                // (e.g. the console stream is closed); previously this
                // fell through to readMsg.trim() and the resulting
                // NullPointerException was reported as a command
                // failure. Abort cleanly instead.
                continueCreate = false;
                break;
            }
            String answer = readMsg.trim();
            if (answer.equalsIgnoreCase("yes")
                    || answer.equalsIgnoreCase("y")) {
                continueLoop = false;
            } else if (answer.equalsIgnoreCase("no")
                    || answer.equalsIgnoreCase("n")) {
                continueLoop = false;
                continueCreate = false;
            } else {
                invalidAnswers++;
            }
        }
    } catch (Exception e) {
        CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER,
                clusterName, operateType,
                Constants.OUTPUT_OP_RESULT_FAIL, e.getMessage());
        continueCreate = false;
    }
    return continueCreate;
}
/**
 * Classifies a node group as MASTER, WORKER or CLIENT by matching its
 * role list against the known role combinations; returns NONE when no
 * combination matches.
 */
private NodeGroupRole getNodeGroupRole(NodeGroupCreate nodeGroupCreate) {
    List<String> groupRoles = nodeGroupCreate.getRoles();
    for (NodeGroupRole candidate : NodeGroupRole.values()) {
        if (matchRole(candidate, groupRoles)) {
            return candidate;
        }
    }
    return NodeGroupRole.NONE;
}
/**
 * Checks whether the given role list matches the role combination
 * expected for the specified specialized node group role (MASTER,
 * WORKER or CLIENT); any other role value never matches.
 */
private boolean matchRole(NodeGroupRole role, List<String> roles) {
    List<String> matchRoles = new LinkedList<String>();
    switch (role) {
    case MASTER:
        // A master group is either a single name-node or job-tracker
        // role, or exactly the pair of both.
        if (roles.size() == 1) {
            String r = roles.get(0);
            return Constants.ROLE_HADOOP_NAME_NODE.equals(r) ||
                    Constants.ROLE_HADOOP_JOB_TRACKER.equals(r);
        } else if (roles.size() == 2) {
            // removeAll leaves matchRoles empty only when both expected
            // roles appear among the two supplied roles.
            matchRoles.add(Constants.ROLE_HADOOP_NAME_NODE);
            matchRoles.add(Constants.ROLE_HADOOP_JOB_TRACKER);
            matchRoles.removeAll(roles);
            return matchRoles.size() == 0 ? true : false;
        }
        return false;
    case WORKER:
        // A worker group is a single datanode or tasktracker role, or
        // any list containing both.
        if (roles.size() == 1) {
            if (Constants.ROLE_HADOOP_DATANODE.equals(roles.get(0)) ||
                    Constants.ROLE_HADOOP_TASKTRACKER.equals(roles.get(0))) {
                return true;
            }
            return false;
        } else {
            // NOTE(review): this only verifies that datanode and
            // tasktracker are both present; extra entries in roles are
            // not rejected here (unknown roles are reported separately
            // by checkNodeGroupRoles) — confirm that is intended.
            matchRoles.add(Constants.ROLE_HADOOP_DATANODE);
            matchRoles.add(Constants.ROLE_HADOOP_TASKTRACKER);
            matchRoles.removeAll(roles);
            return matchRoles.size() == 0 ? true : false;
        }
    case CLIENT:
        // A client group holds 1-4 roles that must include the hadoop
        // client role; diffNum == matchRoles.size() after removeAll
        // means every supplied role was one of the four allowed ones
        // (assuming no duplicates in the role list).
        if (roles.size() < 1 || roles.size() > 4) {
            return false;
        } else {
            matchRoles.add(Constants.ROLE_HADOOP_CLIENT);
            matchRoles.add(Constants.ROLE_HIVE);
            matchRoles.add(Constants.ROLE_HIVE_SERVER);
            matchRoles.add(Constants.ROLE_PIG);
            int diffNum = matchRoles.size() - roles.size();
            matchRoles.removeAll(roles);
            return roles.contains(Constants.ROLE_HADOOP_CLIENT)
                    && (diffNum >= 0) && (diffNum == matchRoles.size()) ? true
                    : false;
        }
    }
    // NONE (or any future role value) never matches.
    return false;
}
/** Prints the generic cluster-layout warning message to the console. */
private void showWarningMsg() {
    System.out.println(Constants.PARAM_CLUSTER_WARNING);
}
/**
 * Validates that the node group's instance count is not negative; on
 * failure a descriptive message is appended to {@code failedMsgList}.
 *
 * @return true when the instance count is valid
 */
private boolean checkInstanceNum(NodeGroupCreate nodeGroup,
        List<String> failedMsgList) {
    if (nodeGroup.getInstanceNum() >= 0) {
        return true;
    }
    collectInstanceNumInvalidateMsg(nodeGroup, failedMsgList);
    return false;
}
/**
 * Appends a "&lt;group&gt;.instanceNum=&lt;n&gt;" failure entry for the
 * node group to {@code failedMsgList}.
 */
private void collectInstanceNumInvalidateMsg(NodeGroupCreate nodeGroup,
        List<String> failedMsgList) {
    failedMsgList.add(nodeGroup.getName() + "." + "instanceNum="
            + nodeGroup.getInstanceNum());
}
/**
 * Verifies that every role of the node group is one of the distro's
 * known roles. Unknown roles are reported as a single entry of the
 * form {@code <group>.roles="r1,r2"} in {@code failedMsgList}.
 *
 * @return true when all roles are recognized
 */
private boolean checkNodeGroupRoles(NodeGroupCreate nodeGroup,
        List<String> distroRoles, List<String> failedMsgList) {
    List<String> unknownRoles = new LinkedList<String>();
    for (String role : nodeGroup.getRoles()) {
        if (!distroRoles.contains(role)) {
            unknownRoles.add(role);
        }
    }
    if (unknownRoles.isEmpty()) {
        return true;
    }
    // Join the offending roles with commas, no leading separator.
    StringBuilder joined = new StringBuilder();
    for (String role : unknownRoles) {
        if (joined.length() > 0) {
            joined.append(",");
        }
        joined.append(role);
    }
    failedMsgList.add(nodeGroup.getName() + "." + "roles=" + "\""
            + joined.toString() + "\"");
    return false;
}
/**
 * Looks up the role list of the cluster's distro, falling back to the
 * default distro when the cluster does not name one.
 *
 * @return the distro's roles, or null when the distro is unknown
 */
private List<String> findDistroRoles(ClusterCreate clusterCreate) {
    String distroName = clusterCreate.getDistro();
    if (distroName == null) {
        distroName = Constants.DEFAULT_DISTRO;
    }
    DistroRead distroRead = distroRestClient.get(distroName);
    return distroRead == null ? null : distroRead.getRoles();
}
/**
 * Prints a cluster-create failure listing every collected invalid
 * value, pluralizing the "invalid value" label when there are several.
 */
private void showFailedMsg(String name, List<String> failedMsgList) {
    StringBuilder failedMsg = new StringBuilder(Constants.INVALID_VALUE);
    if (failedMsgList.size() > 1) {
        failedMsg.append("s");
    }
    failedMsg.append(" ");
    // Comma-join the collected messages without a leading separator.
    boolean first = true;
    for (String msg : failedMsgList) {
        if (!first) {
            failedMsg.append(",");
        }
        failedMsg.append(msg);
        first = false;
    }
    CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, name,
            Constants.OUTPUT_OP_CREATE, Constants.OUTPUT_OP_RESULT_FAIL,
            failedMsg.toString());
}
/**
 * Validates the cluster's app configuration and collects warnings.
 * Black-list validation always runs; white-list validation is skipped
 * (and server-side config validation disabled) when
 * {@code skipConfigValidation} is set.
 *
 * @param cluster              cluster whose configuration is checked
 * @param skipConfigValidation when true, only the black list is checked
 * @param warningMsgList       receives generated warning messages
 */
private void validateConfiguration(ClusterCreate cluster, boolean skipConfigValidation, List<String> warningMsgList) {
    // validate blacklist
    ValidateResult blackListResult = validateBlackList(cluster);
    if (blackListResult != null) {
        addBlackListWarning(blackListResult, warningMsgList);
    }
    if (!skipConfigValidation) {
        // validate whitelist
        ValidateResult whiteListResult = validateWhiteList(cluster);
        addWhiteListWarning(cluster.getName(), whiteListResult, warningMsgList);
    } else {
        // Tell the server not to re-validate the configuration.
        cluster.setValidateConfig(false);
    }
}
/** Runs black-list validation over the cluster's configurations. */
private ValidateResult validateBlackList(ClusterCreate cluster) {
    return validateConfiguration(cluster, ValidationType.BLACK_LIST);
}
/** Runs white-list validation over the cluster's configurations. */
private ValidateResult validateWhiteList(ClusterCreate cluster) {
    return validateConfiguration(cluster, ValidationType.WHITE_LIST);
}
/**
 * Runs the given validation over the cluster-level configuration and
 * every node group's configuration, accumulating all distinct failure
 * names into a single result.
 *
 * @param cluster        cluster whose configurations are validated
 * @param validationType BLACK_LIST or WHITE_LIST validation
 * @return aggregate result; its type is the last non-valid type seen
 *         and its failure names are the union of all failures
 */
private ValidateResult validateConfiguration(ClusterCreate cluster, ValidationType validationType) {
    ValidateResult validateResult = new ValidateResult();
    // validate cluster level Configuration
    ValidateResult vr = null;
    if (cluster.getConfiguration() != null && !cluster.getConfiguration().isEmpty()) {
        vr = AppConfigValidationUtils.validateConfig(validationType, cluster.getConfiguration());
        if (vr.getType() != ValidateResult.Type.VALID) {
            validateResult.setType(vr.getType());
            validateResult.setFailureNames(vr.getFailureNames());
        }
    }
    // validate nodegroup level Configuration
    for (NodeGroupCreate nodeGroup : cluster.getNodeGroups()) {
        if (nodeGroup.getConfiguration() != null && !nodeGroup.getConfiguration().isEmpty()) {
            vr = AppConfigValidationUtils.validateConfig(validationType, nodeGroup.getConfiguration());
            if (vr.getType() != ValidateResult.Type.VALID) {
                validateResult.setType(vr.getType());
                // Merge this group's failures with those already
                // collected, keeping each name only once.
                List<String> failureNames = new LinkedList<String>();
                failureNames.addAll(validateResult.getFailureNames());
                for (String name : vr.getFailureNames()) {
                    if (!failureNames.contains(name)) {
                        failureNames.add(name);
                    }
                }
                // BUG FIX: the merged list was previously discarded —
                // setFailureNames(vr.getFailureNames()) stored only the
                // current node group's failures, losing the names
                // accumulated from the cluster level and earlier groups.
                validateResult.setFailureNames(failureNames);
            }
        }
    }
    return validateResult;
}
/**
 * Adds a warning to {@code warningMsgList} when white-list validation
 * reported names that are not in the white list.
 */
private void addWhiteListWarning(final String clusterName, ValidateResult whiteListResult,
        List<String> warningMsgList) {
    if (whiteListResult.getType() != ValidateResult.Type.WHITE_LIST_INVALID_NAME) {
        return;
    }
    String warningMsg =
            getValidateWarningMsg(whiteListResult.getFailureNames(),
                    Constants.PARAM_CLUSTER_NOT_IN_WHITE_LIST_WARNING);
    if (warningMsgList != null) {
        warningMsgList.add(warningMsg);
    }
}
/**
 * Adds a warning to {@code warningList} when black-list validation
 * flagged names that are on the black list.
 */
private void addBlackListWarning(ValidateResult blackListResult, List<String> warningList) {
    if (blackListResult.getType() != ValidateResult.Type.NAME_IN_BLACK_LIST) {
        return;
    }
    String warningMsg =
            getValidateWarningMsg(blackListResult.getFailureNames(), Constants.PARAM_CLUSTER_IN_BLACK_LIST_WARNING);
    if (warningList != null) {
        warningList.add(warningMsg);
    }
}
/**
 * Builds a warning of the form "Warning: n1, n2 are &lt;msg&gt;" (or
 * "... is &lt;msg&gt;" for a single name); returns the empty string
 * when there are no failure names.
 */
private String getValidateWarningMsg(List<String> failureNames, String warningMsg) {
    if (failureNames == null || failureNames.isEmpty()) {
        return "";
    }
    StringBuilder buff = new StringBuilder("Warning: ");
    // Comma-join the names without a trailing separator.
    boolean first = true;
    for (String failureName : failureNames) {
        if (!first) {
            buff.append(", ");
        }
        buff.append(failureName);
        first = false;
    }
    buff.append(failureNames.size() > 1 ? " are " : " is ");
    buff.append(warningMsg);
    return buff.toString();
}
/**
 * Prints the collected warnings and asks the user whether to proceed.
 *
 * @return true when there were no warnings or the user confirmed;
 *         false when the user declined
 */
private boolean showWarningMsg(String clusterName, List<String> warningMsgList) {
    if (warningMsgList == null || warningMsgList.isEmpty()) {
        return true;
    }
    for (String message : warningMsgList) {
        System.out.println(message);
    }
    return isContinue(clusterName, Constants.OUTPUT_OP_CREATE,
            Constants.PARAM_PROMPT_CONTINUE_MESSAGE);
}
/**
 * True when the node group's HA flag is set to anything other than
 * "off"; a blank flag counts as off.
 */
private boolean isHAFlag(NodeGroupCreate nodeGroupCreate) {
    String haFlag = nodeGroupCreate.getHaFlag();
    if (CommandsUtils.isBlank(haFlag)) {
        return false;
    }
    return !haFlag.equalsIgnoreCase("off");
}
/**
 * Verifies every node group's HA flag is one of "off", "on" or "ft"
 * (case-insensitive). A missing (null) flag is accepted, consistent
 * with isHAFlag() which treats a blank flag as "off".
 *
 * @param nodeGroups node groups to check; null is treated as valid
 * @return true when all HA flags are recognized
 */
private boolean validateHAInfo(NodeGroupCreate[] nodeGroups) {
    List<String> haFlagList = Arrays.asList("off", "on", "ft");
    if (nodeGroups != null) {
        for (NodeGroupCreate group : nodeGroups) {
            String haFlag = group.getHaFlag();
            if (haFlag == null) {
                // BUG FIX: previously dereferenced without a null check,
                // throwing NullPointerException when a node group omitted
                // the HA flag; treat an absent flag like "off".
                continue;
            }
            if (!haFlagList.contains(haFlag.toLowerCase())) {
                return false;
            }
        }
    }
    return true;
}
}
|
diff --git a/debug/src/com/dmdirc/addons/debug/commands/FakeError.java b/debug/src/com/dmdirc/addons/debug/commands/FakeError.java
index 5b568b10..278cf883 100644
--- a/debug/src/com/dmdirc/addons/debug/commands/FakeError.java
+++ b/debug/src/com/dmdirc/addons/debug/commands/FakeError.java
@@ -1,139 +1,140 @@
/*
* Copyright (c) 2006-2015 DMDirc Developers
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.dmdirc.addons.debug.commands;
import com.dmdirc.addons.debug.Debug;
import com.dmdirc.addons.debug.DebugCommand;
import com.dmdirc.commandparser.CommandArguments;
import com.dmdirc.commandparser.commands.IntelligentCommand;
import com.dmdirc.commandparser.commands.context.CommandContext;
import com.dmdirc.interfaces.WindowModel;
import com.dmdirc.ui.input.AdditionalTabTargets;
import javax.annotation.Nonnull;
import javax.inject.Inject;
import javax.inject.Provider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.Marker;
import static com.dmdirc.util.LogUtils.APP_ERROR;
import static com.dmdirc.util.LogUtils.FATAL_APP_ERROR;
import static com.dmdirc.util.LogUtils.FATAL_USER_ERROR;
import static com.dmdirc.util.LogUtils.USER_ERROR;
/**
* Creates DMDirc errors with the specified parameters.
*/
public class FakeError extends DebugCommand implements IntelligentCommand {
private static final Logger LOG = LoggerFactory.getLogger(FakeError.class);
@Inject
public FakeError(final Provider<Debug> commandProvider) {
super(commandProvider);
}
@Override
public String getName() {
return "error";
}
@Override
public String getUsage() {
return "<user|app> [<low|medium|high|fatal|unknown>] - Creates an error"
+ " with the specified parameters, defaults to high priority.";
}
@Override
public void execute(@Nonnull final WindowModel origin,
final CommandArguments args, final CommandContext context) {
if ((args.getArguments().length == 1
|| args.getArguments().length == 2)
&& "user".equals(args.getArguments()[0])) {
raiseError(getLevel(args.getArguments()), false);
} else if ((args.getArguments().length == 1
|| args.getArguments().length == 2)
&& "app".equals(args.getArguments()[0])) {
raiseError(getLevel(args.getArguments()), true);
} else {
showUsage(origin, args.isSilent(), getName(), getUsage());
}
}
private void raiseError(final String level, final boolean appError) {
final Marker marker = appError ? APP_ERROR : USER_ERROR;
switch (level.toUpperCase()) {
case "FATAL":
- LOG.error(appError ? FATAL_APP_ERROR : FATAL_USER_ERROR, "Debug error message");
+ LOG.error(appError ? FATAL_APP_ERROR : FATAL_USER_ERROR, "Debug error message",
+ new IllegalArgumentException());
break;
case "HIGH":
- LOG.error(marker, "Debug error message");
+ LOG.error(marker, "Debug error message", new IllegalArgumentException());
break;
case "MEDIUM":
- LOG.warn(marker, "Debug error message");
+ LOG.warn(marker, "Debug error message", new IllegalArgumentException());
break;
case "INFO":
- LOG.info(marker, "Debug error message");
+ LOG.info(marker, "Debug error message", new IllegalArgumentException());
break;
default:
- LOG.info(marker, "Debug error message");
+ LOG.info(marker, "Debug error message", new IllegalArgumentException());
}
}
/**
* Returns the error level specified by the provided arguments.
*
* @param args command arguments
*
* @return Error level
*/
private String getLevel(final String... args) {
if (args.length >= 2) {
return args[1].toUpperCase();
} else {
return "HIGH";
}
}
@Override
public AdditionalTabTargets getSuggestions(final int arg,
final IntelligentCommandContext context) {
final AdditionalTabTargets res = new AdditionalTabTargets();
res.excludeAll();
if (arg == 1) {
res.add("user");
res.add("app");
} else if (arg == 2) {
res.add("low");
res.add("medium");
res.add("high");
res.add("fatal");
}
return res;
}
}
| false
| true
|
private void raiseError(final String level, final boolean appError) {
final Marker marker = appError ? APP_ERROR : USER_ERROR;
switch (level.toUpperCase()) {
case "FATAL":
LOG.error(appError ? FATAL_APP_ERROR : FATAL_USER_ERROR, "Debug error message");
break;
case "HIGH":
LOG.error(marker, "Debug error message");
break;
case "MEDIUM":
LOG.warn(marker, "Debug error message");
break;
case "INFO":
LOG.info(marker, "Debug error message");
break;
default:
LOG.info(marker, "Debug error message");
}
}
|
private void raiseError(final String level, final boolean appError) {
final Marker marker = appError ? APP_ERROR : USER_ERROR;
switch (level.toUpperCase()) {
case "FATAL":
LOG.error(appError ? FATAL_APP_ERROR : FATAL_USER_ERROR, "Debug error message",
new IllegalArgumentException());
break;
case "HIGH":
LOG.error(marker, "Debug error message", new IllegalArgumentException());
break;
case "MEDIUM":
LOG.warn(marker, "Debug error message", new IllegalArgumentException());
break;
case "INFO":
LOG.info(marker, "Debug error message", new IllegalArgumentException());
break;
default:
LOG.info(marker, "Debug error message", new IllegalArgumentException());
}
}
|
diff --git a/src/main/java/com/janrain/oauth2/AnonymousTokenRequest.java b/src/main/java/com/janrain/oauth2/AnonymousTokenRequest.java
index e11aa26..df224d9 100644
--- a/src/main/java/com/janrain/oauth2/AnonymousTokenRequest.java
+++ b/src/main/java/com/janrain/oauth2/AnonymousTokenRequest.java
@@ -1,138 +1,140 @@
package com.janrain.oauth2;
import com.janrain.backplane2.server.BackplaneServerException;
import com.janrain.backplane2.server.BackplaneMessage;
import com.janrain.backplane2.server.GrantType;
import com.janrain.backplane2.server.Scope;
import com.janrain.backplane2.server.Token;
import com.janrain.backplane2.server.dao.DAOFactory;
import com.janrain.commons.supersimpledb.SimpleDBException;
import com.janrain.crypto.ChannelUtil;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.util.*;
/**
* @author Johnny Bufu
*/
public class AnonymousTokenRequest implements TokenRequest {
// - PUBLIC
public AnonymousTokenRequest( String callback, String bus, String scope, String refreshToken,
DAOFactory daoFactory, HttpServletRequest request, String authHeader) throws TokenException {
this.daoFactory = daoFactory;
this.grantType = StringUtils.isEmpty(refreshToken) ? GrantType.ANONYMOUS : GrantType.REFRESH_ANONYMOUS;
if (StringUtils.isBlank(callback)) {
throw new TokenException("Callback cannot be blank");
}
if (!callback.matches("[\\._a-zA-Z0-9]*")) {
throw new TokenException("callback parameter value is malformed");
}
this.bus = bus;
if ( StringUtils.isEmpty(refreshToken) ^ StringUtils.isNotEmpty(this.bus)) {
throw new TokenException("bus parameter is required if and only if refresh_token is not present");
}
try {
if ( StringUtils.isNotEmpty(this.bus) && daoFactory.getBusDao().get(this.bus) == null) {
throw new TokenException("Invalid bus: " + bus);
}
} catch (BackplaneServerException e) {
logger.error("error processing anonymous token request: " + e.getMessage(), e);
throw new TokenException(OAuth2.OAUTH2_TOKEN_SERVER_ERROR, "error processing anonymous token request", HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
}
this.requestScope = new Scope(scope);
- if ( this.requestScope.isAuthorizationRequired() ) {
+ if ( this.requestScope.isAuthorizationRequired() ||
+ ( this.requestScope.getScopeFieldValues(BackplaneMessage.Field.CHANNEL) != null &&
+ ! this.requestScope.getScopeFieldValues(BackplaneMessage.Field.CHANNEL).isEmpty())) {
throw new TokenException(OAuth2.OAUTH2_TOKEN_INVALID_SCOPE, "Buses and channels not allowed in the scope of anonymous token requests");
}
if (StringUtils.isNotEmpty(refreshToken)) {
this.refreshToken = Token.fromRequest(daoFactory, request, refreshToken, authHeader);
if (! this.refreshToken.getScope().containsScope(this.requestScope)) {
throw new TokenException(OAuth2.OAUTH2_TOKEN_INVALID_SCOPE, "invalid scope for refresh token: " + refreshToken + " : " + scope);
}
}
// todo: check this properly, perhaps in controller?
// throw new TokenException("Must not include client_secret for anonymous token requests");
}
@Override
public Map<String,Object> tokenResponse() throws TokenException {
logger.info("Responding to anonymous token request...");
final Token accessToken;
final Integer expiresIn = grantType.getTokenExpiresSecondsDefault();
Date expires = new Date(System.currentTimeMillis() + expiresIn.longValue() * 1000);
Scope processedScope = processScope();
try {
accessToken = new Token.Builder(grantType, processedScope.toString()).expires(expires).buildToken();
daoFactory.getTokenDao().persist(accessToken);
return accessToken.response(generateRefreshToken(grantType.getRefreshType(), processedScope, daoFactory));
} catch (Exception e) {
logger.error("error processing anonymous access token request: " + e.getMessage(), e);
throw new TokenException(OAuth2.OAUTH2_TOKEN_SERVER_ERROR, "error processing anonymous token request", HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
} finally {
logger.info("exiting anonymous token request");
try {
if (this.refreshToken != null) {
daoFactory.getTokenDao().delete(this.refreshToken.getIdValue());
}
} catch (BackplaneServerException e) {
logger.error("error deleting used refresh token: " + refreshToken.getIdValue(), e);
}
}
}
// - PRIVATE
private static final Logger logger = Logger.getLogger(AnonymousTokenRequest.class);
private static final int CHANNEL_NAME_LENGTH = 32;
private DAOFactory daoFactory;
private final GrantType grantType;
private final String bus;
private final Scope requestScope;
private Token refreshToken;
private static String generateRefreshToken(GrantType refreshType, Scope scope, DAOFactory daoFactory) throws SimpleDBException, BackplaneServerException {
if (refreshType == null || ! refreshType.isRefresh()) return null;
Token refreshToken = new Token.Builder(refreshType, scope.toString()).buildToken();
daoFactory.getTokenDao().persist(refreshToken);
return refreshToken.getIdValue();
}
private Scope processScope() throws TokenException {
Map<BackplaneMessage.Field,LinkedHashSet<String>> scopeMap = new LinkedHashMap<BackplaneMessage.Field, LinkedHashSet<String>>();
scopeMap.putAll(requestScope.getScopeMap());
final String bus;
final String channel;
if (refreshToken != null ) {
final Set<String> channels = refreshToken.getScope().getScopeFieldValues(BackplaneMessage.Field.CHANNEL);
final Set<String> buses = refreshToken.getScope().getScopeFieldValues(BackplaneMessage.Field.BUS);
if ( channels == null || channels.isEmpty() || channels.size() > 1 ||
buses == null || buses.isEmpty() || buses.size() > 1 ) {
throw new TokenException("invalid anonymous refresh token: " + refreshToken.getIdValue());
} else {
bus = buses.iterator().next();
channel = channels.iterator().next();
}
} else {
bus = this.bus; // bind generated channel to the requested bus
channel = ChannelUtil.randomString(CHANNEL_NAME_LENGTH);
}
scopeMap.put(BackplaneMessage.Field.BUS, new LinkedHashSet<String>() {{ add(bus);}});
scopeMap.put(BackplaneMessage.Field.CHANNEL, new LinkedHashSet<String>() {{ add(channel);}});
return new Scope(scopeMap);
}
}
| true
| true
|
public AnonymousTokenRequest( String callback, String bus, String scope, String refreshToken,
DAOFactory daoFactory, HttpServletRequest request, String authHeader) throws TokenException {
this.daoFactory = daoFactory;
this.grantType = StringUtils.isEmpty(refreshToken) ? GrantType.ANONYMOUS : GrantType.REFRESH_ANONYMOUS;
if (StringUtils.isBlank(callback)) {
throw new TokenException("Callback cannot be blank");
}
if (!callback.matches("[\\._a-zA-Z0-9]*")) {
throw new TokenException("callback parameter value is malformed");
}
this.bus = bus;
if ( StringUtils.isEmpty(refreshToken) ^ StringUtils.isNotEmpty(this.bus)) {
throw new TokenException("bus parameter is required if and only if refresh_token is not present");
}
try {
if ( StringUtils.isNotEmpty(this.bus) && daoFactory.getBusDao().get(this.bus) == null) {
throw new TokenException("Invalid bus: " + bus);
}
} catch (BackplaneServerException e) {
logger.error("error processing anonymous token request: " + e.getMessage(), e);
throw new TokenException(OAuth2.OAUTH2_TOKEN_SERVER_ERROR, "error processing anonymous token request", HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
}
this.requestScope = new Scope(scope);
if ( this.requestScope.isAuthorizationRequired() ) {
throw new TokenException(OAuth2.OAUTH2_TOKEN_INVALID_SCOPE, "Buses and channels not allowed in the scope of anonymous token requests");
}
if (StringUtils.isNotEmpty(refreshToken)) {
this.refreshToken = Token.fromRequest(daoFactory, request, refreshToken, authHeader);
if (! this.refreshToken.getScope().containsScope(this.requestScope)) {
throw new TokenException(OAuth2.OAUTH2_TOKEN_INVALID_SCOPE, "invalid scope for refresh token: " + refreshToken + " : " + scope);
}
}
// todo: check this properly, perhaps in controller?
// throw new TokenException("Must not include client_secret for anonymous token requests");
}
|
public AnonymousTokenRequest( String callback, String bus, String scope, String refreshToken,
DAOFactory daoFactory, HttpServletRequest request, String authHeader) throws TokenException {
this.daoFactory = daoFactory;
this.grantType = StringUtils.isEmpty(refreshToken) ? GrantType.ANONYMOUS : GrantType.REFRESH_ANONYMOUS;
if (StringUtils.isBlank(callback)) {
throw new TokenException("Callback cannot be blank");
}
if (!callback.matches("[\\._a-zA-Z0-9]*")) {
throw new TokenException("callback parameter value is malformed");
}
this.bus = bus;
if ( StringUtils.isEmpty(refreshToken) ^ StringUtils.isNotEmpty(this.bus)) {
throw new TokenException("bus parameter is required if and only if refresh_token is not present");
}
try {
if ( StringUtils.isNotEmpty(this.bus) && daoFactory.getBusDao().get(this.bus) == null) {
throw new TokenException("Invalid bus: " + bus);
}
} catch (BackplaneServerException e) {
logger.error("error processing anonymous token request: " + e.getMessage(), e);
throw new TokenException(OAuth2.OAUTH2_TOKEN_SERVER_ERROR, "error processing anonymous token request", HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
}
this.requestScope = new Scope(scope);
if ( this.requestScope.isAuthorizationRequired() ||
( this.requestScope.getScopeFieldValues(BackplaneMessage.Field.CHANNEL) != null &&
! this.requestScope.getScopeFieldValues(BackplaneMessage.Field.CHANNEL).isEmpty())) {
throw new TokenException(OAuth2.OAUTH2_TOKEN_INVALID_SCOPE, "Buses and channels not allowed in the scope of anonymous token requests");
}
if (StringUtils.isNotEmpty(refreshToken)) {
this.refreshToken = Token.fromRequest(daoFactory, request, refreshToken, authHeader);
if (! this.refreshToken.getScope().containsScope(this.requestScope)) {
throw new TokenException(OAuth2.OAUTH2_TOKEN_INVALID_SCOPE, "invalid scope for refresh token: " + refreshToken + " : " + scope);
}
}
// todo: check this properly, perhaps in controller?
// throw new TokenException("Must not include client_secret for anonymous token requests");
}
|
diff --git a/src/java/org/jivesoftware/sparkimpl/plugin/alerts/BroadcastDialog.java b/src/java/org/jivesoftware/sparkimpl/plugin/alerts/BroadcastDialog.java
index d203da77..cb85a6a8 100644
--- a/src/java/org/jivesoftware/sparkimpl/plugin/alerts/BroadcastDialog.java
+++ b/src/java/org/jivesoftware/sparkimpl/plugin/alerts/BroadcastDialog.java
@@ -1,218 +1,218 @@
/**
* $Revision: $
* $Date: $
*
* Copyright (C) 2006 Jive Software. All rights reserved.
*
* This software is published under the terms of the GNU Lesser Public License (LGPL),
* a copy of which is included in this distribution.
*/
package org.jivesoftware.sparkimpl.plugin.alerts;
import org.jivesoftware.resource.Res;
import org.jivesoftware.smack.packet.Message;
import org.jivesoftware.spark.SparkManager;
import org.jivesoftware.spark.component.CheckNode;
import org.jivesoftware.spark.component.CheckTree;
import org.jivesoftware.spark.component.TitlePanel;
import org.jivesoftware.spark.ui.ContactGroup;
import org.jivesoftware.spark.ui.ContactItem;
import org.jivesoftware.spark.ui.ContactList;
import org.jivesoftware.spark.util.ModelUtil;
import javax.swing.BorderFactory;
import javax.swing.ButtonGroup;
import javax.swing.JDialog;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JRadioButton;
import javax.swing.JScrollPane;
import javax.swing.JTextArea;
import java.awt.BorderLayout;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* Allows for better selective broadcasting.
*
* @author Derek DeMoro
*/
public class BroadcastDialog extends JPanel {
private CheckTree checkTree;
private CheckNode rosterNode;
private JTextArea messageBox;
private JRadioButton normalMessageButton;
private JRadioButton alertMessageButton;
private List<CheckNode> nodes = new ArrayList<CheckNode>();
private List<CheckNode> groupNodes = new ArrayList<CheckNode>();
public BroadcastDialog() {
setLayout(new GridBagLayout());
rosterNode = new CheckNode("Roster");
checkTree = new CheckTree(rosterNode);
// Build out from Roster
final ContactList contactList = SparkManager.getWorkspace().getContactList();
for (ContactGroup group : contactList.getContactGroups()) {
String groupName = group.getGroupName();
if (!group.hasAvailableContacts()) {
continue;
}
CheckNode groupNode = new CheckNode(groupName);
groupNodes.add(groupNode);
rosterNode.add(groupNode);
// Now add contact items from contact group.
for (ContactItem item : group.getContactItems()) {
CheckNode itemNode = new CheckNode(item.getNickname(), false, item.getIcon());
itemNode.setAssociatedObject(item.getJID());
groupNode.add(itemNode);
nodes.add(itemNode);
}
}
messageBox = new JTextArea();
- normalMessageButton = new JRadioButton("Normal Message");
- alertMessageButton = new JRadioButton("Alert Notification");
+ normalMessageButton = new JRadioButton(Res.getString("message.normal"));
+ alertMessageButton = new JRadioButton(Res.getString("message.alert.notify"));
ButtonGroup group = new ButtonGroup();
group.add(normalMessageButton);
group.add(alertMessageButton);
final JScrollPane pane = new JScrollPane(messageBox);
- pane.setBorder(BorderFactory.createTitledBorder("Message"));
+ pane.setBorder(BorderFactory.createTitledBorder(Res.getString("message")));
final JScrollPane treePane = new JScrollPane(checkTree);
- treePane.setBorder(BorderFactory.createTitledBorder("Send to these people"));
+ treePane.setBorder(BorderFactory.createTitledBorder(Res.getString("message.send.to.these.people")));
// Add to UI
add(pane, new GridBagConstraints(0, 0, 1, 1, 0.5, 1.0, GridBagConstraints.WEST, GridBagConstraints.BOTH, new Insets(5, 5, 5, 5), 0, 0));
add(normalMessageButton, new GridBagConstraints(0, 1, 1, 1, 0.0, 0.0, GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(5, 5, 5, 5), 0, 0));
add(alertMessageButton, new GridBagConstraints(0, 2, 1, 1, 0.0, 0.0, GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(2, 5, 2, 5), 0, 0));
add(treePane, new GridBagConstraints(1, 0, 1, 3, 0.5, 1.0, GridBagConstraints.WEST, GridBagConstraints.BOTH, new Insets(2, 5, 2, 5), 0, 0));
normalMessageButton.setSelected(true);
checkTree.expandTree();
// Iterate through selected users.
for (ContactItem item : contactList.getSelectedUsers()) {
for (CheckNode node : nodes) {
if (node.getAssociatedObject().toString().equals(item.getJID())) {
node.setSelected(true);
}
}
}
}
public void invokeDialog(ContactGroup group) {
for (CheckNode node : groupNodes) {
if (node.getUserObject().toString().equals(group.getGroupName())) {
node.setSelected(true);
}
}
invokeDialog();
}
/**
* Displays the broadcast dialog.
*/
public void invokeDialog() {
final JOptionPane pane;
final JDialog dlg;
TitlePanel titlePanel;
// Create the title panel for this dialog
titlePanel = new TitlePanel(Res.getString("title.broadcast.message"), Res.getString("message.enter.broadcast.message"), null, true);
// Construct main panel w/ layout.
final JPanel mainPanel = new JPanel();
mainPanel.setLayout(new BorderLayout());
mainPanel.add(titlePanel, BorderLayout.NORTH);
// The user should only be able to close this dialog.
Object[] options = {Res.getString("ok"), Res.getString("close")};
pane = new JOptionPane(this, JOptionPane.PLAIN_MESSAGE, JOptionPane.OK_CANCEL_OPTION, null, options, options[0]);
mainPanel.add(pane, BorderLayout.CENTER);
JOptionPane p = new JOptionPane();
dlg = p.createDialog(SparkManager.getMainWindow(), "Broadcast");
dlg.setModal(false);
dlg.pack();
dlg.setSize(600, 400);
dlg.setResizable(true);
dlg.setContentPane(mainPanel);
dlg.setLocationRelativeTo(SparkManager.getMainWindow());
PropertyChangeListener changeListener = new PropertyChangeListener() {
public void propertyChange(PropertyChangeEvent e) {
String value = (String)pane.getValue();
if (Res.getString("close").equals(value)) {
dlg.setVisible(false);
}
else {
dlg.setVisible(false);
sendBroadcasts();
}
}
};
pane.addPropertyChangeListener(changeListener);
dlg.setVisible(true);
dlg.toFront();
dlg.requestFocus();
messageBox.requestFocus();
}
/**
* Sends a broadcast message to all users selected.
*/
private void sendBroadcasts() {
final Set<String> jids = new HashSet<String>();
for (CheckNode node : nodes) {
if (node.isSelected()) {
String jid = (String)node.getAssociatedObject();
jids.add(jid);
}
}
String text = messageBox.getText();
if (!ModelUtil.hasLength(text)) {
return;
}
for (String jid : jids) {
final Message message = new Message();
message.setTo(jid);
message.setBody(text);
if (normalMessageButton.isSelected()) {
message.setType(Message.Type.normal);
}
else {
message.setType(Message.Type.headline);
}
SparkManager.getConnection().sendPacket(message);
}
}
}
| false
| true
|
public BroadcastDialog() {
setLayout(new GridBagLayout());
rosterNode = new CheckNode("Roster");
checkTree = new CheckTree(rosterNode);
// Build out from Roster
final ContactList contactList = SparkManager.getWorkspace().getContactList();
for (ContactGroup group : contactList.getContactGroups()) {
String groupName = group.getGroupName();
if (!group.hasAvailableContacts()) {
continue;
}
CheckNode groupNode = new CheckNode(groupName);
groupNodes.add(groupNode);
rosterNode.add(groupNode);
// Now add contact items from contact group.
for (ContactItem item : group.getContactItems()) {
CheckNode itemNode = new CheckNode(item.getNickname(), false, item.getIcon());
itemNode.setAssociatedObject(item.getJID());
groupNode.add(itemNode);
nodes.add(itemNode);
}
}
messageBox = new JTextArea();
normalMessageButton = new JRadioButton("Normal Message");
alertMessageButton = new JRadioButton("Alert Notification");
ButtonGroup group = new ButtonGroup();
group.add(normalMessageButton);
group.add(alertMessageButton);
final JScrollPane pane = new JScrollPane(messageBox);
pane.setBorder(BorderFactory.createTitledBorder("Message"));
final JScrollPane treePane = new JScrollPane(checkTree);
treePane.setBorder(BorderFactory.createTitledBorder("Send to these people"));
// Add to UI
add(pane, new GridBagConstraints(0, 0, 1, 1, 0.5, 1.0, GridBagConstraints.WEST, GridBagConstraints.BOTH, new Insets(5, 5, 5, 5), 0, 0));
add(normalMessageButton, new GridBagConstraints(0, 1, 1, 1, 0.0, 0.0, GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(5, 5, 5, 5), 0, 0));
add(alertMessageButton, new GridBagConstraints(0, 2, 1, 1, 0.0, 0.0, GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(2, 5, 2, 5), 0, 0));
add(treePane, new GridBagConstraints(1, 0, 1, 3, 0.5, 1.0, GridBagConstraints.WEST, GridBagConstraints.BOTH, new Insets(2, 5, 2, 5), 0, 0));
normalMessageButton.setSelected(true);
checkTree.expandTree();
// Iterate through selected users.
for (ContactItem item : contactList.getSelectedUsers()) {
for (CheckNode node : nodes) {
if (node.getAssociatedObject().toString().equals(item.getJID())) {
node.setSelected(true);
}
}
}
}
|
public BroadcastDialog() {
setLayout(new GridBagLayout());
rosterNode = new CheckNode("Roster");
checkTree = new CheckTree(rosterNode);
// Build out from Roster
final ContactList contactList = SparkManager.getWorkspace().getContactList();
for (ContactGroup group : contactList.getContactGroups()) {
String groupName = group.getGroupName();
if (!group.hasAvailableContacts()) {
continue;
}
CheckNode groupNode = new CheckNode(groupName);
groupNodes.add(groupNode);
rosterNode.add(groupNode);
// Now add contact items from contact group.
for (ContactItem item : group.getContactItems()) {
CheckNode itemNode = new CheckNode(item.getNickname(), false, item.getIcon());
itemNode.setAssociatedObject(item.getJID());
groupNode.add(itemNode);
nodes.add(itemNode);
}
}
messageBox = new JTextArea();
normalMessageButton = new JRadioButton(Res.getString("message.normal"));
alertMessageButton = new JRadioButton(Res.getString("message.alert.notify"));
ButtonGroup group = new ButtonGroup();
group.add(normalMessageButton);
group.add(alertMessageButton);
final JScrollPane pane = new JScrollPane(messageBox);
pane.setBorder(BorderFactory.createTitledBorder(Res.getString("message")));
final JScrollPane treePane = new JScrollPane(checkTree);
treePane.setBorder(BorderFactory.createTitledBorder(Res.getString("message.send.to.these.people")));
// Add to UI
add(pane, new GridBagConstraints(0, 0, 1, 1, 0.5, 1.0, GridBagConstraints.WEST, GridBagConstraints.BOTH, new Insets(5, 5, 5, 5), 0, 0));
add(normalMessageButton, new GridBagConstraints(0, 1, 1, 1, 0.0, 0.0, GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(5, 5, 5, 5), 0, 0));
add(alertMessageButton, new GridBagConstraints(0, 2, 1, 1, 0.0, 0.0, GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(2, 5, 2, 5), 0, 0));
add(treePane, new GridBagConstraints(1, 0, 1, 3, 0.5, 1.0, GridBagConstraints.WEST, GridBagConstraints.BOTH, new Insets(2, 5, 2, 5), 0, 0));
normalMessageButton.setSelected(true);
checkTree.expandTree();
// Iterate through selected users.
for (ContactItem item : contactList.getSelectedUsers()) {
for (CheckNode node : nodes) {
if (node.getAssociatedObject().toString().equals(item.getJID())) {
node.setSelected(true);
}
}
}
}
|
diff --git a/src/org/evswork/whatsdonetoday/DatePickerPreference.java b/src/org/evswork/whatsdonetoday/DatePickerPreference.java
index b1b7c5c..b3a631c 100644
--- a/src/org/evswork/whatsdonetoday/DatePickerPreference.java
+++ b/src/org/evswork/whatsdonetoday/DatePickerPreference.java
@@ -1,76 +1,76 @@
/*
Copyright 2012 Javran Cheng
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.evswork.whatsdonetoday;
import android.app.DatePickerDialog;
import android.app.DatePickerDialog.OnDateSetListener;
import android.content.Context;
import android.preference.Preference;
import android.util.AttributeSet;
import android.widget.DatePicker;
public class DatePickerPreference extends Preference {
public int year = 2012;
public int month = 1;
public int day = 1;
public DatePickerPreference(Context context, AttributeSet attrs,
int defStyle) {
super(context, attrs, defStyle);
initPreference();
}
public DatePickerPreference(Context context, AttributeSet attrs) {
super(context, attrs);
initPreference();
}
private void initPreference() {
setPersistent(false);
setOnPreferenceClickListener(new OnPreferenceClickListener() {
@Override
public boolean onPreferenceClick(Preference preference) {
DatePickerDialog dialog = new DatePickerDialog(getContext(), new OnDateSetListener() {
@Override
public void onDateSet(DatePicker view, int year, int monthOfYear,
int dayOfMonth) {
DatePickerPreference.this.year = year;
- DatePickerPreference.this.month = monthOfYear;
+ DatePickerPreference.this.month = monthOfYear + 1;
DatePickerPreference.this.day = dayOfMonth;
updateUI();
}
- }, year, month, day);
+ }, year, month - 1, day);
dialog.show();
return true;
}
});
updateUI();
}
public void updateUI() {
setSummary(buildDateString(year, month, day));
}
public static String buildDateString(int year, int month, int day) {
return String.format("%04d-%02d-%02d", year, month, day);
}
}
| false
| true
|
private void initPreference() {
setPersistent(false);
setOnPreferenceClickListener(new OnPreferenceClickListener() {
@Override
public boolean onPreferenceClick(Preference preference) {
DatePickerDialog dialog = new DatePickerDialog(getContext(), new OnDateSetListener() {
@Override
public void onDateSet(DatePicker view, int year, int monthOfYear,
int dayOfMonth) {
DatePickerPreference.this.year = year;
DatePickerPreference.this.month = monthOfYear;
DatePickerPreference.this.day = dayOfMonth;
updateUI();
}
}, year, month, day);
dialog.show();
return true;
}
});
updateUI();
}
|
private void initPreference() {
setPersistent(false);
setOnPreferenceClickListener(new OnPreferenceClickListener() {
@Override
public boolean onPreferenceClick(Preference preference) {
DatePickerDialog dialog = new DatePickerDialog(getContext(), new OnDateSetListener() {
@Override
public void onDateSet(DatePicker view, int year, int monthOfYear,
int dayOfMonth) {
DatePickerPreference.this.year = year;
DatePickerPreference.this.month = monthOfYear + 1;
DatePickerPreference.this.day = dayOfMonth;
updateUI();
}
}, year, month - 1, day);
dialog.show();
return true;
}
});
updateUI();
}
|
diff --git a/org.emftext.sdk.codegen/src/org/emftext/sdk/codegen/generators/LocationMapGenerator.java b/org.emftext.sdk.codegen/src/org/emftext/sdk/codegen/generators/LocationMapGenerator.java
index 11d3e10e3..1665a27bf 100644
--- a/org.emftext.sdk.codegen/src/org/emftext/sdk/codegen/generators/LocationMapGenerator.java
+++ b/org.emftext.sdk.codegen/src/org/emftext/sdk/codegen/generators/LocationMapGenerator.java
@@ -1,166 +1,166 @@
package org.emftext.sdk.codegen.generators;
import static org.emftext.sdk.codegen.generators.IClassNameConstants.ARRAY_LIST;
import static org.emftext.sdk.codegen.generators.IClassNameConstants.BASIC_E_MAP;
import static org.emftext.sdk.codegen.generators.IClassNameConstants.COLLECTIONS;
import static org.emftext.sdk.codegen.generators.IClassNameConstants.COMPARATOR;
import static org.emftext.sdk.codegen.generators.IClassNameConstants.E_MAP;
import static org.emftext.sdk.codegen.generators.IClassNameConstants.E_OBJECT;
import static org.emftext.sdk.codegen.generators.IClassNameConstants.LIST;
import java.io.PrintWriter;
import org.emftext.sdk.codegen.EArtifact;
import org.emftext.sdk.codegen.GenerationContext;
import org.emftext.sdk.codegen.IGenerator;
public class LocationMapGenerator extends BaseGenerator {
public LocationMapGenerator() {
super();
}
private LocationMapGenerator(GenerationContext context) {
super(context, EArtifact.LOCATION_MAP);
}
public boolean generate(PrintWriter out) {
org.emftext.sdk.codegen.composites.StringComposite sc = new org.emftext.sdk.codegen.composites.JavaComposite();
sc.add("package " + getResourcePackageName() + ";");
sc.addLineBreak();
sc.add("// A basic implementation of the ILocationMap interface. Instances");
sc.add("// store information about element locations using four maps.");
sc.add("// <p>");
sc.add("// The set-methods can be called multiple times by the parser that may visit");
sc.add("// multiple children from which it copies the localization information for the parent");
sc.add("// (i.e., the element for which set-method is called)");
sc.add("// It implements the following behavior:");
sc.add("// <p>");
sc.add("// Line: The lowest of all sources is used for target<br>");
sc.add("// Column: The lowest of all sources is used for target<br>");
sc.add("// Start: The lowest of all sources is used for target<br>");
sc.add("// End: The highest of all sources is used for target<br>");
sc.add("//");
sc.add("public class " + getResourceClassName() + " implements " + getClassNameHelper().getI_LOCATION_MAP() + " {");
sc.addLineBreak();
sc.add("// A basic interface that can be implemented to select");
sc.add("// EObjects based of their location in a text resource.");
sc.add("public interface ISelector {");
sc.add("boolean accept(int startOffset, int endOffset);");
sc.add("}");
sc.addLineBreak();
sc.add("protected " + E_MAP + "<" + E_OBJECT + ", Integer> columnMap = new " + BASIC_E_MAP + "<" + E_OBJECT + ", Integer>();");
sc.add("protected " + E_MAP + "<" + E_OBJECT + ", Integer> lineMap = new " + BASIC_E_MAP + "<" + E_OBJECT + ", Integer>();");
sc.add("protected " + E_MAP + "<" + E_OBJECT + ", Integer> charStartMap = new " + BASIC_E_MAP + "<" + E_OBJECT + ", Integer>();");
sc.add("protected " + E_MAP + "<" + E_OBJECT + ", Integer> charEndMap = new " + BASIC_E_MAP + "<" + E_OBJECT + ", Integer>();");
sc.addLineBreak();
sc.add("public void setLine(" + E_OBJECT + " element, int line) {");
sc.add("setMapValueToMin(lineMap, element, line);");
sc.add("}");
sc.addLineBreak();
sc.add("public int getLine(" + E_OBJECT + " element) {");
sc.add("return getMapValue(lineMap, element);");
sc.add("}");
sc.addLineBreak();
sc.add("public void setColumn(" + E_OBJECT + " element, int column) {");
sc.add("setMapValueToMin(columnMap, element, column);");
sc.add("}");
sc.addLineBreak();
sc.add("public int getColumn(" + E_OBJECT + " element) {");
sc.add("return getMapValue(columnMap, element);");
sc.add("}");
sc.addLineBreak();
sc.add("public void setCharStart(" + E_OBJECT + " element, int charStart) {");
sc.add("setMapValueToMin(charStartMap, element, charStart);");
sc.add("}");
sc.addLineBreak();
sc.add("public int getCharStart(" + E_OBJECT + " element) {");
sc.add("return getMapValue(charStartMap, element);");
sc.add("}");
sc.addLineBreak();
sc.add("public void setCharEnd(" + E_OBJECT + " element, int charEnd) {");
sc.add("setMapValueToMax(charEndMap, element, charEnd);");
sc.add("}");
sc.addLineBreak();
sc.add("public int getCharEnd(" + E_OBJECT + " element) {");
sc.add("return getMapValue(charEndMap, element);");
sc.add("}");
sc.addLineBreak();
sc.add("private int getMapValue(" + E_MAP + "<" + E_OBJECT + ", Integer> map, " + E_OBJECT + " element) {");
sc.add("if (!map.containsKey(element)) return -1;");
sc.add("return map.get(element);");
sc.add("}");
sc.addLineBreak();
sc.add("private void setMapValueToMin(" + E_MAP + "<" + E_OBJECT + ", Integer> map, " + E_OBJECT + " element, int value) {");
- sc.add("// we to synchronize the write access, because other threads may iterate");
+ sc.add("// we need to synchronize the write access, because other threads may iterate");
sc.add("// over the map concurrently");
sc.add("synchronized (this) {");
sc.add("if (element == null || value < 0) return;");
sc.add("if (map.containsKey(element) && map.get(element) < value) return;");
sc.add("map.put(element, value);");
sc.add("}");
sc.add("}");
sc.addLineBreak();
sc.add("private void setMapValueToMax(" + E_MAP + "<" + E_OBJECT + ", Integer> map, " + E_OBJECT + " element, int value) {");
- sc.add("// we to synchronize the write access, because other threads may iterate");
+ sc.add("// we need to synchronize the write access, because other threads may iterate");
sc.add("// over the map concurrently");
sc.add("synchronized (this) {");
sc.add("if (element == null || value < 0) return;");
sc.add("if (map.containsKey(element) && map.get(element) > value) return;");
sc.add("map.put(element, value);");
sc.add("}");
sc.add("}");
sc.addLineBreak();
sc.add("public " + LIST + "<" + E_OBJECT + "> getElementsAt(final int documentOffset) {");
sc.add(LIST + "<" + E_OBJECT + "> result = getElements(new ISelector() {");
sc.add("public boolean accept(int start, int end) {");
sc.add("return start <= documentOffset && end >= documentOffset;");
sc.add("}");
sc.add("});");
sc.add("return result;");
sc.add("}");
sc.addLineBreak();
sc.add("public " + LIST + "<" + E_OBJECT + "> getElementsBetween(final int startOffset, final int endOffset) {");
sc.add("" + LIST + "<" + E_OBJECT + "> result = getElements(new ISelector() {");
sc.add("public boolean accept(int start, int end) {");
sc.add("return start >= startOffset && end <= endOffset;");
sc.add("}");
sc.add("});");
sc.add("return result;");
sc.add("}");
sc.addLineBreak();
sc.add("private " + LIST + "<" + E_OBJECT + "> getElements(ISelector s) {");
sc.add("// there might be more than one element at the given offset");
sc.add("// thus, we collect all of them and sort them afterwards");
sc.add(LIST + "<" + E_OBJECT + "> result = new " + ARRAY_LIST + "<" + E_OBJECT + ">();");
sc.addLineBreak();
- sc.add("// we to synchronize the iteration over the map concurrently");
- sc.add("// because other threads may write to the map concurrently");
+ sc.add("// we need to synchronize the iteration over the map, because");
+ sc.add("// other threads may write to the map concurrently");
sc.add("synchronized (this) {");
sc.add("for (" + E_OBJECT + " next : charStartMap.keySet()) {");
sc.add("int start = charStartMap.get(next);");
sc.add("int end = charEndMap.get(next);");
sc.add("if (s.accept(start, end)) {");
sc.add("result.add(next);");
sc.add("}");
sc.add("}");
sc.add("}");
sc.add(COLLECTIONS + ".sort(result, new " + COMPARATOR + "<" + E_OBJECT + ">() {");
sc.add("public int compare(" + E_OBJECT + " objectA, " + E_OBJECT + " objectB) {");
sc.add("int lengthA = getCharEnd(objectA) - getCharStart(objectA);");
sc.add("int lengthB = getCharEnd(objectB) - getCharStart(objectB);");
sc.add("return lengthA - lengthB;");
sc.add("}");
sc.add("});");
sc.add("return result;");
sc.add("}");
sc.add("}");
out.print(sc.toString());
return true;
}
public IGenerator newInstance(GenerationContext context) {
return new LocationMapGenerator(context);
}
}
| false
| true
|
public boolean generate(PrintWriter out) {
org.emftext.sdk.codegen.composites.StringComposite sc = new org.emftext.sdk.codegen.composites.JavaComposite();
sc.add("package " + getResourcePackageName() + ";");
sc.addLineBreak();
sc.add("// A basic implementation of the ILocationMap interface. Instances");
sc.add("// store information about element locations using four maps.");
sc.add("// <p>");
sc.add("// The set-methods can be called multiple times by the parser that may visit");
sc.add("// multiple children from which it copies the localization information for the parent");
sc.add("// (i.e., the element for which set-method is called)");
sc.add("// It implements the following behavior:");
sc.add("// <p>");
sc.add("// Line: The lowest of all sources is used for target<br>");
sc.add("// Column: The lowest of all sources is used for target<br>");
sc.add("// Start: The lowest of all sources is used for target<br>");
sc.add("// End: The highest of all sources is used for target<br>");
sc.add("//");
sc.add("public class " + getResourceClassName() + " implements " + getClassNameHelper().getI_LOCATION_MAP() + " {");
sc.addLineBreak();
sc.add("// A basic interface that can be implemented to select");
sc.add("// EObjects based of their location in a text resource.");
sc.add("public interface ISelector {");
sc.add("boolean accept(int startOffset, int endOffset);");
sc.add("}");
sc.addLineBreak();
sc.add("protected " + E_MAP + "<" + E_OBJECT + ", Integer> columnMap = new " + BASIC_E_MAP + "<" + E_OBJECT + ", Integer>();");
sc.add("protected " + E_MAP + "<" + E_OBJECT + ", Integer> lineMap = new " + BASIC_E_MAP + "<" + E_OBJECT + ", Integer>();");
sc.add("protected " + E_MAP + "<" + E_OBJECT + ", Integer> charStartMap = new " + BASIC_E_MAP + "<" + E_OBJECT + ", Integer>();");
sc.add("protected " + E_MAP + "<" + E_OBJECT + ", Integer> charEndMap = new " + BASIC_E_MAP + "<" + E_OBJECT + ", Integer>();");
sc.addLineBreak();
sc.add("public void setLine(" + E_OBJECT + " element, int line) {");
sc.add("setMapValueToMin(lineMap, element, line);");
sc.add("}");
sc.addLineBreak();
sc.add("public int getLine(" + E_OBJECT + " element) {");
sc.add("return getMapValue(lineMap, element);");
sc.add("}");
sc.addLineBreak();
sc.add("public void setColumn(" + E_OBJECT + " element, int column) {");
sc.add("setMapValueToMin(columnMap, element, column);");
sc.add("}");
sc.addLineBreak();
sc.add("public int getColumn(" + E_OBJECT + " element) {");
sc.add("return getMapValue(columnMap, element);");
sc.add("}");
sc.addLineBreak();
sc.add("public void setCharStart(" + E_OBJECT + " element, int charStart) {");
sc.add("setMapValueToMin(charStartMap, element, charStart);");
sc.add("}");
sc.addLineBreak();
sc.add("public int getCharStart(" + E_OBJECT + " element) {");
sc.add("return getMapValue(charStartMap, element);");
sc.add("}");
sc.addLineBreak();
sc.add("public void setCharEnd(" + E_OBJECT + " element, int charEnd) {");
sc.add("setMapValueToMax(charEndMap, element, charEnd);");
sc.add("}");
sc.addLineBreak();
sc.add("public int getCharEnd(" + E_OBJECT + " element) {");
sc.add("return getMapValue(charEndMap, element);");
sc.add("}");
sc.addLineBreak();
sc.add("private int getMapValue(" + E_MAP + "<" + E_OBJECT + ", Integer> map, " + E_OBJECT + " element) {");
sc.add("if (!map.containsKey(element)) return -1;");
sc.add("return map.get(element);");
sc.add("}");
sc.addLineBreak();
sc.add("private void setMapValueToMin(" + E_MAP + "<" + E_OBJECT + ", Integer> map, " + E_OBJECT + " element, int value) {");
sc.add("// we to synchronize the write access, because other threads may iterate");
sc.add("// over the map concurrently");
sc.add("synchronized (this) {");
sc.add("if (element == null || value < 0) return;");
sc.add("if (map.containsKey(element) && map.get(element) < value) return;");
sc.add("map.put(element, value);");
sc.add("}");
sc.add("}");
sc.addLineBreak();
sc.add("private void setMapValueToMax(" + E_MAP + "<" + E_OBJECT + ", Integer> map, " + E_OBJECT + " element, int value) {");
sc.add("// we to synchronize the write access, because other threads may iterate");
sc.add("// over the map concurrently");
sc.add("synchronized (this) {");
sc.add("if (element == null || value < 0) return;");
sc.add("if (map.containsKey(element) && map.get(element) > value) return;");
sc.add("map.put(element, value);");
sc.add("}");
sc.add("}");
sc.addLineBreak();
sc.add("public " + LIST + "<" + E_OBJECT + "> getElementsAt(final int documentOffset) {");
sc.add(LIST + "<" + E_OBJECT + "> result = getElements(new ISelector() {");
sc.add("public boolean accept(int start, int end) {");
sc.add("return start <= documentOffset && end >= documentOffset;");
sc.add("}");
sc.add("});");
sc.add("return result;");
sc.add("}");
sc.addLineBreak();
sc.add("public " + LIST + "<" + E_OBJECT + "> getElementsBetween(final int startOffset, final int endOffset) {");
sc.add("" + LIST + "<" + E_OBJECT + "> result = getElements(new ISelector() {");
sc.add("public boolean accept(int start, int end) {");
sc.add("return start >= startOffset && end <= endOffset;");
sc.add("}");
sc.add("});");
sc.add("return result;");
sc.add("}");
sc.addLineBreak();
sc.add("private " + LIST + "<" + E_OBJECT + "> getElements(ISelector s) {");
sc.add("// there might be more than one element at the given offset");
sc.add("// thus, we collect all of them and sort them afterwards");
sc.add(LIST + "<" + E_OBJECT + "> result = new " + ARRAY_LIST + "<" + E_OBJECT + ">();");
sc.addLineBreak();
sc.add("// we to synchronize the iteration over the map concurrently");
sc.add("// because other threads may write to the map concurrently");
sc.add("synchronized (this) {");
sc.add("for (" + E_OBJECT + " next : charStartMap.keySet()) {");
sc.add("int start = charStartMap.get(next);");
sc.add("int end = charEndMap.get(next);");
sc.add("if (s.accept(start, end)) {");
sc.add("result.add(next);");
sc.add("}");
sc.add("}");
sc.add("}");
sc.add(COLLECTIONS + ".sort(result, new " + COMPARATOR + "<" + E_OBJECT + ">() {");
sc.add("public int compare(" + E_OBJECT + " objectA, " + E_OBJECT + " objectB) {");
sc.add("int lengthA = getCharEnd(objectA) - getCharStart(objectA);");
sc.add("int lengthB = getCharEnd(objectB) - getCharStart(objectB);");
sc.add("return lengthA - lengthB;");
sc.add("}");
sc.add("});");
sc.add("return result;");
sc.add("}");
sc.add("}");
out.print(sc.toString());
return true;
}
|
public boolean generate(PrintWriter out) {
org.emftext.sdk.codegen.composites.StringComposite sc = new org.emftext.sdk.codegen.composites.JavaComposite();
sc.add("package " + getResourcePackageName() + ";");
sc.addLineBreak();
sc.add("// A basic implementation of the ILocationMap interface. Instances");
sc.add("// store information about element locations using four maps.");
sc.add("// <p>");
sc.add("// The set-methods can be called multiple times by the parser that may visit");
sc.add("// multiple children from which it copies the localization information for the parent");
sc.add("// (i.e., the element for which set-method is called)");
sc.add("// It implements the following behavior:");
sc.add("// <p>");
sc.add("// Line: The lowest of all sources is used for target<br>");
sc.add("// Column: The lowest of all sources is used for target<br>");
sc.add("// Start: The lowest of all sources is used for target<br>");
sc.add("// End: The highest of all sources is used for target<br>");
sc.add("//");
sc.add("public class " + getResourceClassName() + " implements " + getClassNameHelper().getI_LOCATION_MAP() + " {");
sc.addLineBreak();
sc.add("// A basic interface that can be implemented to select");
sc.add("// EObjects based of their location in a text resource.");
sc.add("public interface ISelector {");
sc.add("boolean accept(int startOffset, int endOffset);");
sc.add("}");
sc.addLineBreak();
sc.add("protected " + E_MAP + "<" + E_OBJECT + ", Integer> columnMap = new " + BASIC_E_MAP + "<" + E_OBJECT + ", Integer>();");
sc.add("protected " + E_MAP + "<" + E_OBJECT + ", Integer> lineMap = new " + BASIC_E_MAP + "<" + E_OBJECT + ", Integer>();");
sc.add("protected " + E_MAP + "<" + E_OBJECT + ", Integer> charStartMap = new " + BASIC_E_MAP + "<" + E_OBJECT + ", Integer>();");
sc.add("protected " + E_MAP + "<" + E_OBJECT + ", Integer> charEndMap = new " + BASIC_E_MAP + "<" + E_OBJECT + ", Integer>();");
sc.addLineBreak();
sc.add("public void setLine(" + E_OBJECT + " element, int line) {");
sc.add("setMapValueToMin(lineMap, element, line);");
sc.add("}");
sc.addLineBreak();
sc.add("public int getLine(" + E_OBJECT + " element) {");
sc.add("return getMapValue(lineMap, element);");
sc.add("}");
sc.addLineBreak();
sc.add("public void setColumn(" + E_OBJECT + " element, int column) {");
sc.add("setMapValueToMin(columnMap, element, column);");
sc.add("}");
sc.addLineBreak();
sc.add("public int getColumn(" + E_OBJECT + " element) {");
sc.add("return getMapValue(columnMap, element);");
sc.add("}");
sc.addLineBreak();
sc.add("public void setCharStart(" + E_OBJECT + " element, int charStart) {");
sc.add("setMapValueToMin(charStartMap, element, charStart);");
sc.add("}");
sc.addLineBreak();
sc.add("public int getCharStart(" + E_OBJECT + " element) {");
sc.add("return getMapValue(charStartMap, element);");
sc.add("}");
sc.addLineBreak();
sc.add("public void setCharEnd(" + E_OBJECT + " element, int charEnd) {");
sc.add("setMapValueToMax(charEndMap, element, charEnd);");
sc.add("}");
sc.addLineBreak();
sc.add("public int getCharEnd(" + E_OBJECT + " element) {");
sc.add("return getMapValue(charEndMap, element);");
sc.add("}");
sc.addLineBreak();
sc.add("private int getMapValue(" + E_MAP + "<" + E_OBJECT + ", Integer> map, " + E_OBJECT + " element) {");
sc.add("if (!map.containsKey(element)) return -1;");
sc.add("return map.get(element);");
sc.add("}");
sc.addLineBreak();
sc.add("private void setMapValueToMin(" + E_MAP + "<" + E_OBJECT + ", Integer> map, " + E_OBJECT + " element, int value) {");
sc.add("// we need to synchronize the write access, because other threads may iterate");
sc.add("// over the map concurrently");
sc.add("synchronized (this) {");
sc.add("if (element == null || value < 0) return;");
sc.add("if (map.containsKey(element) && map.get(element) < value) return;");
sc.add("map.put(element, value);");
sc.add("}");
sc.add("}");
sc.addLineBreak();
sc.add("private void setMapValueToMax(" + E_MAP + "<" + E_OBJECT + ", Integer> map, " + E_OBJECT + " element, int value) {");
sc.add("// we need to synchronize the write access, because other threads may iterate");
sc.add("// over the map concurrently");
sc.add("synchronized (this) {");
sc.add("if (element == null || value < 0) return;");
sc.add("if (map.containsKey(element) && map.get(element) > value) return;");
sc.add("map.put(element, value);");
sc.add("}");
sc.add("}");
sc.addLineBreak();
sc.add("public " + LIST + "<" + E_OBJECT + "> getElementsAt(final int documentOffset) {");
sc.add(LIST + "<" + E_OBJECT + "> result = getElements(new ISelector() {");
sc.add("public boolean accept(int start, int end) {");
sc.add("return start <= documentOffset && end >= documentOffset;");
sc.add("}");
sc.add("});");
sc.add("return result;");
sc.add("}");
sc.addLineBreak();
sc.add("public " + LIST + "<" + E_OBJECT + "> getElementsBetween(final int startOffset, final int endOffset) {");
sc.add("" + LIST + "<" + E_OBJECT + "> result = getElements(new ISelector() {");
sc.add("public boolean accept(int start, int end) {");
sc.add("return start >= startOffset && end <= endOffset;");
sc.add("}");
sc.add("});");
sc.add("return result;");
sc.add("}");
sc.addLineBreak();
sc.add("private " + LIST + "<" + E_OBJECT + "> getElements(ISelector s) {");
sc.add("// there might be more than one element at the given offset");
sc.add("// thus, we collect all of them and sort them afterwards");
sc.add(LIST + "<" + E_OBJECT + "> result = new " + ARRAY_LIST + "<" + E_OBJECT + ">();");
sc.addLineBreak();
sc.add("// we need to synchronize the iteration over the map, because");
sc.add("// other threads may write to the map concurrently");
sc.add("synchronized (this) {");
sc.add("for (" + E_OBJECT + " next : charStartMap.keySet()) {");
sc.add("int start = charStartMap.get(next);");
sc.add("int end = charEndMap.get(next);");
sc.add("if (s.accept(start, end)) {");
sc.add("result.add(next);");
sc.add("}");
sc.add("}");
sc.add("}");
sc.add(COLLECTIONS + ".sort(result, new " + COMPARATOR + "<" + E_OBJECT + ">() {");
sc.add("public int compare(" + E_OBJECT + " objectA, " + E_OBJECT + " objectB) {");
sc.add("int lengthA = getCharEnd(objectA) - getCharStart(objectA);");
sc.add("int lengthB = getCharEnd(objectB) - getCharStart(objectB);");
sc.add("return lengthA - lengthB;");
sc.add("}");
sc.add("});");
sc.add("return result;");
sc.add("}");
sc.add("}");
out.print(sc.toString());
return true;
}
|
diff --git a/obdalib/obdalib-core/src/main/java/it/unibz/krdb/obda/io/R2RMLParser.java b/obdalib/obdalib-core/src/main/java/it/unibz/krdb/obda/io/R2RMLParser.java
index 457069c14..08b45d5c2 100644
--- a/obdalib/obdalib-core/src/main/java/it/unibz/krdb/obda/io/R2RMLParser.java
+++ b/obdalib/obdalib-core/src/main/java/it/unibz/krdb/obda/io/R2RMLParser.java
@@ -1,657 +1,657 @@
package it.unibz.krdb.obda.io;
import it.unibz.krdb.obda.model.Constant;
import it.unibz.krdb.obda.model.DataTypePredicate;
import it.unibz.krdb.obda.model.Function;
import it.unibz.krdb.obda.model.NewLiteral;
import it.unibz.krdb.obda.model.OBDADataFactory;
import it.unibz.krdb.obda.model.Predicate;
import it.unibz.krdb.obda.model.Predicate.COL_TYPE;
import it.unibz.krdb.obda.model.impl.DataTypePredicateImpl;
import it.unibz.krdb.obda.model.impl.OBDADataFactoryImpl;
import it.unibz.krdb.obda.model.impl.OBDAVocabulary;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import org.openrdf.model.BNode;
import org.openrdf.model.Graph;
import org.openrdf.model.Resource;
import org.openrdf.model.Statement;
import org.openrdf.model.URI;
import org.openrdf.model.Value;
import org.openrdf.model.ValueFactory;
import org.openrdf.model.impl.ValueFactoryImpl;
import org.openrdf.model.util.GraphUtil;
public class R2RMLParser {
private final String baseuri = "http://example.com/base/";
public static final ValueFactory fact = new ValueFactoryImpl();
public final URI TriplesMap = fact.createURI("http://www.w3.org/ns/r2rml#TriplesMap");
public final URI logicalTable = fact.createURI("http://www.w3.org/ns/r2rml#logicalTable");
public final URI tableName = fact.createURI("http://www.w3.org/ns/r2rml#tableName");
public final URI baseTableOrView = fact.createURI("http://www.w3.org/ns/r2rml#baseTableOrView");
public final URI r2rmlView = fact.createURI("http://www.w3.org/ns/r2rml#R2RMLView");
public final URI subjectMap = fact.createURI("http://www.w3.org/ns/r2rml#subjectMap");
public final URI subject = fact.createURI("http://www.w3.org/ns/r2rml#subject");
public final URI predicateObjectMap = fact.createURI("http://www.w3.org/ns/r2rml#predicateObjectMap");
public final URI predicateMap = fact.createURI("http://www.w3.org/ns/r2rml#predicateMap");
public final URI objectMap = fact.createURI("http://www.w3.org/ns/r2rml#objectMap");
public final URI object = fact.createURI("http://www.w3.org/ns/r2rml#object");
public final URI refObjectMap = fact.createURI("http://www.w3.org/ns/r2rml#refObjectMap");
public final URI graphMap = fact.createURI("http://www.w3.org/ns/r2rml#graphMap");
public final URI graph = fact.createURI("http://www.w3.org/ns/r2rml#graph");
public final URI predicate = fact.createURI("http://www.w3.org/ns/r2rml#predicate");
public final URI template = fact.createURI("http://www.w3.org/ns/r2rml#template");
public final URI column = fact.createURI("http://www.w3.org/ns/r2rml#column");
public final URI constant = fact.createURI("http://www.w3.org/ns/r2rml#constant");
public final URI termType = fact.createURI("http://www.w3.org/ns/r2rml#termType");
public final URI language = fact.createURI("http://www.w3.org/ns/r2rml#language");
public final URI datatype = fact.createURI("http://www.w3.org/ns/r2rml#datatype");
public final URI inverseExpression = fact.createURI("http://www.w3.org/ns/r2rml#inverseExpression");
public final URI iri = fact.createURI("http://www.w3.org/ns/r2rml#IRI");
public final URI blankNode = fact.createURI("http://www.w3.org/ns/r2rml#BlankNode");
public final URI literal = fact.createURI("http://www.w3.org/ns/r2rml#Literal");
public final URI classUri = fact.createURI("http://www.w3.org/ns/r2rml#class");
public final URI sqlQuery = fact.createURI("http://www.w3.org/ns/r2rml#sqlQuery");
public final URI sqlVersion = fact.createURI("http://www.w3.org/ns/r2rml#sqlVersion");
public final URI parentTriplesMap = fact.createURI("http://www.w3.org/ns/r2rml#parentTriplesMap");
public final URI joinCondition = fact.createURI("http://www.w3.org/ns/r2rml#joinCondition");
public final URI child = fact.createURI("http://www.w3.org/ns/r2rml#child");
public final URI parent = fact.createURI("http://www.w3.org/ns/r2rml#parent");
private OBDADataFactory fac = OBDADataFactoryImpl.getInstance();
Iterator<Statement> iterator, newiterator;
List<Predicate> classPredicates = new ArrayList<Predicate>();
List<Resource> joinPredObjNodes = new ArrayList<Resource>();
String parsedString = "";
String subjectString = "";
String objectString = "";
String basePrefix = "";
public R2RMLParser() {
}
/*
* method to get the Resource nodes (TripleMaps) from the given Graph
*/
public Set<Resource> getMappingNodes(Graph myGraph)
{
Set<Resource> resources = GraphUtil.getSubjects(myGraph, logicalTable, null);
Set<Resource> nodes = new HashSet<Resource>();
for (Resource subj : resources) {
//add resource if it's a triplesMap declaration
iterator = myGraph.match(subj, fact.createURI(OBDAVocabulary.RDF_TYPE), TriplesMap);
if (iterator.hasNext()) {
nodes.add(subj);
}
}
return nodes;
}
/*
* method to return an sql string from a given Resource node in the Graph
*/
public String getSQLQuery(Graph myGraph, Resource subj) {
// System.out.println(subj.toString());
Iterator<Statement> tableit;
String sqlString;
// search for logicalTable declaration
Set<Value> objects = GraphUtil.getObjects(myGraph, subj, logicalTable);
if (objects.size() > 0){
Resource object = (Resource) objects.toArray()[0];
if (object instanceof BNode) {
//look for tableName declaration
String tableName = getSQLTableName(myGraph, object);
if (!tableName.isEmpty())
return ("SELECT * FROM " +tableName);
// search for sqlQuery declarations
sqlString = getSQLQueryString(myGraph, object);
if (!sqlString.isEmpty())
return sqlString;
// search for r2rmlview declaration
tableit = myGraph.match(object, r2rmlView, null);
if (tableit.hasNext()) {
Resource objectt = (Resource) tableit.next().getObject();
//look for sqlquery declaration
return getSQLQueryString(myGraph, objectt);
}
// search for basetableorview declaration
tableit = myGraph.match(object, baseTableOrView, null,
(Resource) null);
if (tableit.hasNext()) {
Resource objectt = (Resource) tableit.next().getObject();
//seach table name in basetableview definition
return ("SELECT * FROM " + getSQLTableName(myGraph, objectt));
}
} else {
//logicalTable not a node, only sqlquery
return getSQLQueryString(myGraph, object);
}
}
return "";
}
private String getSQLQueryString(Graph myGraph, Resource object) {
// search for sqlQuery declarations
Iterator<Statement> tableit = myGraph.match(object, sqlQuery, null);
if (tableit.hasNext()) {
String sqlString = tableit.next().getObject().toString();
// System.out.println(sqlString);
sqlString = trim(sqlString).trim();
if (sqlString.endsWith(";"))
sqlString = sqlString.substring(0, sqlString.length()-1);
return (sqlString);
}
return "";
}
private String getSQLTableName(Graph myGraph, Resource object){
//look for tableName declaration
Iterator<Statement> newiterator = myGraph.match(object, tableName, null);
if (newiterator.hasNext()) {
String sqlString = newiterator.next().getObject().toString();
// System.out.println(sqlString);
return trimTo1(sqlString);
}
return "";
}
public List<Predicate> getClassPredicates() {
List<Predicate> classes = new ArrayList<Predicate>();
for (Predicate p: classPredicates)
classes.add(p);
classPredicates.clear();
return classes;
}
public Set<Resource> getPredicateObjects(Graph myGraph, Resource subj) {
// process PREDICATEOBJECTs
Set<Resource> predobjs = new HashSet<Resource>();
Set<Value> objectss = GraphUtil.getObjects(myGraph, subj, predicateObjectMap);
for (Value objectVal : objectss) {
Resource object = (Resource) objectVal;
predobjs.add(object);
}
return predobjs;
}
public NewLiteral getSubjectAtom(Graph myGraph, Resource subj)
throws Exception {
return getSubjectAtom(myGraph, subj, "");
}
public NewLiteral getSubjectAtom(Graph myGraph, Resource subj, String joinCond)
throws Exception {
NewLiteral subjectAtom = null;
// process SUBJECT
Set<Value> objects = GraphUtil.getObjects(myGraph, subj, subjectMap);
Resource object = (Resource) objects.toArray()[0];
// process template declaration
iterator = myGraph.match(object, template, null);
if (iterator.hasNext()) {
parsedString = iterator.next().getObject().toString();
// System.out.println(parsedString);
subjectString = trim(parsedString);
// craete uri("...",var)
subjectAtom = getURIFunction((subjectString), joinCond);
}
// process column declaration
iterator = myGraph.match(object, column, null);
if (iterator.hasNext()) {
parsedString = iterator.next().getObject().toString();
// System.out.println(parsedString);
subjectString = trim(parsedString);
subjectAtom = getURIFunction((subjectString), joinCond);
}
// process constant declaration
iterator = myGraph.match(object, constant, null);
if (iterator.hasNext()) {
parsedString = iterator.next().getObject().toString();
// System.out.println(parsedString);
subjectString = trim(parsedString);
subjectAtom = getURIFunction((subjectString), joinCond);
}
// process termType declaration
iterator = myGraph.match(object, termType, null);
if (iterator.hasNext()) {
parsedString = iterator.next().getObject().toString();
// System.out.println(parsedString);
subjectAtom = getTermTypeAtom(parsedString, (subjectString));
}
// process class declaration
iterator = myGraph.match(object, classUri, null);
while (iterator.hasNext()) {
parsedString = iterator.next().getObject().toString();
// System.out.println(parsedString);
// create class(uri("...", var)) and add it to the body
classPredicates.add(fac.getClassPredicate(parsedString));
}
if (subjectAtom == null)
throw new Exception("Error in parsing the subjectMap in node "
+ subj.stringValue());
// System.out.println("subjectatom = " +subjectAtom.toString());
return subjectAtom;
}
public List<Predicate> getBodyPredicates(Graph myGraph, Resource object)
throws Exception {
List<Predicate> bodyPredicates = new ArrayList<Predicate>();
Predicate bodyPredicate = null;
// process PREDICATE
// look for the predicate
iterator = myGraph.match(object, predicate, null);
while (iterator.hasNext()) {
parsedString = iterator.next().getObject().toString();
// System.out.println(parsedString);
bodyPredicate = fac.getPredicate(parsedString, 2);
bodyPredicates.add(bodyPredicate);
}
// process PREDICATEMAP
// look for the predicateMap
iterator = myGraph.match(object, predicateMap, null);
while (iterator.hasNext()) {
Resource objectt = (Resource) (iterator.next().getObject());
// process constant declaration
Iterator<Statement> newiterator = myGraph.match(objectt, constant, null);
if (newiterator.hasNext()) {
parsedString = newiterator.next().getObject().toString();
// System.out.println(parsedString);
bodyPredicate = fac.getPredicate(parsedString, 2);
bodyPredicates.add(bodyPredicate);
}
}
if (bodyPredicate == null)
throw new Exception("Error in parsing the predicate");
return bodyPredicates;
}
public NewLiteral getObjectAtom(Graph myGraph, Resource objectt)
throws Exception {
return getObjectAtom(myGraph, objectt, "");
}
public NewLiteral getObjectAtom(Graph myGraph, Resource objectt, String joinCond)
throws Exception {
NewLiteral objectAtom = null;
// process OBJECT
// look for the object
iterator = myGraph.match(objectt, this.object, null);
if (iterator.hasNext()) {
parsedString = iterator.next().getObject().toString();
// System.out.println(parsedString);
//uriconstant
if(parsedString.startsWith("http://"))
objectAtom = fac.getURIConstant(OBDADataFactoryImpl.getIRI(parsedString));
else
{
//valueconstant
- Predicate pred = fac.getUriPredicate();
+ Predicate pred = fac.getUriTemplatePredicate(1);
NewLiteral newlit = fac.getValueConstant(trim(parsedString));
objectAtom = fac.getFunctionalTerm(pred, newlit);
}
}
// process OBJECTMAP
iterator = myGraph.match(objectt, objectMap, null);
if (iterator.hasNext()) {
Resource object = (Resource) (iterator.next().getObject());
// look for column declaration
newiterator = myGraph.match(object, column, null);
if (newiterator.hasNext()) {
parsedString = newiterator.next().getObject().toString();
objectString = trim(parsedString);
// System.out.println(parsedString);
if (!joinCond.isEmpty())
objectString = joinCond+(objectString);
objectAtom = fac.getVariable(objectString);
}
// look for constant declaration
newiterator = myGraph.match(object, constant, null);
if (newiterator.hasNext()) {
parsedString = newiterator.next().getObject().toString();
// System.out.println(parsedString);
objectString = trim(parsedString);
if (objectString.contains("^^"))
objectAtom = getExplicitTypedObject(objectString);
else
objectAtom = getConstantObject(objectString);
}
// look for template declaration
newiterator = myGraph.match(object, template, null);
if (newiterator.hasNext()) {
parsedString = newiterator.next().getObject().toString();
// craete uri("...",var)
objectString = trimTo1(parsedString);
objectAtom = getTypedFunction(trim(objectString), 1, joinCond);
}
// process termType declaration
newiterator = myGraph.match(object, termType, null);
if (newiterator.hasNext()) {
parsedString = newiterator.next().getObject().toString();
// System.out.println(parsedString);
objectAtom = getTermTypeAtom(parsedString, (objectString));
}
// look for language declaration
newiterator = myGraph.match(object, language, null);
if (newiterator.hasNext()) {
parsedString = newiterator.next().getObject().toString();
// System.out.println(parsedString);
NewLiteral lang = fac.getValueConstant(trim(parsedString.toLowerCase()));
//create literal(object, lang) atom
Predicate literal = OBDAVocabulary.RDFS_LITERAL_LANG;
NewLiteral langAtom = fac.getFunctionalTerm(literal, objectAtom, lang);
objectAtom = langAtom;
}
// look for datatype declaration
newiterator = myGraph.match(object, datatype, null);
if (newiterator.hasNext()) {
parsedString = newiterator.next().getObject().toString();
// System.out.println(parsedString);
//create datatype(object) atom
Predicate dtype = new DataTypePredicateImpl(OBDADataFactoryImpl.getIRI(parsedString), COL_TYPE.OBJECT);
NewLiteral dtAtom = fac.getFunctionalTerm(dtype, objectAtom);
objectAtom = dtAtom;
}
}
return objectAtom;
}
private NewLiteral getConstantObject(String objectString) {
if (objectString.startsWith("http:"))
return getURIFunction(objectString);
else
{ //literal
Constant constt = fac.getValueConstant(objectString);
Predicate pred = fac.getDataTypePredicateLiteral();
return fac.getFunctionalTerm(pred, constt);
}
}
private NewLiteral getExplicitTypedObject(String string) {
NewLiteral typedObject = null;
String[] strings = string.split("<");
if (strings.length > 1) {
String consts = strings[0];
consts = consts.substring(0, consts.length()-2);
consts = trim(consts);
String type = strings[1];
if (type.endsWith(">"))
type = type.substring(0, type.length() - 1);
DataTypePredicate predicate = new DataTypePredicateImpl(OBDADataFactoryImpl.getIRI(type), COL_TYPE.OBJECT);
//fac.getDataPropertyPredicate(OBDADataFactoryImpl.getIRI(type));
NewLiteral constant = fac.getValueConstant(consts);
typedObject = fac.getFunctionalTerm(predicate, constant);
}
return typedObject;
}
public List<Resource> getJoinNodes(Graph myGraph, Resource termMap)
{
List<Resource> joinPredObjNodes = new ArrayList<Resource>();
// get predicate-object nodes
Set<Resource> predicateObjectNodes = getPredicateObjects(myGraph, termMap);
for (Resource predobj : predicateObjectNodes) {
// for each predicate object map
// process OBJECTMAP
iterator = myGraph.match(predobj, objectMap, null);
if (iterator.hasNext()) {
Resource objectt = (Resource) (iterator.next().getObject());
// look for parentTriplesMap declaration
newiterator = myGraph.match(objectt, parentTriplesMap, null);
if (newiterator.hasNext()) {
// found a join condition, add the predicateobject node to the list
joinPredObjNodes.add(predobj);
}
}
}
return joinPredObjNodes;
}
private Function getTermTypeAtom(String type, String string) {
if (type.contentEquals(iri.stringValue())) {
return getURIFunction(string);
} else if (type.contentEquals(blankNode.stringValue())) {
return getTypedFunction(string, 2);
} else if (type.contentEquals(literal.stringValue())) {
return getTypedFunction(trim(string), 3);
}
return null;
}
private Function getURIFunction(String string, String joinCond) {
return getTypedFunction(string, 1, joinCond);
}
private Function getURIFunction(String string) {
return getTypedFunction(string, 1);
}
public Function getTypedFunction(String parsedString, int type) {
return getTypedFunction(parsedString, type, "");
}
public Function getTypedFunction(String parsedString, int type, String joinCond) {
List<NewLiteral> terms = new ArrayList<NewLiteral>();
String string = (parsedString);
if (!string.contains("{"))
if (!string.startsWith("http://"))
{ string = baseuri + "{" + string + "}";
if (type == 2)
string = "\"" + string + "\"";
}
else
{
type = 0;
}
if (type == 1 && !string.startsWith("http://"))
string = baseuri + string;
string = string.replace("\\{", "[");
string = string.replace("\\}", "]");
while (string.contains("{") ) {
int end = string.indexOf("}");
int begin = string.lastIndexOf("{", end);
String var = trim(string.substring(begin + 1, end));
//trim for making variable
terms.add(fac.getVariable(joinCond+(var)));
string = string.replace("{\"" + var + "\"}", "[]");
string = string.replace("{" + var + "}", "[]");
}
string = string.replace("[", "{");
string = string.replace("]", "}");
NewLiteral uriTemplate = null;
Predicate pred = null;
switch (type) {
//constant uri
case 0:
uriTemplate = fac.getURIConstant(string);
pred = fac.getUriTemplatePredicate(terms.size());
break;
// URI or IRI
case 1:
uriTemplate = fac.getValueConstant(string);
pred = fac.getUriTemplatePredicate(terms.size());
break;
// BNODE
case 2:
uriTemplate = fac.getBNodeConstant(string);
pred = fac.getBNodeTemplatePredicate(terms.size());
break;
// LITERAL
case 3:
uriTemplate = fac.getValueConstant(string);
pred = OBDAVocabulary.RDFS_LITERAL_LANG;//lang?
terms.add(OBDAVocabulary.NULL);
break;
}
// the URI template is always on the first position in the term list
terms.add(0, uriTemplate);
return fac.getFunctionalTerm(pred, terms);
}
private String trim(String string) {
while (string.startsWith("\"") && string.endsWith("\"")) {
string = string.substring(1, string.length() - 1);
}
return string;
}
private String trimTo1(String string) {
while (string.startsWith("\"\"") && string.endsWith("\"\"")) {
string = string.substring(1, string.length() - 1);
}
return string;
}
public Resource getReferencedTripleMap(Graph myGraph, Resource predobjNode) {
// process OBJECTMAP
iterator = myGraph.match(predobjNode, objectMap, null);
if (iterator.hasNext()) {
Resource object = (Resource) (iterator.next().getObject());
// look for parentTriplesMap declaration
newiterator = myGraph.match(object, parentTriplesMap, null);
if (newiterator.hasNext()) {
return (Resource)newiterator.next().getObject();
}
}
return null;
}
public String getChildColumn(Graph myGraph, Resource predobjNode) {
// process OBJECTMAP
iterator = myGraph.match(predobjNode, objectMap, null);
if (iterator.hasNext()) {
Resource object = (Resource) (iterator.next().getObject());
// look for joincondition declaration
newiterator = myGraph.match(object, joinCondition, null);
if (newiterator.hasNext()) {
Resource objectt = (Resource) (newiterator.next().getObject());
// look for child declaration
Iterator<Statement> newiterator2 = myGraph.match(objectt, child, null);
if (newiterator2.hasNext()) {
return trimTo1(newiterator2.next().getObject().stringValue());
}
}
}
return null;
}
public String getParentColumn(Graph myGraph, Resource predobjNode) {
// process OBJECTMAP
iterator = myGraph.match(predobjNode, objectMap, null);
if (iterator.hasNext()) {
Resource object = (Resource) (iterator.next().getObject());
// look for joincondition declaration
newiterator = myGraph.match(object, joinCondition, null);
if (newiterator.hasNext()) {
Resource objectt = (Resource) (newiterator.next().getObject());
// look for parent declaration
Iterator<Statement> newiterator2 = myGraph.match(objectt, parent, null);
if (newiterator2.hasNext()) {
return trimTo1(newiterator2.next().getObject().stringValue());
}
}
}
return null;
}
}
| true
| true
|
public NewLiteral getObjectAtom(Graph myGraph, Resource objectt, String joinCond)
throws Exception {
NewLiteral objectAtom = null;
// process OBJECT
// look for the object
iterator = myGraph.match(objectt, this.object, null);
if (iterator.hasNext()) {
parsedString = iterator.next().getObject().toString();
// System.out.println(parsedString);
//uriconstant
if(parsedString.startsWith("http://"))
objectAtom = fac.getURIConstant(OBDADataFactoryImpl.getIRI(parsedString));
else
{
//valueconstant
Predicate pred = fac.getUriPredicate();
NewLiteral newlit = fac.getValueConstant(trim(parsedString));
objectAtom = fac.getFunctionalTerm(pred, newlit);
}
}
// process OBJECTMAP
iterator = myGraph.match(objectt, objectMap, null);
if (iterator.hasNext()) {
Resource object = (Resource) (iterator.next().getObject());
// look for column declaration
newiterator = myGraph.match(object, column, null);
if (newiterator.hasNext()) {
parsedString = newiterator.next().getObject().toString();
objectString = trim(parsedString);
// System.out.println(parsedString);
if (!joinCond.isEmpty())
objectString = joinCond+(objectString);
objectAtom = fac.getVariable(objectString);
}
// look for constant declaration
newiterator = myGraph.match(object, constant, null);
if (newiterator.hasNext()) {
parsedString = newiterator.next().getObject().toString();
// System.out.println(parsedString);
objectString = trim(parsedString);
if (objectString.contains("^^"))
objectAtom = getExplicitTypedObject(objectString);
else
objectAtom = getConstantObject(objectString);
}
// look for template declaration
newiterator = myGraph.match(object, template, null);
if (newiterator.hasNext()) {
parsedString = newiterator.next().getObject().toString();
// craete uri("...",var)
objectString = trimTo1(parsedString);
objectAtom = getTypedFunction(trim(objectString), 1, joinCond);
}
// process termType declaration
newiterator = myGraph.match(object, termType, null);
if (newiterator.hasNext()) {
parsedString = newiterator.next().getObject().toString();
// System.out.println(parsedString);
objectAtom = getTermTypeAtom(parsedString, (objectString));
}
// look for language declaration
newiterator = myGraph.match(object, language, null);
if (newiterator.hasNext()) {
parsedString = newiterator.next().getObject().toString();
// System.out.println(parsedString);
NewLiteral lang = fac.getValueConstant(trim(parsedString.toLowerCase()));
//create literal(object, lang) atom
Predicate literal = OBDAVocabulary.RDFS_LITERAL_LANG;
NewLiteral langAtom = fac.getFunctionalTerm(literal, objectAtom, lang);
objectAtom = langAtom;
}
// look for datatype declaration
newiterator = myGraph.match(object, datatype, null);
if (newiterator.hasNext()) {
parsedString = newiterator.next().getObject().toString();
// System.out.println(parsedString);
//create datatype(object) atom
Predicate dtype = new DataTypePredicateImpl(OBDADataFactoryImpl.getIRI(parsedString), COL_TYPE.OBJECT);
NewLiteral dtAtom = fac.getFunctionalTerm(dtype, objectAtom);
objectAtom = dtAtom;
}
}
return objectAtom;
}
|
public NewLiteral getObjectAtom(Graph myGraph, Resource objectt, String joinCond)
throws Exception {
NewLiteral objectAtom = null;
// process OBJECT
// look for the object
iterator = myGraph.match(objectt, this.object, null);
if (iterator.hasNext()) {
parsedString = iterator.next().getObject().toString();
// System.out.println(parsedString);
//uriconstant
if(parsedString.startsWith("http://"))
objectAtom = fac.getURIConstant(OBDADataFactoryImpl.getIRI(parsedString));
else
{
//valueconstant
Predicate pred = fac.getUriTemplatePredicate(1);
NewLiteral newlit = fac.getValueConstant(trim(parsedString));
objectAtom = fac.getFunctionalTerm(pred, newlit);
}
}
// process OBJECTMAP
iterator = myGraph.match(objectt, objectMap, null);
if (iterator.hasNext()) {
Resource object = (Resource) (iterator.next().getObject());
// look for column declaration
newiterator = myGraph.match(object, column, null);
if (newiterator.hasNext()) {
parsedString = newiterator.next().getObject().toString();
objectString = trim(parsedString);
// System.out.println(parsedString);
if (!joinCond.isEmpty())
objectString = joinCond+(objectString);
objectAtom = fac.getVariable(objectString);
}
// look for constant declaration
newiterator = myGraph.match(object, constant, null);
if (newiterator.hasNext()) {
parsedString = newiterator.next().getObject().toString();
// System.out.println(parsedString);
objectString = trim(parsedString);
if (objectString.contains("^^"))
objectAtom = getExplicitTypedObject(objectString);
else
objectAtom = getConstantObject(objectString);
}
// look for template declaration
newiterator = myGraph.match(object, template, null);
if (newiterator.hasNext()) {
parsedString = newiterator.next().getObject().toString();
// craete uri("...",var)
objectString = trimTo1(parsedString);
objectAtom = getTypedFunction(trim(objectString), 1, joinCond);
}
// process termType declaration
newiterator = myGraph.match(object, termType, null);
if (newiterator.hasNext()) {
parsedString = newiterator.next().getObject().toString();
// System.out.println(parsedString);
objectAtom = getTermTypeAtom(parsedString, (objectString));
}
// look for language declaration
newiterator = myGraph.match(object, language, null);
if (newiterator.hasNext()) {
parsedString = newiterator.next().getObject().toString();
// System.out.println(parsedString);
NewLiteral lang = fac.getValueConstant(trim(parsedString.toLowerCase()));
//create literal(object, lang) atom
Predicate literal = OBDAVocabulary.RDFS_LITERAL_LANG;
NewLiteral langAtom = fac.getFunctionalTerm(literal, objectAtom, lang);
objectAtom = langAtom;
}
// look for datatype declaration
newiterator = myGraph.match(object, datatype, null);
if (newiterator.hasNext()) {
parsedString = newiterator.next().getObject().toString();
// System.out.println(parsedString);
//create datatype(object) atom
Predicate dtype = new DataTypePredicateImpl(OBDADataFactoryImpl.getIRI(parsedString), COL_TYPE.OBJECT);
NewLiteral dtAtom = fac.getFunctionalTerm(dtype, objectAtom);
objectAtom = dtAtom;
}
}
return objectAtom;
}
|
diff --git a/src/main/java/org/springframework/shell/JLineLogHandler.java b/src/main/java/org/springframework/shell/JLineLogHandler.java
index d1286ae..ebb5187 100644
--- a/src/main/java/org/springframework/shell/JLineLogHandler.java
+++ b/src/main/java/org/springframework/shell/JLineLogHandler.java
@@ -1,212 +1,211 @@
package org.springframework.shell;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.logging.Formatter;
import java.util.logging.Handler;
import java.util.logging.Level;
import java.util.logging.LogRecord;
import jline.ANSIBuffer;
import jline.ConsoleReader;
import org.springframework.roo.shell.ShellPromptAccessor;
import org.springframework.roo.support.util.Assert;
import org.springframework.roo.support.util.IOUtils;
import org.springframework.roo.support.util.OsUtils;
import org.springframework.roo.support.util.StringUtils;
/**
* JDK logging {@link Handler} that emits log messages to a JLine {@link ConsoleReader}.
*
* @author Ben Alex
* @since 1.0
*/
public class JLineLogHandler extends Handler {
// Constants
private static final boolean BRIGHT_COLORS = Boolean.getBoolean("roo.bright");
// Fields
private ConsoleReader reader;
private ShellPromptAccessor shellPromptAccessor;
private static ThreadLocal<Boolean> redrawProhibit = new ThreadLocal<Boolean>();
private static String lastMessage;
private static boolean includeThreadName = false;
private boolean ansiSupported;
private String userInterfaceThreadName;
private static boolean suppressDuplicateMessages = true;
public JLineLogHandler(final ConsoleReader reader, final ShellPromptAccessor shellPromptAccessor) {
Assert.notNull(reader, "Console reader required");
Assert.notNull(shellPromptAccessor, "Shell prompt accessor required");
this.reader = reader;
this.shellPromptAccessor = shellPromptAccessor;
this.userInterfaceThreadName = Thread.currentThread().getName();
this.ansiSupported = reader.getTerminal().isANSISupported();
setFormatter(new Formatter() {
@Override
public String format(final LogRecord record) {
StringBuffer sb = new StringBuffer();
if (record.getMessage() != null) {
sb.append(record.getMessage()).append(StringUtils.LINE_SEPARATOR);
}
if (record.getThrown() != null) {
PrintWriter pw = null;
try {
StringWriter sw = new StringWriter();
pw = new PrintWriter(sw);
record.getThrown().printStackTrace(pw);
sb.append(sw.toString());
} catch (Exception ex) {
} finally {
IOUtils.closeQuietly(pw);
}
}
return sb.toString();
}
});
}
@Override
public void flush() {}
@Override
public void close() throws SecurityException {}
public static void prohibitRedraw() {
redrawProhibit.set(true);
}
public static void cancelRedrawProhibition() {
redrawProhibit.remove();
}
public static void setIncludeThreadName(final boolean include) {
includeThreadName = include;
}
public static void resetMessageTracking() {
lastMessage = null; // see ROO-251
}
public static boolean isSuppressDuplicateMessages() {
return suppressDuplicateMessages;
}
public static void setSuppressDuplicateMessages(final boolean suppressDuplicateMessages) {
JLineLogHandler.suppressDuplicateMessages = suppressDuplicateMessages;
}
@Override
public void publish(final LogRecord record) {
try {
// Avoid repeating the same message that displayed immediately before the current message (ROO-30, ROO-1873)
String toDisplay = toDisplay(record);
if (toDisplay.equals(lastMessage) && suppressDuplicateMessages) {
return;
}
lastMessage = toDisplay;
StringBuffer buffer = reader.getCursorBuffer().getBuffer();
int cursor = reader.getCursorBuffer().cursor;
if (reader.getCursorBuffer().length() > 0) {
// The user has semi-typed something, so put a new line in so the debug message is separated
reader.printNewline();
// We need to cancel whatever they typed (it's reset later on), so the line appears empty
reader.getCursorBuffer().setBuffer(new StringBuffer());
reader.getCursorBuffer().cursor = 0;
}
// This ensures nothing is ever displayed when redrawing the line
reader.setDefaultPrompt("");
- reader.redrawLine();
reader.printString(toDisplay);
// Now restore the line formatting settings back to their original
reader.setDefaultPrompt(shellPromptAccessor.getShellPrompt());
reader.getCursorBuffer().setBuffer(buffer);
reader.getCursorBuffer().cursor = cursor;
Boolean prohibitingRedraw = redrawProhibit.get();
if (prohibitingRedraw == null) {
reader.redrawLine();
}
reader.flushConsole();
} catch (Exception e) {
reportError("Could not publish log message", e, Level.SEVERE.intValue());
}
}
private String toDisplay(final LogRecord event) {
StringBuilder sb = new StringBuilder();
String threadName;
String eventString;
if (includeThreadName && !userInterfaceThreadName.equals(Thread.currentThread().getName()) && !"".equals(Thread.currentThread().getName())) {
threadName = "[" + Thread.currentThread().getName() + "]";
// Build an event string that will indent nicely given the left hand side now contains a thread name
StringBuilder lineSeparatorAndIndentingString = new StringBuilder();
for (int i = 0; i <= threadName.length(); i++) {
lineSeparatorAndIndentingString.append(" ");
}
eventString = " " + getFormatter().format(event).replace(StringUtils.LINE_SEPARATOR, StringUtils.LINE_SEPARATOR + lineSeparatorAndIndentingString.toString());
if (eventString.endsWith(lineSeparatorAndIndentingString.toString())) {
eventString = eventString.substring(0, eventString.length() - lineSeparatorAndIndentingString.length());
}
} else {
threadName = "";
eventString = getFormatter().format(event);
}
if (ansiSupported) {
if (event.getLevel().intValue() >= Level.SEVERE.intValue()) {
sb.append(getANSIBuffer().reverse(threadName).red(eventString));
} else if (event.getLevel().intValue() >= Level.WARNING.intValue()) {
sb.append(getANSIBuffer().reverse(threadName).magenta(eventString));
} else if (event.getLevel().intValue() >= Level.INFO.intValue()) {
sb.append(getANSIBuffer().reverse(threadName).green(eventString));
} else {
sb.append(getANSIBuffer().reverse(threadName).append(eventString));
}
} else {
sb.append(threadName).append(eventString);
}
return sb.toString();
}
/**
* Makes text brighter if requested through system property 'roo.bright' and
* works around issue on Windows in using reverse() in combination with the
* Jansi lib, which leaves its 'negative' flag set unless reset explicitly.
*
* @return new patched ANSIBuffer
*/
public static ANSIBuffer getANSIBuffer() {
final char esc = (char) 27;
return new ANSIBuffer() {
@Override
public ANSIBuffer reverse(final String str) {
if (OsUtils.isWindows()) {
return super.reverse(str).append(ANSICodes.attrib(esc));
}
return super.reverse(str);
};
@Override
public ANSIBuffer attrib(final String str, final int code) {
if (BRIGHT_COLORS && 30 <= code && code <= 37) {
// This is a color code: add a 'bright' code
return append(esc + "[" + code + ";1m").append(str).append(ANSICodes.attrib(0));
}
return super.attrib(str, code);
}
};
}
}
| true
| true
|
public void publish(final LogRecord record) {
try {
// Avoid repeating the same message that displayed immediately before the current message (ROO-30, ROO-1873)
String toDisplay = toDisplay(record);
if (toDisplay.equals(lastMessage) && suppressDuplicateMessages) {
return;
}
lastMessage = toDisplay;
StringBuffer buffer = reader.getCursorBuffer().getBuffer();
int cursor = reader.getCursorBuffer().cursor;
if (reader.getCursorBuffer().length() > 0) {
// The user has semi-typed something, so put a new line in so the debug message is separated
reader.printNewline();
// We need to cancel whatever they typed (it's reset later on), so the line appears empty
reader.getCursorBuffer().setBuffer(new StringBuffer());
reader.getCursorBuffer().cursor = 0;
}
// This ensures nothing is ever displayed when redrawing the line
reader.setDefaultPrompt("");
reader.redrawLine();
reader.printString(toDisplay);
// Now restore the line formatting settings back to their original
reader.setDefaultPrompt(shellPromptAccessor.getShellPrompt());
reader.getCursorBuffer().setBuffer(buffer);
reader.getCursorBuffer().cursor = cursor;
Boolean prohibitingRedraw = redrawProhibit.get();
if (prohibitingRedraw == null) {
reader.redrawLine();
}
reader.flushConsole();
} catch (Exception e) {
reportError("Could not publish log message", e, Level.SEVERE.intValue());
}
}
|
public void publish(final LogRecord record) {
try {
// Avoid repeating the same message that displayed immediately before the current message (ROO-30, ROO-1873)
String toDisplay = toDisplay(record);
if (toDisplay.equals(lastMessage) && suppressDuplicateMessages) {
return;
}
lastMessage = toDisplay;
StringBuffer buffer = reader.getCursorBuffer().getBuffer();
int cursor = reader.getCursorBuffer().cursor;
if (reader.getCursorBuffer().length() > 0) {
// The user has semi-typed something, so put a new line in so the debug message is separated
reader.printNewline();
// We need to cancel whatever they typed (it's reset later on), so the line appears empty
reader.getCursorBuffer().setBuffer(new StringBuffer());
reader.getCursorBuffer().cursor = 0;
}
// This ensures nothing is ever displayed when redrawing the line
reader.setDefaultPrompt("");
reader.printString(toDisplay);
// Now restore the line formatting settings back to their original
reader.setDefaultPrompt(shellPromptAccessor.getShellPrompt());
reader.getCursorBuffer().setBuffer(buffer);
reader.getCursorBuffer().cursor = cursor;
Boolean prohibitingRedraw = redrawProhibit.get();
if (prohibitingRedraw == null) {
reader.redrawLine();
}
reader.flushConsole();
} catch (Exception e) {
reportError("Could not publish log message", e, Level.SEVERE.intValue());
}
}
|
diff --git a/core/src/main/java/hudson/tasks/Ant.java b/core/src/main/java/hudson/tasks/Ant.java
index c667cba..f69bee5 100644
--- a/core/src/main/java/hudson/tasks/Ant.java
+++ b/core/src/main/java/hudson/tasks/Ant.java
@@ -1,251 +1,251 @@
package hudson.tasks;
import hudson.CopyOnWrite;
import hudson.Launcher;
import hudson.Util;
import hudson.model.Build;
import hudson.model.BuildListener;
import hudson.model.Descriptor;
import hudson.model.Project;
import hudson.util.FormFieldValidator;
import hudson.util.ArgumentListBuilder;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import org.kohsuke.stapler.DataBoundConstructor;
import javax.servlet.ServletException;
import java.io.File;
import java.io.IOException;
import java.util.Map;
/**
* @author Kohsuke Kawaguchi
*/
public class Ant extends Builder {
/**
* The targets, properties, and other Ant options.
* Either separated by whitespace or newline.
*/
private final String targets;
/**
* Identifies {@link AntInstallation} to be used.
*/
private final String antName;
/**
* ANT_OPTS if not null.
*/
private final String antOpts;
@DataBoundConstructor
public Ant(String targets,String antName, String antOpts) {
this.targets = targets;
this.antName = antName;
this.antOpts = Util.fixEmpty(antOpts.trim());
}
public String getTargets() {
return targets;
}
/**
* Gets the Ant to invoke,
* or null to invoke the default one.
*/
public AntInstallation getAnt() {
for( AntInstallation i : DESCRIPTOR.getInstallations() ) {
if(antName!=null && i.getName().equals(antName))
return i;
}
return null;
}
/**
* Gets the ANT_OPTS parameter, or null.
*/
public String getAntOpts() {
return antOpts;
}
- public boolean perform(Build build, Launcher launcher, BuildListener listener) {
+ public boolean perform(Build build, Launcher launcher, BuildListener listener) throws InterruptedException {
Project proj = build.getProject();
ArgumentListBuilder args = new ArgumentListBuilder();
String execName;
if(launcher.isUnix())
execName = "ant";
else
execName = "ant.bat";
String normalizedTarget = targets.replaceAll("[\t\r\n]+"," ");
AntInstallation ai = getAnt();
if(ai==null) {
args.add(execName);
} else {
File exec = ai.getExecutable();
if(!ai.getExists()) {
listener.fatalError(exec+" doesn't exist");
return false;
}
args.add(exec.getPath());
}
args.addTokenized(normalizedTarget);
Map<String,String> env = build.getEnvVars();
if(ai!=null)
env.put("ANT_HOME",ai.getAntHome());
if(antOpts!=null)
env.put("ANT_OPTS",antOpts);
if(!launcher.isUnix()) {
// on Windows, executing batch file can't return the correct error code,
// so we need to wrap it into cmd.exe.
// double %% is needed because we want ERRORLEVEL to be expanded after
// batch file executed, not before. This alone shows how broken Windows is...
args.prepend("cmd.exe","/C");
args.add("&&","exit","%%ERRORLEVEL%%");
}
try {
int r = launcher.launch(args.toCommandArray(),env,listener.getLogger(),proj.getModuleRoot()).join();
return r==0;
} catch (IOException e) {
Util.displayIOException(e,listener);
e.printStackTrace( listener.fatalError("command execution failed") );
return false;
}
}
public Descriptor<Builder> getDescriptor() {
return DESCRIPTOR;
}
public static final DescriptorImpl DESCRIPTOR = new DescriptorImpl();
public static final class DescriptorImpl extends Descriptor<Builder> {
@CopyOnWrite
private volatile AntInstallation[] installations = new AntInstallation[0];
private DescriptorImpl() {
super(Ant.class);
load();
}
protected void convert(Map<String,Object> oldPropertyBag) {
if(oldPropertyBag.containsKey("installations"))
installations = (AntInstallation[]) oldPropertyBag.get("installations");
}
public String getHelpFile() {
return "/help/project-config/ant.html";
}
public String getDisplayName() {
return "Invoke top-level Ant targets";
}
public AntInstallation[] getInstallations() {
return installations;
}
public boolean configure(StaplerRequest req) {
boolean r = true;
int i;
String[] names = req.getParameterValues("ant_name");
String[] homes = req.getParameterValues("ant_home");
int len;
if(names!=null && homes!=null)
len = Math.min(names.length,homes.length);
else
len = 0;
AntInstallation[] insts = new AntInstallation[len];
for( i=0; i<len; i++ ) {
if(names[i].length()==0 || homes[i].length()==0) continue;
insts[i] = new AntInstallation(names[i],homes[i]);
}
this.installations = insts;
save();
return r;
}
public Builder newInstance(StaplerRequest req) {
return req.bindParameters(Ant.class,"ant.");
}
//
// web methods
//
/**
* Checks if the ANT_HOME is valid.
*/
public void doCheckAntHome( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
// this can be used to check the existence of a file on the server, so needs to be protected
new FormFieldValidator(req,rsp,true) {
public void check() throws IOException, ServletException {
File f = getFileParameter("value");
if(!f.isDirectory()) {
error(f+" is not a directory");
return;
}
File antJar = new File(f,"lib/ant.jar");
if(!antJar.exists()) {
error(f+" doesn't look like an Ant directory");
return;
}
ok();
}
}.process();
}
}
public static final class AntInstallation {
private final String name;
private final String antHome;
public AntInstallation(String name, String antHome) {
this.name = name;
this.antHome = antHome;
}
/**
* install directory.
*/
public String getAntHome() {
return antHome;
}
/**
* Human readable display name.
*/
public String getName() {
return name;
}
public File getExecutable() {
String execName;
if(File.separatorChar=='\\')
execName = "ant.bat";
else
execName = "ant";
return new File(getAntHome(),"bin/"+execName);
}
/**
* Returns true if the executable exists.
*/
public boolean getExists() {
return getExecutable().exists();
}
}
}
| true
| true
|
public boolean perform(Build build, Launcher launcher, BuildListener listener) {
Project proj = build.getProject();
ArgumentListBuilder args = new ArgumentListBuilder();
String execName;
if(launcher.isUnix())
execName = "ant";
else
execName = "ant.bat";
String normalizedTarget = targets.replaceAll("[\t\r\n]+"," ");
AntInstallation ai = getAnt();
if(ai==null) {
args.add(execName);
} else {
File exec = ai.getExecutable();
if(!ai.getExists()) {
listener.fatalError(exec+" doesn't exist");
return false;
}
args.add(exec.getPath());
}
args.addTokenized(normalizedTarget);
Map<String,String> env = build.getEnvVars();
if(ai!=null)
env.put("ANT_HOME",ai.getAntHome());
if(antOpts!=null)
env.put("ANT_OPTS",antOpts);
if(!launcher.isUnix()) {
// on Windows, executing batch file can't return the correct error code,
// so we need to wrap it into cmd.exe.
// double %% is needed because we want ERRORLEVEL to be expanded after
// batch file executed, not before. This alone shows how broken Windows is...
args.prepend("cmd.exe","/C");
args.add("&&","exit","%%ERRORLEVEL%%");
}
try {
int r = launcher.launch(args.toCommandArray(),env,listener.getLogger(),proj.getModuleRoot()).join();
return r==0;
} catch (IOException e) {
Util.displayIOException(e,listener);
e.printStackTrace( listener.fatalError("command execution failed") );
return false;
}
}
|
public boolean perform(Build build, Launcher launcher, BuildListener listener) throws InterruptedException {
Project proj = build.getProject();
ArgumentListBuilder args = new ArgumentListBuilder();
String execName;
if(launcher.isUnix())
execName = "ant";
else
execName = "ant.bat";
String normalizedTarget = targets.replaceAll("[\t\r\n]+"," ");
AntInstallation ai = getAnt();
if(ai==null) {
args.add(execName);
} else {
File exec = ai.getExecutable();
if(!ai.getExists()) {
listener.fatalError(exec+" doesn't exist");
return false;
}
args.add(exec.getPath());
}
args.addTokenized(normalizedTarget);
Map<String,String> env = build.getEnvVars();
if(ai!=null)
env.put("ANT_HOME",ai.getAntHome());
if(antOpts!=null)
env.put("ANT_OPTS",antOpts);
if(!launcher.isUnix()) {
// on Windows, executing batch file can't return the correct error code,
// so we need to wrap it into cmd.exe.
// double %% is needed because we want ERRORLEVEL to be expanded after
// batch file executed, not before. This alone shows how broken Windows is...
args.prepend("cmd.exe","/C");
args.add("&&","exit","%%ERRORLEVEL%%");
}
try {
int r = launcher.launch(args.toCommandArray(),env,listener.getLogger(),proj.getModuleRoot()).join();
return r==0;
} catch (IOException e) {
Util.displayIOException(e,listener);
e.printStackTrace( listener.fatalError("command execution failed") );
return false;
}
}
|
diff --git a/core/components/components/src/main/java/org/mobicents/slee/container/component/deployment/DeployableUnitJarComponentBuilder.java b/core/components/components/src/main/java/org/mobicents/slee/container/component/deployment/DeployableUnitJarComponentBuilder.java
index 57f1a35d5..81595c73f 100644
--- a/core/components/components/src/main/java/org/mobicents/slee/container/component/deployment/DeployableUnitJarComponentBuilder.java
+++ b/core/components/components/src/main/java/org/mobicents/slee/container/component/deployment/DeployableUnitJarComponentBuilder.java
@@ -1,457 +1,457 @@
package org.mobicents.slee.container.component.deployment;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.jar.JarInputStream;
import java.util.zip.ZipEntry;
import javax.slee.SLEEException;
import javax.slee.management.DeploymentException;
import org.apache.log4j.Logger;
import org.mobicents.slee.container.component.ComponentManagementImpl;
import org.mobicents.slee.container.component.EventTypeComponentImpl;
import org.mobicents.slee.container.component.LibraryComponentImpl;
import org.mobicents.slee.container.component.ProfileSpecificationComponentImpl;
import org.mobicents.slee.container.component.ResourceAdaptorComponentImpl;
import org.mobicents.slee.container.component.ResourceAdaptorTypeComponentImpl;
import org.mobicents.slee.container.component.SbbComponentImpl;
import org.mobicents.slee.container.component.AbstractSleeComponent;
import org.mobicents.slee.container.component.classloading.URLClassLoaderDomain;
import org.mobicents.slee.container.component.deployment.classloading.URLClassLoaderDomainImpl;
import org.mobicents.slee.container.component.deployment.jaxb.descriptors.EventTypeDescriptorFactoryImpl;
import org.mobicents.slee.container.component.deployment.jaxb.descriptors.EventTypeDescriptorImpl;
import org.mobicents.slee.container.component.deployment.jaxb.descriptors.LibraryDescriptorFactoryImpl;
import org.mobicents.slee.container.component.deployment.jaxb.descriptors.LibraryDescriptorImpl;
import org.mobicents.slee.container.component.deployment.jaxb.descriptors.ProfileSpecificationDescriptorFactoryImpl;
import org.mobicents.slee.container.component.deployment.jaxb.descriptors.ProfileSpecificationDescriptorImpl;
import org.mobicents.slee.container.component.deployment.jaxb.descriptors.ResourceAdaptorDescriptorFactoryImpl;
import org.mobicents.slee.container.component.deployment.jaxb.descriptors.ResourceAdaptorDescriptorImpl;
import org.mobicents.slee.container.component.deployment.jaxb.descriptors.ResourceAdaptorTypeDescriptorFactoryImpl;
import org.mobicents.slee.container.component.deployment.jaxb.descriptors.ResourceAdaptorTypeDescriptorImpl;
import org.mobicents.slee.container.component.deployment.jaxb.descriptors.SbbDescriptorFactoryImpl;
import org.mobicents.slee.container.component.deployment.jaxb.descriptors.SbbDescriptorImpl;
import org.mobicents.slee.container.component.library.JarDescriptor;
/**
* DU Component jar builder
*
* @author martins
*
*/
public class DeployableUnitJarComponentBuilder {
private static final Logger logger = Logger
.getLogger(DeployableUnitJarComponentBuilder.class);
private final ComponentManagementImpl componentManagement;
/**
*
*/
public DeployableUnitJarComponentBuilder(
ComponentManagementImpl componentManagement) {
this.componentManagement = componentManagement;
}
/**
* Builds the DU component from a jar with the specified file name,
* contained in the specified DU jar file. The component is built in the
* specified deployment dir.
*
* @param componentJarFileName
* @param deployableUnitJar
* @param deploymentDir
* @param documentBuilder
* @return
* @throws DeploymentException
*/
@SuppressWarnings("deprecation")
public List<AbstractSleeComponent> buildComponents(
String componentJarFileName, JarFile deployableUnitJar,
File deploymentDir)
throws DeploymentException {
final boolean loadClassesFirstFromAS = componentManagement.getClassLoaderFactory().getConfiguration().isLoadClassesFirstFromAS();
// extract the component jar from the DU jar, to the temp du dir
File extractedFile = extractFile(componentJarFileName,
deployableUnitJar, deploymentDir);
JarFile componentJarFile = null;
try {
componentJarFile = new JarFile(extractedFile);
} catch (IOException e) {
throw new DeploymentException(
"failed to create jar file for extracted file "
+ extractedFile);
}
InputStream componentDescriptorInputStream = null;
List<AbstractSleeComponent> components = new ArrayList<AbstractSleeComponent>();
try {
// now extract the jar file to a new dir
File componentJarDeploymentDir = new File(deploymentDir,
- componentJarFileName + "-contents");
+ componentJarFileName.replaceAll("/","-") + "-contents");
if (!componentJarDeploymentDir.exists()) {
if (!componentJarDeploymentDir.mkdir()) {
throw new SLEEException("dir for jar "
+ componentJarFileName + " not created in "
+ deploymentDir);
}
} else {
throw new SLEEException("dir for jar " + componentJarFileName
+ " already exists in " + deploymentDir);
}
extractJar(componentJarFile, componentJarDeploymentDir);
// create components from descriptor
JarEntry componentDescriptor = null;
if ((componentDescriptor = componentJarFile
.getJarEntry("META-INF/sbb-jar.xml")) != null) {
// create class loader domain shared by all components
URLClassLoaderDomain classLoaderDomain = new URLClassLoaderDomainImpl(
new URL[] { componentJarDeploymentDir.toURL() }, Thread
.currentThread().getContextClassLoader(),
loadClassesFirstFromAS);
// parse descriptor
componentDescriptorInputStream = componentJarFile
.getInputStream(componentDescriptor);
SbbDescriptorFactoryImpl descriptorFactory = componentManagement
.getComponentDescriptorFactory()
.getSbbDescriptorFactory();
List<SbbDescriptorImpl> descriptors = descriptorFactory
.parse(componentDescriptorInputStream);
// create components
for (SbbDescriptorImpl descriptor : descriptors) {
SbbComponentImpl component = new SbbComponentImpl(
descriptor);
component.setDeploymentDir(componentJarDeploymentDir);
component.setClassLoaderDomain(classLoaderDomain);
components.add(component);
}
} else if ((componentDescriptor = componentJarFile
.getJarEntry("META-INF/profile-spec-jar.xml")) != null) {
// create class loader domain shared by all components
URLClassLoaderDomain classLoaderDomain = new URLClassLoaderDomainImpl(
new URL[] { componentJarDeploymentDir.toURL() }, Thread
.currentThread().getContextClassLoader(),
loadClassesFirstFromAS);
// parse descriptor
componentDescriptorInputStream = componentJarFile
.getInputStream(componentDescriptor);
ProfileSpecificationDescriptorFactoryImpl descriptorFactory = componentManagement
.getComponentDescriptorFactory()
.getProfileSpecificationDescriptorFactory();
List<ProfileSpecificationDescriptorImpl> descriptors = descriptorFactory
.parse(componentDescriptorInputStream);
// create components
for (ProfileSpecificationDescriptorImpl descriptor : descriptors) {
ProfileSpecificationComponentImpl component = new ProfileSpecificationComponentImpl(
descriptor);
component.setDeploymentDir(componentJarDeploymentDir);
component.setClassLoaderDomain(classLoaderDomain);
components.add(component);
}
} else if ((componentDescriptor = componentJarFile
.getJarEntry("META-INF/library-jar.xml")) != null) {
Set<LibraryComponentImpl> libraryComponents = new HashSet<LibraryComponentImpl>();
// we need to gather all URLs for the shared class loader domain
// to watch
Set<URL> classLoaderDomainURLs = new HashSet<URL>();
classLoaderDomainURLs.add(componentJarDeploymentDir.toURL());
// parse the descriptor
componentDescriptorInputStream = componentJarFile
.getInputStream(componentDescriptor);
LibraryDescriptorFactoryImpl descriptorFactory = componentManagement
.getComponentDescriptorFactory()
.getLibraryDescriptorFactory();
List<LibraryDescriptorImpl> descriptors = descriptorFactory
.parse(componentDescriptorInputStream);
// create components
for (LibraryDescriptorImpl descriptor : descriptors) {
LibraryComponentImpl component = new LibraryComponentImpl(
descriptor);
for (JarDescriptor mJar : descriptor.getJars()) {
classLoaderDomainURLs.add(new File(
componentJarDeploymentDir, mJar.getJarName())
.toURL());
}
// set deploy dir and cl domain
component.setDeploymentDir(componentJarDeploymentDir);
components.add(component);
libraryComponents.add(component);
}
// create shared url domain
URLClassLoaderDomain classLoaderDomain = new URLClassLoaderDomainImpl(
classLoaderDomainURLs
.toArray(new URL[classLoaderDomainURLs.size()]),
Thread.currentThread().getContextClassLoader(),
loadClassesFirstFromAS);
// add it to each component
for (LibraryComponentImpl component : libraryComponents) {
component.setClassLoaderDomain(classLoaderDomain);
}
} else if ((componentDescriptor = componentJarFile
.getJarEntry("META-INF/event-jar.xml")) != null) {
// create class loader domain shared by all components
URLClassLoaderDomain classLoaderDomain = new URLClassLoaderDomainImpl(
new URL[] { componentJarDeploymentDir.toURL() }, Thread
.currentThread().getContextClassLoader(),
loadClassesFirstFromAS);
// parse descriptor
componentDescriptorInputStream = componentJarFile
.getInputStream(componentDescriptor);
EventTypeDescriptorFactoryImpl descriptorFactory = componentManagement
.getComponentDescriptorFactory()
.getEventTypeDescriptorFactory();
List<EventTypeDescriptorImpl> descriptors = descriptorFactory
.parse(componentDescriptorInputStream);
// create components
for (EventTypeDescriptorImpl descriptor : descriptors) {
EventTypeComponentImpl component = new EventTypeComponentImpl(
descriptor);
component.setDeploymentDir(componentJarDeploymentDir);
component.setClassLoaderDomain(classLoaderDomain);
components.add(component);
}
} else if ((componentDescriptor = componentJarFile
.getJarEntry("META-INF/resource-adaptor-type-jar.xml")) != null) {
// create class loader domain shared by all components
URLClassLoaderDomain classLoaderDomain = new URLClassLoaderDomainImpl(
new URL[] { componentJarDeploymentDir.toURL() }, Thread
.currentThread().getContextClassLoader(),
loadClassesFirstFromAS);
// parse descriptor
componentDescriptorInputStream = componentJarFile
.getInputStream(componentDescriptor);
ResourceAdaptorTypeDescriptorFactoryImpl descriptorFactory = componentManagement
.getComponentDescriptorFactory()
.getResourceAdaptorTypeDescriptorFactory();
List<ResourceAdaptorTypeDescriptorImpl> descriptors = descriptorFactory
.parse(componentDescriptorInputStream);
// create components
for (ResourceAdaptorTypeDescriptorImpl descriptor : descriptors) {
ResourceAdaptorTypeComponentImpl component = new ResourceAdaptorTypeComponentImpl(
descriptor);
component.setDeploymentDir(componentJarDeploymentDir);
component.setClassLoaderDomain(classLoaderDomain);
components.add(component);
}
} else if ((componentDescriptor = componentJarFile
.getJarEntry("META-INF/resource-adaptor-jar.xml")) != null) {
// create class loader domain shared by all components
URLClassLoaderDomain classLoaderDomain = new URLClassLoaderDomainImpl(
new URL[] { componentJarDeploymentDir.toURL() }, Thread
.currentThread().getContextClassLoader(),
loadClassesFirstFromAS);
// parse descriptor
componentDescriptorInputStream = componentJarFile
.getInputStream(componentDescriptor);
ResourceAdaptorDescriptorFactoryImpl descriptorFactory = componentManagement
.getComponentDescriptorFactory()
.getResourceAdaptorDescriptorFactory();
List<ResourceAdaptorDescriptorImpl> descriptors = descriptorFactory
.parse(componentDescriptorInputStream);
// create components
for (ResourceAdaptorDescriptorImpl descriptor : descriptors) {
ResourceAdaptorComponentImpl component = new ResourceAdaptorComponentImpl(
descriptor);
component.setDeploymentDir(componentJarDeploymentDir);
component.setClassLoaderDomain(classLoaderDomain);
components.add(component);
}
} else {
throw new DeploymentException(
"No Deployment Descriptor found in the "
+ componentJarFile.getName()
+ " entry of a deployable unit.");
}
} catch (IOException e) {
throw new DeploymentException(
"failed to parse jar descriptor from "
+ componentJarFile.getName(), e);
} finally {
if (componentDescriptorInputStream != null) {
try {
componentDescriptorInputStream.close();
} catch (IOException e) {
logger
.error("failed to close inputstream of descriptor for jar "
+ componentJarFile);
}
}
}
// close component jar file
try {
componentJarFile.close();
} catch (IOException e) {
logger.error("failed to close component jar file", e);
}
// and delete the extracted jar file, we don't need it anymore
if (!extractedFile.delete()) {
logger.warn("failed to delete " + extractedFile);
}
return components;
}
/**
 * Extracts the file with name <code>fileName</code> out of the
 * <code>containingJar</code> archive and stores it in <code>dstDir</code>.
 *
 * @param fileName
 *            the name of the file to extract.
 * @param containingJar
 *            the archive where to extract it from.
 * @param dstDir
 *            the location where the extracted file should be stored.
 * @return a <code>java.io.File</code> reference to the extracted file.
 * @throws DeploymentException
 *             if the entry does not exist in the archive, or if the
 *             extraction fails due to an I/O error.
 */
private File extractFile(String fileName, JarFile containingJar, File dstDir)
        throws DeploymentException {
    ZipEntry zipFileEntry = containingJar.getEntry(fileName);
    logger.trace("Extracting file " + fileName + " from "
            + containingJar.getName());
    if (zipFileEntry == null) {
        throw new DeploymentException("Error extracting jar file "
                + fileName + " from " + containingJar.getName());
    }
    // flatten the entry path: only the last path segment is used as the
    // name of the extracted file
    File extractedFile = new File(dstDir, new File(zipFileEntry.getName())
            .getName());
    try {
        // pipeStream closes both streams, even on failure
        pipeStream(containingJar.getInputStream(zipFileEntry),
                new FileOutputStream(extractedFile));
    } catch (FileNotFoundException e) {
        throw new DeploymentException("file " + fileName + " not found in "
                + containingJar.getName(), e);
    } catch (IOException e) {
        // fixed typo in the original message ("erro extracting file")
        throw new DeploymentException("error extracting file " + fileName
                + " from " + containingJar.getName(), e);
    }
    logger.debug("Extracted file " + extractedFile.getName());
    return extractedFile;
}
/**
 * Extracts every entry of the given jar file into the destination
 * directory, recreating the jar's internal directory structure.
 *
 * @param jarFile
 *            the jar file whose contents are extracted.
 * @param dstDir
 *            the destination directory where the extracted entries are
 *            stored.
 * @throws DeploymentException
 *             if a directory cannot be created or an entry cannot be
 *             copied.
 */
private void extractJar(JarFile jarFile, File dstDir)
        throws DeploymentException {
    // Extract jar contents to a classpath location
    JarInputStream jarIs = null;
    try {
        jarIs = new JarInputStream(new BufferedInputStream(
                new FileInputStream(jarFile.getName())));
        // getNextJarEntry() returning null is the definitive end-of-jar
        // marker; available() is documented as unreliable for detecting
        // end of stream and could cause trailing entries to be skipped,
        // so it is deliberately not part of the loop condition
        for (JarEntry entry = jarIs.getNextJarEntry(); entry != null;
                entry = jarIs.getNextJarEntry()) {
            logger.trace("jar entry = " + entry.getName());
            if (entry.isDirectory()) {
                // Create jar directories.
                File dir = new File(dstDir, entry.getName());
                if (!dir.exists()) {
                    if (!dir.mkdirs()) {
                        logger.debug("Failed to create directory "
                                + dir.getAbsolutePath());
                        throw new IOException("Failed to create directory "
                                + dir.getAbsolutePath());
                    } else {
                        // log creation in the success branch (the original
                        // logged "Created" when the dir already existed)
                        logger.trace("Created directory "
                                + dir.getAbsolutePath());
                    }
                }
            } else {
                // unzip files, creating parent directories as needed
                File file = new File(dstDir, entry.getName());
                File dir = file.getParentFile();
                if (!dir.exists()) {
                    if (!dir.mkdirs()) {
                        logger.debug("Failed to create directory "
                                + dir.getAbsolutePath());
                        throw new IOException("Failed to create directory "
                                + dir.getAbsolutePath());
                    } else {
                        logger.trace("Created directory "
                                + dir.getAbsolutePath());
                    }
                }
                // pipeStream closes both streams, even on failure
                pipeStream(jarFile.getInputStream(entry),
                        new FileOutputStream(file));
            }
        }
    } catch (Exception e) {
        // preserve the underlying failure as the cause
        throw new DeploymentException("failed to extract jar file "
                + jarFile.getName(), e);
    } finally {
        if (jarIs != null) {
            try {
                jarIs.close();
            } catch (IOException e) {
                logger.error("failed to close jar input stream", e);
            }
        }
    }
}
private static byte buffer[] = new byte[8192];
/**
* Pipes data from the input stream into the output stream.
*
* @param is
* The InputStream where the data is coming from.
* @param os
* The OutputStream where the data is going to.
* @throws IOException
* if reading or writing the data fails.
*/
private void pipeStream(InputStream is, OutputStream os) throws IOException {
synchronized (buffer) {
try {
for (int bytesRead = is.read(buffer); bytesRead != -1; bytesRead = is
.read(buffer))
os.write(buffer, 0, bytesRead);
is.close();
os.close();
} catch (IOException ioe) {
try {
is.close();
} catch (Exception ioexc) {/* do sth? */
}
try {
os.close();
} catch (Exception ioexc) {/* do sth? */
}
throw ioe;
}
}
}
}
| true
| true
|
/**
 * Builds the SLEE components packaged in a single component jar of a
 * deployable unit.
 *
 * <p>The component jar named <code>componentJarFileName</code> is first
 * extracted from the deployable unit jar into <code>deploymentDir</code>,
 * its contents are then unpacked into a dedicated "-contents" sub
 * directory, and finally the deployment descriptor found in META-INF
 * decides which component kind is built: sbb, profile specification,
 * library, event type, resource adaptor type or resource adaptor.
 *
 * @param componentJarFileName
 *            the name of the component jar entry inside the deployable
 *            unit jar; may contain '/' separators for nested entries.
 * @param deployableUnitJar
 *            the deployable unit jar containing the component jar.
 * @param deploymentDir
 *            the temp directory where the deployable unit is installed.
 * @return the components built from the jar's deployment descriptor.
 * @throws DeploymentException
 *             if the component jar cannot be extracted or contains no
 *             known deployment descriptor.
 */
public List<AbstractSleeComponent> buildComponents(
        String componentJarFileName, JarFile deployableUnitJar,
        File deploymentDir)
        throws DeploymentException {
    final boolean loadClassesFirstFromAS = componentManagement.getClassLoaderFactory().getConfiguration().isLoadClassesFirstFromAS();
    // extract the component jar from the DU jar, to the temp du dir
    File extractedFile = extractFile(componentJarFileName,
            deployableUnitJar, deploymentDir);
    JarFile componentJarFile = null;
    try {
        componentJarFile = new JarFile(extractedFile);
    } catch (IOException e) {
        // keep the original I/O failure as the cause
        throw new DeploymentException(
                "failed to create jar file for extracted file "
                        + extractedFile, e);
    }
    InputStream componentDescriptorInputStream = null;
    List<AbstractSleeComponent> components = new ArrayList<AbstractSleeComponent>();
    try {
        // now extract the jar file to a new dir; the entry name may
        // contain '/' (nested jar entry), which must not end up inside a
        // single directory name (mkdir() would fail), so flatten it first
        File componentJarDeploymentDir = new File(deploymentDir,
                componentJarFileName.replaceAll("/", "-") + "-contents");
        if (!componentJarDeploymentDir.exists()) {
            if (!componentJarDeploymentDir.mkdir()) {
                throw new SLEEException("dir for jar "
                        + componentJarFileName + " not created in "
                        + deploymentDir);
            }
        } else {
            throw new SLEEException("dir for jar " + componentJarFileName
                    + " already exists in " + deploymentDir);
        }
        extractJar(componentJarFile, componentJarDeploymentDir);
        // create components from descriptor
        JarEntry componentDescriptor = null;
        if ((componentDescriptor = componentJarFile
                .getJarEntry("META-INF/sbb-jar.xml")) != null) {
            // create class loader domain shared by all components
            URLClassLoaderDomain classLoaderDomain = new URLClassLoaderDomainImpl(
                    new URL[] { componentJarDeploymentDir.toURL() }, Thread
                            .currentThread().getContextClassLoader(),
                    loadClassesFirstFromAS);
            // parse descriptor
            componentDescriptorInputStream = componentJarFile
                    .getInputStream(componentDescriptor);
            SbbDescriptorFactoryImpl descriptorFactory = componentManagement
                    .getComponentDescriptorFactory()
                    .getSbbDescriptorFactory();
            List<SbbDescriptorImpl> descriptors = descriptorFactory
                    .parse(componentDescriptorInputStream);
            // create components
            for (SbbDescriptorImpl descriptor : descriptors) {
                SbbComponentImpl component = new SbbComponentImpl(
                        descriptor);
                component.setDeploymentDir(componentJarDeploymentDir);
                component.setClassLoaderDomain(classLoaderDomain);
                components.add(component);
            }
        } else if ((componentDescriptor = componentJarFile
                .getJarEntry("META-INF/profile-spec-jar.xml")) != null) {
            // create class loader domain shared by all components
            URLClassLoaderDomain classLoaderDomain = new URLClassLoaderDomainImpl(
                    new URL[] { componentJarDeploymentDir.toURL() }, Thread
                            .currentThread().getContextClassLoader(),
                    loadClassesFirstFromAS);
            // parse descriptor
            componentDescriptorInputStream = componentJarFile
                    .getInputStream(componentDescriptor);
            ProfileSpecificationDescriptorFactoryImpl descriptorFactory = componentManagement
                    .getComponentDescriptorFactory()
                    .getProfileSpecificationDescriptorFactory();
            List<ProfileSpecificationDescriptorImpl> descriptors = descriptorFactory
                    .parse(componentDescriptorInputStream);
            // create components
            for (ProfileSpecificationDescriptorImpl descriptor : descriptors) {
                ProfileSpecificationComponentImpl component = new ProfileSpecificationComponentImpl(
                        descriptor);
                component.setDeploymentDir(componentJarDeploymentDir);
                component.setClassLoaderDomain(classLoaderDomain);
                components.add(component);
            }
        } else if ((componentDescriptor = componentJarFile
                .getJarEntry("META-INF/library-jar.xml")) != null) {
            Set<LibraryComponentImpl> libraryComponents = new HashSet<LibraryComponentImpl>();
            // we need to gather all URLs for the shared class loader domain
            // to watch (the contents dir plus every jar the descriptors
            // reference), so the domain is created after parsing
            Set<URL> classLoaderDomainURLs = new HashSet<URL>();
            classLoaderDomainURLs.add(componentJarDeploymentDir.toURL());
            // parse the descriptor
            componentDescriptorInputStream = componentJarFile
                    .getInputStream(componentDescriptor);
            LibraryDescriptorFactoryImpl descriptorFactory = componentManagement
                    .getComponentDescriptorFactory()
                    .getLibraryDescriptorFactory();
            List<LibraryDescriptorImpl> descriptors = descriptorFactory
                    .parse(componentDescriptorInputStream);
            // create components
            for (LibraryDescriptorImpl descriptor : descriptors) {
                LibraryComponentImpl component = new LibraryComponentImpl(
                        descriptor);
                for (JarDescriptor mJar : descriptor.getJars()) {
                    classLoaderDomainURLs.add(new File(
                            componentJarDeploymentDir, mJar.getJarName())
                            .toURL());
                }
                // set deploy dir and cl domain
                component.setDeploymentDir(componentJarDeploymentDir);
                components.add(component);
                libraryComponents.add(component);
            }
            // create shared url domain
            URLClassLoaderDomain classLoaderDomain = new URLClassLoaderDomainImpl(
                    classLoaderDomainURLs
                            .toArray(new URL[classLoaderDomainURLs.size()]),
                    Thread.currentThread().getContextClassLoader(),
                    loadClassesFirstFromAS);
            // add it to each component
            for (LibraryComponentImpl component : libraryComponents) {
                component.setClassLoaderDomain(classLoaderDomain);
            }
        } else if ((componentDescriptor = componentJarFile
                .getJarEntry("META-INF/event-jar.xml")) != null) {
            // create class loader domain shared by all components
            URLClassLoaderDomain classLoaderDomain = new URLClassLoaderDomainImpl(
                    new URL[] { componentJarDeploymentDir.toURL() }, Thread
                            .currentThread().getContextClassLoader(),
                    loadClassesFirstFromAS);
            // parse descriptor
            componentDescriptorInputStream = componentJarFile
                    .getInputStream(componentDescriptor);
            EventTypeDescriptorFactoryImpl descriptorFactory = componentManagement
                    .getComponentDescriptorFactory()
                    .getEventTypeDescriptorFactory();
            List<EventTypeDescriptorImpl> descriptors = descriptorFactory
                    .parse(componentDescriptorInputStream);
            // create components
            for (EventTypeDescriptorImpl descriptor : descriptors) {
                EventTypeComponentImpl component = new EventTypeComponentImpl(
                        descriptor);
                component.setDeploymentDir(componentJarDeploymentDir);
                component.setClassLoaderDomain(classLoaderDomain);
                components.add(component);
            }
        } else if ((componentDescriptor = componentJarFile
                .getJarEntry("META-INF/resource-adaptor-type-jar.xml")) != null) {
            // create class loader domain shared by all components
            URLClassLoaderDomain classLoaderDomain = new URLClassLoaderDomainImpl(
                    new URL[] { componentJarDeploymentDir.toURL() }, Thread
                            .currentThread().getContextClassLoader(),
                    loadClassesFirstFromAS);
            // parse descriptor
            componentDescriptorInputStream = componentJarFile
                    .getInputStream(componentDescriptor);
            ResourceAdaptorTypeDescriptorFactoryImpl descriptorFactory = componentManagement
                    .getComponentDescriptorFactory()
                    .getResourceAdaptorTypeDescriptorFactory();
            List<ResourceAdaptorTypeDescriptorImpl> descriptors = descriptorFactory
                    .parse(componentDescriptorInputStream);
            // create components
            for (ResourceAdaptorTypeDescriptorImpl descriptor : descriptors) {
                ResourceAdaptorTypeComponentImpl component = new ResourceAdaptorTypeComponentImpl(
                        descriptor);
                component.setDeploymentDir(componentJarDeploymentDir);
                component.setClassLoaderDomain(classLoaderDomain);
                components.add(component);
            }
        } else if ((componentDescriptor = componentJarFile
                .getJarEntry("META-INF/resource-adaptor-jar.xml")) != null) {
            // create class loader domain shared by all components
            URLClassLoaderDomain classLoaderDomain = new URLClassLoaderDomainImpl(
                    new URL[] { componentJarDeploymentDir.toURL() }, Thread
                            .currentThread().getContextClassLoader(),
                    loadClassesFirstFromAS);
            // parse descriptor
            componentDescriptorInputStream = componentJarFile
                    .getInputStream(componentDescriptor);
            ResourceAdaptorDescriptorFactoryImpl descriptorFactory = componentManagement
                    .getComponentDescriptorFactory()
                    .getResourceAdaptorDescriptorFactory();
            List<ResourceAdaptorDescriptorImpl> descriptors = descriptorFactory
                    .parse(componentDescriptorInputStream);
            // create components
            for (ResourceAdaptorDescriptorImpl descriptor : descriptors) {
                ResourceAdaptorComponentImpl component = new ResourceAdaptorComponentImpl(
                        descriptor);
                component.setDeploymentDir(componentJarDeploymentDir);
                component.setClassLoaderDomain(classLoaderDomain);
                components.add(component);
            }
        } else {
            throw new DeploymentException(
                    "No Deployment Descriptor found in the "
                            + componentJarFile.getName()
                            + " entry of a deployable unit.");
        }
    } catch (IOException e) {
        throw new DeploymentException(
                "failed to parse jar descriptor from "
                        + componentJarFile.getName(), e);
    } finally {
        if (componentDescriptorInputStream != null) {
            try {
                componentDescriptorInputStream.close();
            } catch (IOException e) {
                logger
                        .error("failed to close inputstream of descriptor for jar "
                                + componentJarFile);
            }
        }
    }
    // close component jar file
    try {
        componentJarFile.close();
    } catch (IOException e) {
        logger.error("failed to close component jar file", e);
    }
    // and delete the extracted jar file, we don't need it anymore
    if (!extractedFile.delete()) {
        logger.warn("failed to delete " + extractedFile);
    }
    return components;
}
|
/**
 * Builds the SLEE components packaged in a single component jar of a
 * deployable unit.
 *
 * <p>The component jar named <code>componentJarFileName</code> is first
 * extracted from the deployable unit jar into <code>deploymentDir</code>,
 * its contents are then unpacked into a dedicated "-contents" sub
 * directory, and finally the deployment descriptor found in META-INF
 * decides which component kind is built: sbb, profile specification,
 * library, event type, resource adaptor type or resource adaptor.
 *
 * @param componentJarFileName
 *            the name of the component jar entry inside the deployable
 *            unit jar; may contain '/' separators for nested entries.
 * @param deployableUnitJar
 *            the deployable unit jar containing the component jar.
 * @param deploymentDir
 *            the temp directory where the deployable unit is installed.
 * @return the components built from the jar's deployment descriptor.
 * @throws DeploymentException
 *             if the component jar cannot be extracted or contains no
 *             known deployment descriptor.
 */
public List<AbstractSleeComponent> buildComponents(
        String componentJarFileName, JarFile deployableUnitJar,
        File deploymentDir)
        throws DeploymentException {
    final boolean loadClassesFirstFromAS = componentManagement.getClassLoaderFactory().getConfiguration().isLoadClassesFirstFromAS();
    // extract the component jar from the DU jar, to the temp du dir
    File extractedFile = extractFile(componentJarFileName,
            deployableUnitJar, deploymentDir);
    JarFile componentJarFile = null;
    try {
        componentJarFile = new JarFile(extractedFile);
    } catch (IOException e) {
        // keep the original I/O failure as the cause (it was dropped
        // before, hiding the reason the jar could not be opened)
        throw new DeploymentException(
                "failed to create jar file for extracted file "
                        + extractedFile, e);
    }
    InputStream componentDescriptorInputStream = null;
    List<AbstractSleeComponent> components = new ArrayList<AbstractSleeComponent>();
    try {
        // now extract the jar file to a new dir; the entry name may
        // contain '/' (nested jar entry), which must not end up inside a
        // single directory name (mkdir() would fail), so flatten it first
        File componentJarDeploymentDir = new File(deploymentDir,
                componentJarFileName.replaceAll("/", "-") + "-contents");
        if (!componentJarDeploymentDir.exists()) {
            if (!componentJarDeploymentDir.mkdir()) {
                throw new SLEEException("dir for jar "
                        + componentJarFileName + " not created in "
                        + deploymentDir);
            }
        } else {
            throw new SLEEException("dir for jar " + componentJarFileName
                    + " already exists in " + deploymentDir);
        }
        extractJar(componentJarFile, componentJarDeploymentDir);
        // create components from descriptor
        JarEntry componentDescriptor = null;
        if ((componentDescriptor = componentJarFile
                .getJarEntry("META-INF/sbb-jar.xml")) != null) {
            // create class loader domain shared by all components
            URLClassLoaderDomain classLoaderDomain = new URLClassLoaderDomainImpl(
                    new URL[] { componentJarDeploymentDir.toURL() }, Thread
                            .currentThread().getContextClassLoader(),
                    loadClassesFirstFromAS);
            // parse descriptor
            componentDescriptorInputStream = componentJarFile
                    .getInputStream(componentDescriptor);
            SbbDescriptorFactoryImpl descriptorFactory = componentManagement
                    .getComponentDescriptorFactory()
                    .getSbbDescriptorFactory();
            List<SbbDescriptorImpl> descriptors = descriptorFactory
                    .parse(componentDescriptorInputStream);
            // create components
            for (SbbDescriptorImpl descriptor : descriptors) {
                SbbComponentImpl component = new SbbComponentImpl(
                        descriptor);
                component.setDeploymentDir(componentJarDeploymentDir);
                component.setClassLoaderDomain(classLoaderDomain);
                components.add(component);
            }
        } else if ((componentDescriptor = componentJarFile
                .getJarEntry("META-INF/profile-spec-jar.xml")) != null) {
            // create class loader domain shared by all components
            URLClassLoaderDomain classLoaderDomain = new URLClassLoaderDomainImpl(
                    new URL[] { componentJarDeploymentDir.toURL() }, Thread
                            .currentThread().getContextClassLoader(),
                    loadClassesFirstFromAS);
            // parse descriptor
            componentDescriptorInputStream = componentJarFile
                    .getInputStream(componentDescriptor);
            ProfileSpecificationDescriptorFactoryImpl descriptorFactory = componentManagement
                    .getComponentDescriptorFactory()
                    .getProfileSpecificationDescriptorFactory();
            List<ProfileSpecificationDescriptorImpl> descriptors = descriptorFactory
                    .parse(componentDescriptorInputStream);
            // create components
            for (ProfileSpecificationDescriptorImpl descriptor : descriptors) {
                ProfileSpecificationComponentImpl component = new ProfileSpecificationComponentImpl(
                        descriptor);
                component.setDeploymentDir(componentJarDeploymentDir);
                component.setClassLoaderDomain(classLoaderDomain);
                components.add(component);
            }
        } else if ((componentDescriptor = componentJarFile
                .getJarEntry("META-INF/library-jar.xml")) != null) {
            Set<LibraryComponentImpl> libraryComponents = new HashSet<LibraryComponentImpl>();
            // we need to gather all URLs for the shared class loader domain
            // to watch (the contents dir plus every jar the descriptors
            // reference), so the domain is created after parsing
            Set<URL> classLoaderDomainURLs = new HashSet<URL>();
            classLoaderDomainURLs.add(componentJarDeploymentDir.toURL());
            // parse the descriptor
            componentDescriptorInputStream = componentJarFile
                    .getInputStream(componentDescriptor);
            LibraryDescriptorFactoryImpl descriptorFactory = componentManagement
                    .getComponentDescriptorFactory()
                    .getLibraryDescriptorFactory();
            List<LibraryDescriptorImpl> descriptors = descriptorFactory
                    .parse(componentDescriptorInputStream);
            // create components
            for (LibraryDescriptorImpl descriptor : descriptors) {
                LibraryComponentImpl component = new LibraryComponentImpl(
                        descriptor);
                for (JarDescriptor mJar : descriptor.getJars()) {
                    classLoaderDomainURLs.add(new File(
                            componentJarDeploymentDir, mJar.getJarName())
                            .toURL());
                }
                // set deploy dir and cl domain
                component.setDeploymentDir(componentJarDeploymentDir);
                components.add(component);
                libraryComponents.add(component);
            }
            // create shared url domain
            URLClassLoaderDomain classLoaderDomain = new URLClassLoaderDomainImpl(
                    classLoaderDomainURLs
                            .toArray(new URL[classLoaderDomainURLs.size()]),
                    Thread.currentThread().getContextClassLoader(),
                    loadClassesFirstFromAS);
            // add it to each component
            for (LibraryComponentImpl component : libraryComponents) {
                component.setClassLoaderDomain(classLoaderDomain);
            }
        } else if ((componentDescriptor = componentJarFile
                .getJarEntry("META-INF/event-jar.xml")) != null) {
            // create class loader domain shared by all components
            URLClassLoaderDomain classLoaderDomain = new URLClassLoaderDomainImpl(
                    new URL[] { componentJarDeploymentDir.toURL() }, Thread
                            .currentThread().getContextClassLoader(),
                    loadClassesFirstFromAS);
            // parse descriptor
            componentDescriptorInputStream = componentJarFile
                    .getInputStream(componentDescriptor);
            EventTypeDescriptorFactoryImpl descriptorFactory = componentManagement
                    .getComponentDescriptorFactory()
                    .getEventTypeDescriptorFactory();
            List<EventTypeDescriptorImpl> descriptors = descriptorFactory
                    .parse(componentDescriptorInputStream);
            // create components
            for (EventTypeDescriptorImpl descriptor : descriptors) {
                EventTypeComponentImpl component = new EventTypeComponentImpl(
                        descriptor);
                component.setDeploymentDir(componentJarDeploymentDir);
                component.setClassLoaderDomain(classLoaderDomain);
                components.add(component);
            }
        } else if ((componentDescriptor = componentJarFile
                .getJarEntry("META-INF/resource-adaptor-type-jar.xml")) != null) {
            // create class loader domain shared by all components
            URLClassLoaderDomain classLoaderDomain = new URLClassLoaderDomainImpl(
                    new URL[] { componentJarDeploymentDir.toURL() }, Thread
                            .currentThread().getContextClassLoader(),
                    loadClassesFirstFromAS);
            // parse descriptor
            componentDescriptorInputStream = componentJarFile
                    .getInputStream(componentDescriptor);
            ResourceAdaptorTypeDescriptorFactoryImpl descriptorFactory = componentManagement
                    .getComponentDescriptorFactory()
                    .getResourceAdaptorTypeDescriptorFactory();
            List<ResourceAdaptorTypeDescriptorImpl> descriptors = descriptorFactory
                    .parse(componentDescriptorInputStream);
            // create components
            for (ResourceAdaptorTypeDescriptorImpl descriptor : descriptors) {
                ResourceAdaptorTypeComponentImpl component = new ResourceAdaptorTypeComponentImpl(
                        descriptor);
                component.setDeploymentDir(componentJarDeploymentDir);
                component.setClassLoaderDomain(classLoaderDomain);
                components.add(component);
            }
        } else if ((componentDescriptor = componentJarFile
                .getJarEntry("META-INF/resource-adaptor-jar.xml")) != null) {
            // create class loader domain shared by all components
            URLClassLoaderDomain classLoaderDomain = new URLClassLoaderDomainImpl(
                    new URL[] { componentJarDeploymentDir.toURL() }, Thread
                            .currentThread().getContextClassLoader(),
                    loadClassesFirstFromAS);
            // parse descriptor
            componentDescriptorInputStream = componentJarFile
                    .getInputStream(componentDescriptor);
            ResourceAdaptorDescriptorFactoryImpl descriptorFactory = componentManagement
                    .getComponentDescriptorFactory()
                    .getResourceAdaptorDescriptorFactory();
            List<ResourceAdaptorDescriptorImpl> descriptors = descriptorFactory
                    .parse(componentDescriptorInputStream);
            // create components
            for (ResourceAdaptorDescriptorImpl descriptor : descriptors) {
                ResourceAdaptorComponentImpl component = new ResourceAdaptorComponentImpl(
                        descriptor);
                component.setDeploymentDir(componentJarDeploymentDir);
                component.setClassLoaderDomain(classLoaderDomain);
                components.add(component);
            }
        } else {
            throw new DeploymentException(
                    "No Deployment Descriptor found in the "
                            + componentJarFile.getName()
                            + " entry of a deployable unit.");
        }
    } catch (IOException e) {
        throw new DeploymentException(
                "failed to parse jar descriptor from "
                        + componentJarFile.getName(), e);
    } finally {
        if (componentDescriptorInputStream != null) {
            try {
                componentDescriptorInputStream.close();
            } catch (IOException e) {
                logger
                        .error("failed to close inputstream of descriptor for jar "
                                + componentJarFile);
            }
        }
    }
    // close component jar file
    try {
        componentJarFile.close();
    } catch (IOException e) {
        logger.error("failed to close component jar file", e);
    }
    // and delete the extracted jar file, we don't need it anymore
    if (!extractedFile.delete()) {
        logger.warn("failed to delete " + extractedFile);
    }
    return components;
}
|
diff --git a/src/main/java/it/sevenbits/conferences/web/controller/ApplyForReportController.java b/src/main/java/it/sevenbits/conferences/web/controller/ApplyForReportController.java
index b4c7620..bb467d0 100644
--- a/src/main/java/it/sevenbits/conferences/web/controller/ApplyForReportController.java
+++ b/src/main/java/it/sevenbits/conferences/web/controller/ApplyForReportController.java
@@ -1,61 +1,62 @@
package it.sevenbits.conferences.web.controller;
import it.sevenbits.conferences.web.form.ApplyForReportForm;
import it.sevenbits.conferences.web.form.JsonResponse;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Controller;
import org.springframework.validation.BindingResult;
import org.springframework.validation.FieldError;
import org.springframework.validation.Validator;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.servlet.ModelAndView;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
@Controller
public class ApplyForReportController {
@Autowired
@Qualifier("applyForReportValidator")
private Validator validator;
@RequestMapping(value = "/apply-for-report", method = RequestMethod.GET)
public ModelAndView showForm() {
ModelAndView modelAndView = new ModelAndView("apply-for-report");
return modelAndView;
}
@RequestMapping(value = "/apply-for-report", method = RequestMethod.POST)
@ResponseBody
public JsonResponse submitForm(@ModelAttribute(value = "applyForReportForm") ApplyForReportForm applyForReportForm, BindingResult bindingResult) {
JsonResponse response = new JsonResponse();
validator.validate(applyForReportForm, bindingResult);
if (bindingResult.hasErrors()) {
response.setStatus("FAIL");
Map<String, String> errors = new HashMap<>();
for (FieldError fieldError: bindingResult.getFieldErrors()) {
if (!errors.containsKey(fieldError.getField())) {
errors.put(fieldError.getField(), fieldError.getDefaultMessage());
}
}
errors.put("message", "Форма заполнена неверно.");
response.setResult(errors);
} else {
+ // todo - form data save
response.setStatus("SUCCESS");
response.setResult(Collections.singletonMap("message", "Заявка отправлена."));
}
return response;
}
}
| true
| true
|
public JsonResponse submitForm(@ModelAttribute(value = "applyForReportForm") ApplyForReportForm applyForReportForm, BindingResult bindingResult) {
    // Handles POST of the "apply for report" form: runs the dedicated
    // validator and returns a JSON payload with status FAIL or SUCCESS.
    JsonResponse response = new JsonResponse();
    validator.validate(applyForReportForm, bindingResult);
    if (bindingResult.hasErrors()) {
        response.setStatus("FAIL");
        Map<String, String> errors = new HashMap<>();
        for (FieldError fieldError: bindingResult.getFieldErrors()) {
            // only the first reported message per field is kept
            if (!errors.containsKey(fieldError.getField())) {
                errors.put(fieldError.getField(), fieldError.getDefaultMessage());
            }
        }
        // generic user-facing message added alongside the field errors
        errors.put("message", "Форма заполнена неверно.");
        response.setResult(errors);
    } else {
        // TODO: the submitted form data is not persisted anywhere before
        // reporting success
        response.setStatus("SUCCESS");
        response.setResult(Collections.singletonMap("message", "Заявка отправлена."));
    }
    return response;
}
|
public JsonResponse submitForm(@ModelAttribute(value = "applyForReportForm") ApplyForReportForm applyForReportForm, BindingResult bindingResult) {
    // Validate the submitted form and translate the binding result into a
    // JSON payload for the client.
    validator.validate(applyForReportForm, bindingResult);
    JsonResponse response = new JsonResponse();
    if (!bindingResult.hasErrors()) {
        // todo - form data save
        response.setStatus("SUCCESS");
        response.setResult(Collections.singletonMap("message", "Заявка отправлена."));
        return response;
    }
    response.setStatus("FAIL");
    Map<String, String> fieldMessages = new HashMap<>();
    for (FieldError error : bindingResult.getFieldErrors()) {
        // keep only the first message reported for each field
        if (!fieldMessages.containsKey(error.getField())) {
            fieldMessages.put(error.getField(), error.getDefaultMessage());
        }
    }
    fieldMessages.put("message", "Форма заполнена неверно.");
    response.setResult(fieldMessages);
    return response;
}
|
diff --git a/src/java-common/org/xins/common/collections/expiry/ExpiryFolder.java b/src/java-common/org/xins/common/collections/expiry/ExpiryFolder.java
index fc4b1a041..91527d8ae 100644
--- a/src/java-common/org/xins/common/collections/expiry/ExpiryFolder.java
+++ b/src/java-common/org/xins/common/collections/expiry/ExpiryFolder.java
@@ -1,608 +1,609 @@
/*
* $Id$
*/
package org.xins.common.collections.expiry;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.xins.common.Log;
import org.xins.common.MandatoryArgumentChecker;
import org.xins.common.threads.Doorman;
/**
* Expiry folder. Contains values indexed by key. Entries in this folder will
* expire after a predefined amount of time, unless their lifetime is extended
* within that timeframe. This is done using the {@link #get(Object)} method.
*
* <p>Listeners are supported. Listeners are added using the
* {@link #addListener(ExpiryListener)} method and removed using the
* {@link #removeListener(ExpiryListener)} method. If a listener is registered
* multiple times, it will receive the events multiple times as well. And it
* will have to be removed multiple times as well.
*
* <p>This class is thread-safe.
*
* @version $Revision$ $Date$
* @author Ernst de Haan (<a href="mailto:ernst.dehaan@nl.wanadoo.com">ernst.dehaan@nl.wanadoo.com</a>)
* @author Anthony Goubard (<a href="mailto:anthony.goubard@nl.wanadoo.com">anthony.goubard@nl.wanadoo.com</a>)
*/
public final class ExpiryFolder
extends Object {
//-------------------------------------------------------------------------
// Class fields
//-------------------------------------------------------------------------
/**
* The name of this class.
*/
private static final String EXPIRY_FOLDER_CLASSNAME = ExpiryFolder.class.getName();
/**
* The initial size for the queue of threads waiting to obtain read or
* write access to a resource.
*/
private static final int INITIAL_QUEUE_SIZE = 89;
//-------------------------------------------------------------------------
// Class functions
//-------------------------------------------------------------------------
//-------------------------------------------------------------------------
// Constructor
//-------------------------------------------------------------------------
/**
* Constructs a new <code>ExpiryFolder</code>.
*
* @param name
* description of this folder, to be used in log and exception messages,
* not <code>null</code>.
*
* @param strategy
* the strategy that should be applied, not <code>null</code>.
*
* @param strictChecking
* flag that indicates if checking of thread synchronization operations
* should be strict or loose.
*
* @param maxQueueWaitTime
* the maximum time a thread can wait in the queue for obtaining read or
 * write access to a resource, must be &gt; 0L.
*
* @throws IllegalArgumentException
* if <code>name == null || strategy == null || maxQueueWaitTime <= 0L</code>.
*/
public ExpiryFolder(String name,
                    ExpiryStrategy strategy,
                    boolean strictChecking,
                    long maxQueueWaitTime)
throws IllegalArgumentException {
   // Check preconditions
   // (name and strategy must not be null; maxQueueWaitTime is validated
   // by the Doorman constructors below)
   MandatoryArgumentChecker.check("name", name, "strategy", strategy);
   // Initialize fields
   _name = name;
   _asString = "ExpiryFolder \"" + _name + '"';
   _strategy = strategy;
   _recentlyAccessed = new HashMap(89);
   _slotCount = strategy.getSlotCount();
   _slots = new Map[_slotCount];
   _lastSlot = _slotCount - 1;
   _sizeLock = new Object();
   _listeners = new ArrayList(5);
   // Initialize all the fields in _slots
   // (so tick() can always shift non-null maps between slots)
   for (int i = 0; i < _slotCount; i++) {
      _slots[i] = new HashMap(89);
   }
   // Create the doormen
   _recentlyAccessedDoorman = new Doorman("recentlyAccessed", strictChecking, INITIAL_QUEUE_SIZE, maxQueueWaitTime);
   _slotsDoorman = new Doorman("slots", strictChecking, INITIAL_QUEUE_SIZE, maxQueueWaitTime);
   // Notify the strategy
   // NOTE(review): 'this' escapes the constructor here; this is only safe
   // if ExpiryStrategy.folderAdded defers using the folder until
   // construction completes -- confirm against ExpiryStrategy.
   strategy.folderAdded(this);
}
//-------------------------------------------------------------------------
// Fields
//-------------------------------------------------------------------------
/**
* The name of this expiry folder.
*/
private final String _name;
/**
* String representation. Cannot be <code>null</code>.
*/
private final String _asString;
/**
* The strategy used. This field cannot be <code>null</code>.
*/
private final ExpiryStrategy _strategy;
/**
* The most recently accessed entries. This field cannot be
* <code>null</code>. The entries in this map will expire after
* {@link ExpiryStrategy#getTimeOut()} milliseconds, plus at maximum
* {@link ExpiryStrategy#getPrecision()} milliseconds.
*/
private volatile Map _recentlyAccessed;
/**
* Number of active slots. Always equals
* {@link #_slots}<code>.length</code>.
*/
private final int _slotCount;
/**
* The index of the last slot. This is always
* {@link #_slotCount}<code> - 1</code>.
*/
private final int _lastSlot;
/**
* Slots to contain the maps with entries that are not the most recently
* accessed. The further back in the array, the faster the entries will
* expire.
*/
private final Map[] _slots;
/**
* Doorman protecting the field <code>_recentlyAccessed</code>.
*/
private final Doorman _recentlyAccessedDoorman;
/**
* Doorman protecting the field <code>_slots</code>.
*/
private final Doorman _slotsDoorman;
/**
* The size of this folder.
*/
private int _size;
/**
* Lock for the <code>_size</code>.
*/
private final Object _sizeLock;
/**
* The set of listeners. May be empty, but never is <code>null</code>.
*/
private final List _listeners;
//-------------------------------------------------------------------------
// Methods
//-------------------------------------------------------------------------
/**
* Returns the name given to this expiry folder.
*
* @return
* the name assigned to this expiry folder, not <code>null</code>.
*/
public final String getName() {
   // trivial accessor; _name is final and set at construction time, so no
   // synchronization is needed
   return _name;
}
/**
* Notifies this map that the precision time frame has passed since the
* last tick.
*
* <p>If any entries are expirable, they will be removed from this folder.
*/
void tick() {
// Allocate memory _before_ entering any doorman, so if this fails, then
// we don't hold any locks
Map newRecentlyAccessed = new HashMap();
// First enter the protected area for '_recentlyAccessed', because that
// is the most difficult to enter
_recentlyAccessedDoorman.enterAsWriter();
// Then enter the protected area for '_slots' as well
_slotsDoorman.enterAsWriter();
// Keep a link to the old map with recently accessed elements and then
// reset _recentlyAccessed so we can leave the protected area for
// '_recentlyAccessed' right away
Map oldRecentlyAccessed = _recentlyAccessed;
_recentlyAccessed = newRecentlyAccessed;
// Leave the protected area for '_recentlyAccessed' first, because that
// is the heaviest used
_recentlyAccessedDoorman.leaveAsWriter();
// Shift the slots
Map toBeExpired = _slots[_lastSlot];
for (int i = _lastSlot; i > 0; i--) {
_slots[i] = _slots[i - 1];
}
_slots[0] = oldRecentlyAccessed;
// Leave the protected area for '_slots' as well.
_slotsDoorman.leaveAsWriter();
// Adjust the size
int toBeExpiredSize = toBeExpired == null ? 0 : toBeExpired.size();
if (toBeExpiredSize > 0) {
+ int newSize;
synchronized (_sizeLock) {
_size -= toBeExpiredSize;
newSize = _size;
if (_size < 0) {
_size = 0;
}
}
// If the new size was negative, it has been fixed already, but
// report it now, after the synchronized section
if (newSize < 0) {
Log.log_3006(EXPIRY_FOLDER_CLASSNAME, "tick()", "Size of expiry folder \"" + _name + "\" dropped to " + newSize + ", adjusted it to 0.");
}
Log.log_3400(_asString, toBeExpiredSize, newSize);
} else {
Log.log_3400(_asString, 0, _size);
}
// XXX: Should we do this in a separate thread, so all locks held by the
// ExpiryStrategy are released?
// Get a copy of the list of listeners
List listeners;
synchronized (_listeners) {
listeners = new ArrayList(_listeners);
}
// Notify all listeners
int count = listeners.size();
if (count > 0) {
Map unmodifiableExpired = Collections.unmodifiableMap(toBeExpired);
for (int i = 0; i < count; i++) {
ExpiryListener listener = (ExpiryListener) listeners.get(i);
listener.expired(this, unmodifiableExpired);
}
}
}
/**
* Adds the specified object as a listener for expiry events.
*
* @param listener
* the listener to be registered, cannot be <code>null</code>.
*
* @throws IllegalArgumentException
* if <code>listener == null</code>.
*/
public void addListener(ExpiryListener listener)
throws IllegalArgumentException {
MandatoryArgumentChecker.check("listener", listener);
synchronized (_listeners) {
_listeners.add(listener);
}
}
/**
* Removes the specified object as a listener for expiry events.
*
* @param listener
* the listener to be unregistered, cannot be <code>null</code>.
*
* @throws IllegalArgumentException
* if <code>listener == null</code>.
*/
public void removeListener(ExpiryListener listener)
throws IllegalArgumentException {
MandatoryArgumentChecker.check("listener", listener);
synchronized (_listeners) {
_listeners.remove(listener);
}
}
/**
* Gets the number of entries.
*
* @return
* the number of entries in this expiry folder, always >= 0.
*/
public int size() {
synchronized (_sizeLock) {
return _size;
}
}
/**
* Gets the value associated with a key and extends the lifetime of the
* matching entry, if there was a match.
*
* <p>The more recently the specified entry was accessed, the faster the
* lookup.
*
* @param key
* the key to lookup, cannot be <code>null</code>.
*
* @return
* the value associated with the specified key, or <code>null</code> if
* and only if this folder does not contain an entry with the specified
* key.
*
* @throws IllegalArgumentException
* if <code>key == null</code>.
*/
public Object get(Object key) throws IllegalArgumentException {
// Check preconditions
MandatoryArgumentChecker.check("key", key);
// Search in the recently accessed map first
_recentlyAccessedDoorman.enterAsReader();
Object value;
try {
value = _recentlyAccessed.get(key);
} finally {
_recentlyAccessedDoorman.leaveAsReader();
}
// If not found, then look in the slots
// TODO: Determine whether enterAsReader() is really good enough. It
// seems that enterAsWriter() should be called. However, this may
// have a major impact on performance.
if (value == null) {
_slotsDoorman.enterAsReader();
try {
for (int i = 0; i < _slotCount && value == null; i++) {
value = _slots[i].remove(key);
}
} finally {
_slotsDoorman.leaveAsReader();
}
if (value != null) {
_recentlyAccessedDoorman.enterAsWriter();
try {
_recentlyAccessed.put(key, value);
} finally {
_recentlyAccessedDoorman.leaveAsWriter();
}
}
}
return value;
}
/**
* Finds the value associated with a key. The lifetime of the matching
* entry is not extended.
*
* <p>The more recently the specified entry was accessed, the faster the
* lookup.
*
* @param key
* the key to lookup, cannot be <code>null</code>.
*
* @return
* the value associated with the specified key, or <code>null</code> if
* and only if this folder does not contain an entry with the specified
* key.
*
* @throws IllegalArgumentException
* if <code>key == null</code>.
*/
public Object find(Object key) throws IllegalArgumentException {
// Check preconditions
MandatoryArgumentChecker.check("key", key);
// Search in the recently accessed map first
_recentlyAccessedDoorman.enterAsReader();
Object value;
try {
value = _recentlyAccessed.get(key);
} finally {
_recentlyAccessedDoorman.leaveAsReader();
}
// If not found, then look in the slots
if (value == null) {
_slotsDoorman.enterAsReader();
try {
for (int i = 0; i < _slotCount && value == null; i++) {
value = _slots[i].get(key);
}
} finally {
_slotsDoorman.leaveAsReader();
}
}
return value;
}
/**
* Associates the specified value with the specified key in this folder.
*
* @param key
* they key for the entry, cannot be <code>null</code>.
*
* @param value
* they value for the entry, cannot be <code>null</code>.
*
* @throws IllegalArgumentException
* if <code>key == null || value == null</code>.
*/
public void put(Object key, Object value)
throws IllegalArgumentException {
// Check preconditions
MandatoryArgumentChecker.check("key", key, "value", value);
// Store the association in the set of recently accessed entries
_recentlyAccessedDoorman.enterAsWriter();
try {
_recentlyAccessed.put(key, value);
// Bump the size
synchronized (_sizeLock) {
_size++;
}
} finally {
_recentlyAccessedDoorman.leaveAsWriter();
}
}
/**
* Removes the specified key from this folder.
*
* @param key
* the key for the entry, cannot be <code>null</code>.
*
* @return
* the old value associated with the specified key, or <code>null</code>
* if and only if this folder does not contain an entry with the
* specified key.
*
* @throws IllegalArgumentException
* if <code>key == null</code>.
*/
public Object remove(Object key)
throws IllegalArgumentException {
// Check preconditions
MandatoryArgumentChecker.check("key", key);
// Remove the key in the set of recently accessed entries
_recentlyAccessedDoorman.enterAsReader();
Object value;
try {
value = _recentlyAccessed.remove(key);
if (value != null) {
// Bump the size
synchronized (_sizeLock) {
_size--;
}
}
} finally {
_recentlyAccessedDoorman.leaveAsReader();
}
// If not found, then look in the slots
if (value == null) {
_slotsDoorman.enterAsReader();
try {
for (int i = 0; i < _slotCount && value == null; i++) {
value = _slots[i].remove(key);
}
if (value != null) {
// Bump the size
synchronized (_sizeLock) {
_size--;
}
}
} finally {
_slotsDoorman.leaveAsReader();
}
}
return value;
}
/**
* Copies the entries of this ExpiryFolder into another one.
* This method does not perform a deep copy, so if a key is added or
* removed, both folders will be modified.
*
* @param newFolder
* the new folder where the entries should be copied into,
* cannot be <code>null</code>, cannot be <code>this</code>.
*
* @throws IllegalArgumentException
* if <code>newFolder == null</code> or <code>newFolder == this</code>
* or the precision is the newFolder is not the same as for this folder.
*/
public void copy(ExpiryFolder newFolder)
throws IllegalArgumentException {
// Check preconditions
MandatoryArgumentChecker.check("newFolder", newFolder);
if (newFolder == this) {
throw new IllegalArgumentException("The folder can not be copied into itself.");
}
if (newFolder.getStrategy().getPrecision() != getStrategy().getPrecision()) {
throw new IllegalArgumentException("The folders must have the same precision.");
}
// Copy the recentlyAccessed
_recentlyAccessedDoorman.enterAsReader();
newFolder._recentlyAccessedDoorman.enterAsWriter();
try {
newFolder._recentlyAccessed = _recentlyAccessed;
synchronized(newFolder._sizeLock) {
newFolder._size = newFolder._recentlyAccessed.size();
}
} finally {
try {
newFolder._recentlyAccessedDoorman.leaveAsWriter();
} finally {
_recentlyAccessedDoorman.leaveAsReader();
}
}
// Copy the slots
_slotsDoorman.enterAsReader();
newFolder._slotsDoorman.enterAsWriter();
try {
for (int i = 0; i < _slotCount && i < newFolder._slotCount; i++) {
newFolder._slots[i] = _slots[i];
synchronized(newFolder._sizeLock) {
newFolder._size += newFolder._slots[i].size();
}
}
} finally {
try {
newFolder._slotsDoorman.leaveAsWriter();
} finally {
_slotsDoorman.leaveAsReader();
}
}
}
/**
* Returns the strategy associated with this folder
*
* @return
* the strategy, never <code>null</code>.
*/
public ExpiryStrategy getStrategy() {
return _strategy;
}
/**
* Returns a textual representation of this object.
*
* @return
* a textual representation of this <code>ExpiryFolder</code>, which
* includes the name.
*/
public String toString() {
return _asString;
}
}
| true
| true
|
void tick() {
// Allocate memory _before_ entering any doorman, so if this fails, then
// we don't hold any locks
Map newRecentlyAccessed = new HashMap();
// First enter the protected area for '_recentlyAccessed', because that
// is the most difficult to enter
_recentlyAccessedDoorman.enterAsWriter();
// Then enter the protected area for '_slots' as well
_slotsDoorman.enterAsWriter();
// Keep a link to the old map with recently accessed elements and then
// reset _recentlyAccessed so we can leave the protected area for
// '_recentlyAccessed' right away
Map oldRecentlyAccessed = _recentlyAccessed;
_recentlyAccessed = newRecentlyAccessed;
// Leave the protected area for '_recentlyAccessed' first, because that
// is the heaviest used
_recentlyAccessedDoorman.leaveAsWriter();
// Shift the slots
Map toBeExpired = _slots[_lastSlot];
for (int i = _lastSlot; i > 0; i--) {
_slots[i] = _slots[i - 1];
}
_slots[0] = oldRecentlyAccessed;
// Leave the protected area for '_slots' as well.
_slotsDoorman.leaveAsWriter();
// Adjust the size
int toBeExpiredSize = toBeExpired == null ? 0 : toBeExpired.size();
if (toBeExpiredSize > 0) {
synchronized (_sizeLock) {
_size -= toBeExpiredSize;
newSize = _size;
if (_size < 0) {
_size = 0;
}
}
// If the new size was negative, it has been fixed already, but
// report it now, after the synchronized section
if (newSize < 0) {
Log.log_3006(EXPIRY_FOLDER_CLASSNAME, "tick()", "Size of expiry folder \"" + _name + "\" dropped to " + newSize + ", adjusted it to 0.");
}
Log.log_3400(_asString, toBeExpiredSize, newSize);
} else {
Log.log_3400(_asString, 0, _size);
}
// XXX: Should we do this in a separate thread, so all locks held by the
// ExpiryStrategy are released?
// Get a copy of the list of listeners
List listeners;
synchronized (_listeners) {
listeners = new ArrayList(_listeners);
}
// Notify all listeners
int count = listeners.size();
if (count > 0) {
Map unmodifiableExpired = Collections.unmodifiableMap(toBeExpired);
for (int i = 0; i < count; i++) {
ExpiryListener listener = (ExpiryListener) listeners.get(i);
listener.expired(this, unmodifiableExpired);
}
}
}
|
void tick() {
// Allocate memory _before_ entering any doorman, so if this fails, then
// we don't hold any locks
Map newRecentlyAccessed = new HashMap();
// First enter the protected area for '_recentlyAccessed', because that
// is the most difficult to enter
_recentlyAccessedDoorman.enterAsWriter();
// Then enter the protected area for '_slots' as well
_slotsDoorman.enterAsWriter();
// Keep a link to the old map with recently accessed elements and then
// reset _recentlyAccessed so we can leave the protected area for
// '_recentlyAccessed' right away
Map oldRecentlyAccessed = _recentlyAccessed;
_recentlyAccessed = newRecentlyAccessed;
// Leave the protected area for '_recentlyAccessed' first, because that
// is the heaviest used
_recentlyAccessedDoorman.leaveAsWriter();
// Shift the slots
Map toBeExpired = _slots[_lastSlot];
for (int i = _lastSlot; i > 0; i--) {
_slots[i] = _slots[i - 1];
}
_slots[0] = oldRecentlyAccessed;
// Leave the protected area for '_slots' as well.
_slotsDoorman.leaveAsWriter();
// Adjust the size
int toBeExpiredSize = toBeExpired == null ? 0 : toBeExpired.size();
if (toBeExpiredSize > 0) {
int newSize;
synchronized (_sizeLock) {
_size -= toBeExpiredSize;
newSize = _size;
if (_size < 0) {
_size = 0;
}
}
// If the new size was negative, it has been fixed already, but
// report it now, after the synchronized section
if (newSize < 0) {
Log.log_3006(EXPIRY_FOLDER_CLASSNAME, "tick()", "Size of expiry folder \"" + _name + "\" dropped to " + newSize + ", adjusted it to 0.");
}
Log.log_3400(_asString, toBeExpiredSize, newSize);
} else {
Log.log_3400(_asString, 0, _size);
}
// XXX: Should we do this in a separate thread, so all locks held by the
// ExpiryStrategy are released?
// Get a copy of the list of listeners
List listeners;
synchronized (_listeners) {
listeners = new ArrayList(_listeners);
}
// Notify all listeners
int count = listeners.size();
if (count > 0) {
Map unmodifiableExpired = Collections.unmodifiableMap(toBeExpired);
for (int i = 0; i < count; i++) {
ExpiryListener listener = (ExpiryListener) listeners.get(i);
listener.expired(this, unmodifiableExpired);
}
}
}
|
diff --git a/src/swsec/Query.java b/src/swsec/Query.java
index bd486fc..9cbf121 100644
--- a/src/swsec/Query.java
+++ b/src/swsec/Query.java
@@ -1,41 +1,41 @@
package swsec;
import java.sql.*;
public class Query {
public static ResultSet SelectCountry() throws Exception
{
ResultSet rs = null;
Connection con = ConnectionDB.getConnection();
PreparedStatement stat = con.prepareStatement("SELECT full_name FROM country");
rs = stat.executeQuery();
return rs;
}
public static ResultSet selectSchools(String countryFullName) throws Exception
{
Connection con = ConnectionDB.getConnection();
PreparedStatement stat=con.prepareStatement("SELECT * FROM country, school WHERE school.country = country.short_name AND country.full_name = ?");
stat.setString(1, countryFullName);
return stat.executeQuery();
}
public static ResultSet selectReviews(String school_name) throws Exception
{
Connection con = ConnectionDB.getConnection();
PreparedStatement stat = con.prepareStatement("SELECT * FROM user_reviews, school WHERE user_reviews.school_id = school.school_id AND school.full_name = ?");
stat.setString(1,school_name);
return stat.executeQuery();
}
public static void insertReview(String school_id, String name, String review) throws Exception
{
Connection con = ConnectionDB.getConnection();
PreparedStatement stat = con.prepareStatement("INSERT INTO user_reviews VALUES (?,?,?)");
- stat.setString(1, school_id);
+ stat.setInt(1,Integer.parseInt(school_id));
stat.setString(2, name);
stat.setString(3, review);
- stat.executeQuery();
+ stat.executeUpdate();
}
}
| false
| true
|
public static void insertReview(String school_id, String name, String review) throws Exception
{
Connection con = ConnectionDB.getConnection();
PreparedStatement stat = con.prepareStatement("INSERT INTO user_reviews VALUES (?,?,?)");
stat.setString(1, school_id);
stat.setString(2, name);
stat.setString(3, review);
stat.executeQuery();
}
|
public static void insertReview(String school_id, String name, String review) throws Exception
{
Connection con = ConnectionDB.getConnection();
PreparedStatement stat = con.prepareStatement("INSERT INTO user_reviews VALUES (?,?,?)");
stat.setInt(1,Integer.parseInt(school_id));
stat.setString(2, name);
stat.setString(3, review);
stat.executeUpdate();
}
|
diff --git a/frontend/webadmin/modules/uicommonweb/src/main/java/org/ovirt/engine/ui/uicommonweb/models/hosts/HostModel.java b/frontend/webadmin/modules/uicommonweb/src/main/java/org/ovirt/engine/ui/uicommonweb/models/hosts/HostModel.java
index 82b432a7..2574f603 100644
--- a/frontend/webadmin/modules/uicommonweb/src/main/java/org/ovirt/engine/ui/uicommonweb/models/hosts/HostModel.java
+++ b/frontend/webadmin/modules/uicommonweb/src/main/java/org/ovirt/engine/ui/uicommonweb/models/hosts/HostModel.java
@@ -1,751 +1,759 @@
package org.ovirt.engine.ui.uicommonweb.models.hosts;
import org.ovirt.engine.core.common.businessentities.FenceStatusReturnValue;
import org.ovirt.engine.core.common.businessentities.VDSGroup;
import org.ovirt.engine.core.common.businessentities.storage_pool;
import org.ovirt.engine.core.common.queries.GetNewVdsFenceStatusParameters;
import org.ovirt.engine.core.common.queries.ValueObjectMap;
import org.ovirt.engine.core.common.queries.VdcQueryType;
import org.ovirt.engine.core.compat.Event;
import org.ovirt.engine.core.compat.EventArgs;
import org.ovirt.engine.core.compat.NGuid;
import org.ovirt.engine.core.compat.PropertyChangedEventArgs;
import org.ovirt.engine.core.compat.StringFormat;
import org.ovirt.engine.core.compat.StringHelper;
import org.ovirt.engine.ui.frontend.AsyncQuery;
import org.ovirt.engine.ui.frontend.Frontend;
import org.ovirt.engine.ui.frontend.INewAsyncCallback;
import org.ovirt.engine.ui.uicommonweb.Linq;
import org.ovirt.engine.ui.uicommonweb.UICommand;
import org.ovirt.engine.ui.uicommonweb.dataprovider.AsyncDataProvider;
import org.ovirt.engine.ui.uicommonweb.models.EntityModel;
import org.ovirt.engine.ui.uicommonweb.models.ListModel;
import org.ovirt.engine.ui.uicommonweb.models.Model;
import org.ovirt.engine.ui.uicommonweb.validation.HostAddressValidation;
import org.ovirt.engine.ui.uicommonweb.validation.IValidation;
import org.ovirt.engine.ui.uicommonweb.validation.IntegerValidation;
import org.ovirt.engine.ui.uicommonweb.validation.KeyValuePairValidation;
import org.ovirt.engine.ui.uicommonweb.validation.LengthValidation;
import org.ovirt.engine.ui.uicommonweb.validation.NotEmptyValidation;
import org.ovirt.engine.ui.uicommonweb.validation.RegexValidation;
import org.ovirt.engine.ui.uicompat.FrontendQueryAsyncResult;
import org.ovirt.engine.ui.uicompat.IFrontendQueryAsyncCallback;
@SuppressWarnings("unused")
public class HostModel extends Model
{
public static final int HostNameMaxLength = 255;
public static final String PmSecureKey = "secure";
public static final String PmPortKey = "port";
public static final String PmSlotKey = "slot";
public static final String BeginTestStage = "BeginTest";
public static final String EndTestStage = "EndTest";
private UICommand privateTestCommand;
public UICommand getTestCommand()
{
return privateTestCommand;
}
private void setTestCommand(UICommand value)
{
privateTestCommand = value;
}
public boolean getIsNew()
{
return getHostId() == null;
}
private NGuid privateHostId;
public NGuid getHostId()
{
return privateHostId;
}
public void setHostId(NGuid value)
{
privateHostId = value;
}
private String privateOriginalName;
public String getOriginalName()
{
return privateOriginalName;
}
public void setOriginalName(String value)
{
privateOriginalName = value;
}
private EntityModel privateName;
public EntityModel getName()
{
return privateName;
}
private void setName(EntityModel value)
{
privateName = value;
}
private EntityModel privateHost;
public EntityModel getHost()
{
return privateHost;
}
private void setHost(EntityModel value)
{
privateHost = value;
}
private EntityModel privateManagementIp;
public EntityModel getManagementIp()
{
return privateManagementIp;
}
private void setManagementIp(EntityModel value)
{
privateManagementIp = value;
}
private ListModel privateDataCenter;
public ListModel getDataCenter()
{
return privateDataCenter;
}
private void setDataCenter(ListModel value)
{
privateDataCenter = value;
}
private ListModel privateCluster;
public ListModel getCluster()
{
return privateCluster;
}
private void setCluster(ListModel value)
{
privateCluster = value;
}
private EntityModel privatePort;
public EntityModel getPort()
{
return privatePort;
}
private void setPort(EntityModel value)
{
privatePort = value;
}
private EntityModel privateRootPassword;
public EntityModel getRootPassword()
{
return privateRootPassword;
}
private void setRootPassword(EntityModel value)
{
privateRootPassword = value;
}
private EntityModel privateOverrideIpTables;
public EntityModel getOverrideIpTables()
{
return privateOverrideIpTables;
}
private void setOverrideIpTables(EntityModel value)
{
privateOverrideIpTables = value;
}
private EntityModel privateIsPm;
public EntityModel getIsPm()
{
return privateIsPm;
}
private void setIsPm(EntityModel value)
{
privateIsPm = value;
}
private EntityModel privatePmUserName;
public EntityModel getPmUserName()
{
return privatePmUserName;
}
private void setPmUserName(EntityModel value)
{
privatePmUserName = value;
}
private EntityModel privatePmPassword;
public EntityModel getPmPassword()
{
return privatePmPassword;
}
private void setPmPassword(EntityModel value)
{
privatePmPassword = value;
}
private ListModel privatePmType;
public ListModel getPmType()
{
return privatePmType;
}
private void setPmType(ListModel value)
{
privatePmType = value;
}
private EntityModel privatePmSecure;
public EntityModel getPmSecure()
{
return privatePmSecure;
}
private void setPmSecure(EntityModel value)
{
privatePmSecure = value;
}
private EntityModel privatePmPort;
public EntityModel getPmPort()
{
return privatePmPort;
}
private void setPmPort(EntityModel value)
{
privatePmPort = value;
}
private EntityModel privatePmSlot;
public EntityModel getPmSlot()
{
return privatePmSlot;
}
private void setPmSlot(EntityModel value)
{
privatePmSlot = value;
}
private EntityModel privatePmOptions;
public EntityModel getPmOptions()
{
return privatePmOptions;
}
private void setPmOptions(EntityModel value)
{
privatePmOptions = value;
}
private boolean isGeneralTabValid;
public boolean getIsGeneralTabValid()
{
return isGeneralTabValid;
}
public void setIsGeneralTabValid(boolean value)
{
if (isGeneralTabValid != value)
{
isGeneralTabValid = value;
OnPropertyChanged(new PropertyChangedEventArgs("IsGeneralTabValid"));
}
}
private boolean isPowerManagementTabValid;
public boolean getIsPowerManagementTabValid()
{
return isPowerManagementTabValid;
}
public void setIsPowerManagementTabValid(boolean value)
{
if (isPowerManagementTabValid != value)
{
isPowerManagementTabValid = value;
OnPropertyChanged(new PropertyChangedEventArgs("IsPowerManagementTabValid"));
}
}
private boolean isPowerManagementTabSelected;
public boolean getIsPowerManagementTabSelected()
{
return isPowerManagementTabSelected;
}
public void setIsPowerManagementTabSelected(boolean value)
{
if (isPowerManagementTabSelected != value)
{
isPowerManagementTabSelected = value;
OnPropertyChanged(new PropertyChangedEventArgs("IsPowerManagementTabSelected"));
}
}
public java.util.HashMap<String, String> getPmOptionsMap()
{
java.util.HashMap<String, String> dict = new java.util.HashMap<String, String>();
// Add well known pm options.
if (getPmPort().getIsAvailable())
{
dict.put(PmPortKey, getPmPort().getEntity() == null ? "" : (String) getPmPort().getEntity());
}
if (getPmSlot().getIsAvailable())
{
dict.put(PmSlotKey, getPmSlot().getEntity() == null ? "" : (String) getPmSlot().getEntity());
}
if (getPmSecure().getIsAvailable())
{
dict.put(PmSecureKey, getPmSecure().getEntity().toString());
}
// Add unknown pm options.
// Assume Validate method was called before this getter.
String pmOptions = (String) getPmOptions().getEntity();
if (!StringHelper.isNullOrEmpty(pmOptions))
{
for (String pair : pmOptions.split("[,]", -1))
{
String[] array = pair.split("[=]", -1);
if (array.length == 2)
{
dict.put(array[0], array[1]);
}
else if (array.length == 1)
{
dict.put(array[0], "");
}
}
}
return dict;
}
public void setPmOptionsMap(java.util.HashMap<String, String> value)
{
String pmOptions = "";
for (java.util.Map.Entry<String, String> pair : value.entrySet())
{
String k = pair.getKey();
String v = pair.getValue();
// C# TO JAVA CONVERTER NOTE: The following 'switch' operated on a string member and was converted to Java
// 'if-else' logic:
// switch (k)
// Handle well known pm options.
// ORIGINAL LINE: case PmPortKey:
if (StringHelper.stringsEqual(k, PmPortKey))
{
getPmPort().setEntity(StringHelper.isNullOrEmpty(value.get(k)) ? "" : value.get(k));
}
// ORIGINAL LINE: case PmSlotKey:
else if (StringHelper.stringsEqual(k, PmSlotKey))
{
getPmSlot().setEntity(StringHelper.isNullOrEmpty(value.get(k)) ? "" : value.get(k));
}
// ORIGINAL LINE: case PmSecureKey:
else if (StringHelper.stringsEqual(k, PmSecureKey))
{
getPmSecure().setEntity(Boolean.parseBoolean(value.get(k)));
}
else
{
// Compose custom string from unknown pm options.
if (StringHelper.isNullOrEmpty(v))
{
pmOptions += StringFormat.format("%1$s,", k);
}
else
{
pmOptions += StringFormat.format("%1$s=%2$s,", k, v);
}
}
}
if (!StringHelper.isNullOrEmpty(pmOptions))
{
getPmOptions().setEntity(pmOptions.substring(0, pmOptions.length() - 1));
}
}
public HostModel()
{
setTestCommand(new UICommand("Test", this));
setName(new EntityModel());
setHost(new EntityModel());
setManagementIp(new EntityModel());
setDataCenter(new ListModel());
getDataCenter().getSelectedItemChangedEvent().addListener(this);
setCluster(new ListModel());
getCluster().getSelectedItemChangedEvent().addListener(this);
setPort(new EntityModel());
setRootPassword(new EntityModel());
EntityModel tempVar = new EntityModel();
tempVar.setEntity(false);
setOverrideIpTables(tempVar);
setPmUserName(new EntityModel());
setPmPassword(new EntityModel());
setPmType(new ListModel());
getPmType().getSelectedItemChangedEvent().addListener(this);
setPmSecure(new EntityModel());
getPmSecure().setIsAvailable(false);
getPmSecure().setEntity(false);
setPmPort(new EntityModel());
getPmPort().setIsAvailable(false);
setPmSlot(new EntityModel());
getPmSlot().setIsAvailable(false);
setPmOptions(new EntityModel());
setIsPm(new EntityModel());
getIsPm().getEntityChangedEvent().addListener(this);
getIsPm().setEntity(false);
setIsPowerManagementTabValid(true);
setIsGeneralTabValid(getIsPowerManagementTabValid());
}
@Override
public void eventRaised(Event ev, Object sender, EventArgs args)
{
super.eventRaised(ev, sender, args);
if (ev.equals(ListModel.SelectedItemChangedEventDefinition) && sender == getDataCenter())
{
DataCenter_SelectedItemChanged();
}
else if (ev.equals(ListModel.SelectedItemChangedEventDefinition) && sender == getCluster())
{
Cluster_SelectedItemChanged();
}
else if (ev.equals(ListModel.SelectedItemChangedEventDefinition) && sender == getPmType())
{
PmType_SelectedItemChanged();
}
else if (ev.equals(EntityModel.EntityChangedEventDefinition) && sender == getIsPm())
{
IsPm_EntityChanged();
}
}
private void IsPm_EntityChanged()
{
UpdatePmModels();
}
private void DataCenter_SelectedItemChanged()
{
storage_pool dataCenter = (storage_pool) getDataCenter().getSelectedItem();
if (dataCenter != null)
{
AsyncQuery _asyncQuery = new AsyncQuery();
_asyncQuery.setModel(this);
_asyncQuery.asyncCallback = new INewAsyncCallback() {
@Override
public void OnSuccess(Object model, Object result)
{
HostModel hostModel = (HostModel) model;
java.util.ArrayList<VDSGroup> clusters = (java.util.ArrayList<VDSGroup>) result;
VDSGroup oldCluster = (VDSGroup) hostModel.getCluster().getSelectedItem();
storage_pool selectedDataCenter = (storage_pool) getDataCenter().getSelectedItem();
// Update selected cluster only if the returned cluster list is indeed the selected datacenter's
// clusters
if (clusters.isEmpty()
|| clusters.size() > 0
&& clusters.get(0)
.getstorage_pool_id()
.getValue()
.equals(selectedDataCenter.getId().getValue()))
{
hostModel.getCluster().setItems(clusters);
if (oldCluster != null)
{
VDSGroup newSelectedItem =
Linq.FirstOrDefault(clusters, new Linq.ClusterPredicate(oldCluster.getId()));
if (newSelectedItem != null)
{
hostModel.getCluster().setSelectedItem(newSelectedItem);
}
}
if (hostModel.getCluster().getSelectedItem() == null)
{
hostModel.getCluster().setSelectedItem(Linq.FirstOrDefault(clusters));
}
}
}
};
AsyncDataProvider.GetClusterList(_asyncQuery, dataCenter.getId());
}
}
private void Cluster_SelectedItemChanged()
{
VDSGroup cluster = (VDSGroup) getCluster().getSelectedItem();
if (cluster != null)
{
AsyncQuery _asyncQuery = new AsyncQuery();
_asyncQuery.setModel(this);
_asyncQuery.asyncCallback = new INewAsyncCallback() {
@Override
public void OnSuccess(Object model, Object result)
{
HostModel hostModel = (HostModel) model;
java.util.ArrayList<String> pmTypeList = (java.util.ArrayList<String>) result;
String pmType = (String) hostModel.getPmType().getSelectedItem();
hostModel.getPmType().setItems(pmTypeList);
if (pmTypeList.contains(pmType))
{
hostModel.getPmType().setSelectedItem(pmType);
}
else
{
hostModel.getPmType().setSelectedItem(null);
}
}
};
AsyncDataProvider.GetPmTypeList(_asyncQuery, cluster.getcompatibility_version());
}
}
private void PmType_SelectedItemChanged()
{
UpdatePmModels();
}
private void UpdatePmModels()
{
String pmType = (String) getPmType().getSelectedItem();
AsyncQuery _asyncQuery = new AsyncQuery();
_asyncQuery.setModel(this);
_asyncQuery.asyncCallback = new INewAsyncCallback() {
@Override
public void OnSuccess(Object model, Object result)
{
HostModel hostModel = (HostModel) model;
hostModel.postGetPmOptions((java.util.ArrayList<String>) result);
}
};
if (!StringHelper.isNullOrEmpty(pmType))
{
AsyncDataProvider.GetPmOptions(_asyncQuery, pmType);
}
else
{
postGetPmOptions(new java.util.ArrayList<String>());
}
}
public void postGetPmOptions(java.util.ArrayList<String> pmOptions)
{
getPmPort().setIsAvailable(pmOptions.contains(PmPortKey));
getPmSlot().setIsAvailable(pmOptions.contains(PmSlotKey));
getPmSecure().setIsAvailable(pmOptions.contains(PmSecureKey));
boolean isPm = (Boolean) getIsPm().getEntity();
getTestCommand().setIsExecutionAllowed(isPm);
getManagementIp().setIsChangable((Boolean) getIsPm().getEntity());
getManagementIp().setIsValid(true);
getPmUserName().setIsChangable((Boolean) getIsPm().getEntity());
getPmUserName().setIsValid(true);
getPmPassword().setIsChangable((Boolean) getIsPm().getEntity());
getPmPassword().setIsValid(true);
getPmType().setIsChangable((Boolean) getIsPm().getEntity());
getPmType().setIsValid(true);
getPmOptions().setIsChangable((Boolean) getIsPm().getEntity());
getPmSecure().setIsChangable((Boolean) getIsPm().getEntity());
getPmPort().setIsChangable((Boolean) getIsPm().getEntity());
getPmPort().setIsValid(true);
getPmSlot().setIsChangable((Boolean) getIsPm().getEntity());
}
public void Test()
{
// Validate user input.
if ((Boolean) getIsPm().getEntity())
{
getCluster().setIsValid(true);
getCluster().ValidateSelectedItem(new IValidation[] { new NotEmptyValidation() });
ValidatePmModels();
}
if (!getManagementIp().getIsValid() || !getPmUserName().getIsValid() || !getPmPassword().getIsValid()
|| !getPmType().getIsValid() || !getPmPort().getIsValid() || !getPmOptions().getIsValid())
{
return;
}
setMessage("Testing in progress. It will take a few seconds. Please wait...");
getTestCommand().setIsExecutionAllowed(false);
VDSGroup cluster = (VDSGroup) getCluster().getSelectedItem();
GetNewVdsFenceStatusParameters param = new GetNewVdsFenceStatusParameters();
if (getHostId() != null)
{
param.setVdsId(getHostId().getValue());
}
param.setManagementIp((String) getManagementIp().getEntity());
param.setPmType((String) getPmType().getSelectedItem());
param.setUser((String) getPmUserName().getEntity());
param.setPassword((String) getPmPassword().getEntity());
param.setStoragePoolId(cluster.getstorage_pool_id().getValue() != null ? cluster.getstorage_pool_id()
.getValue()
.getValue() : NGuid.Empty);
param.setFencingOptions(new ValueObjectMap(getPmOptionsMap(), false));
Frontend.RunQuery(VdcQueryType.GetNewVdsFenceStatus, param, new IFrontendQueryAsyncCallback() {
@Override
public void OnSuccess(FrontendQueryAsyncResult result) {
if (result != null && result.getReturnValue() != null
&& result.getReturnValue().getReturnValue() != null) {
FenceStatusReturnValue fenceStatusReturnValue =
(FenceStatusReturnValue) result.getReturnValue().getReturnValue();
String message = fenceStatusReturnValue.toString();
setMessage(message);
getTestCommand().setIsExecutionAllowed(true);
}
}
@Override
public void OnFailure(FrontendQueryAsyncResult result) {
- String message = "Test Failed (unknown error).";
+ String message;
+ if (result != null && result.getReturnValue() != null
+ && result.getReturnValue().getReturnValue() != null) {
+ FenceStatusReturnValue fenceStatusReturnValue =
+ (FenceStatusReturnValue) result.getReturnValue().getReturnValue();
+ message = fenceStatusReturnValue.toString();
+ } else {
+ message = "Test Failed (unknown error).";
+ }
setMessage(message);
getTestCommand().setIsExecutionAllowed(true);
}
});
}
/**
 * Applies entity/selection validation to every power-management field;
 * each model's IsValid flag is updated as a side effect.
 */
private void ValidatePmModels()
{
    // The management address must be present and a well-formed host address.
    getManagementIp().ValidateEntity(new IValidation[] { new NotEmptyValidation(), new HostAddressValidation() });
    // Credentials and the agent type are simply required.
    getPmUserName().ValidateEntity(new IValidation[] { new NotEmptyValidation() });
    getPmPassword().ValidateEntity(new IValidation[] { new NotEmptyValidation() });
    getPmType().ValidateSelectedItem(new IValidation[] { new NotEmptyValidation() });
    // The port, when supplied, must lie within the valid TCP port range.
    IntegerValidation portRange = new IntegerValidation();
    portRange.setMinimum(1);
    portRange.setMaximum(65535);
    getPmPort().ValidateEntity(new IValidation[] { portRange });
    // Options are free-form key=value pairs.
    getPmOptions().ValidateEntity(new IValidation[] { new KeyValuePairValidation(true) });
}
/**
 * Validates the whole dialog: general fields always, power-management fields
 * only when PM is enabled. Updates the per-tab validity flags and returns
 * {@code true} when every validated field is valid.
 */
public boolean Validate()
{
    String hostNameRegex = StringFormat.format("^[0-9a-zA-Z-_\\.]{1,%1$s}$", HostNameMaxLength);
    String hostNameMessage =
            StringFormat.format("This field can't contain blanks or special characters, must "
                    + "be at least one character long, legal values are 0-9, a-z, '_', '.' "
                    + "and a length of up to %1$s characters.", HostNameMaxLength);

    // Name: required, bounded length, restricted character set.
    LengthValidation nameLength = new LengthValidation();
    nameLength.setMaxLength(255);
    RegexValidation nameFormat = new RegexValidation();
    nameFormat.setExpression(hostNameRegex);
    nameFormat.setMessage(hostNameMessage);
    getName().ValidateEntity(new IValidation[] { new NotEmptyValidation(), nameLength, nameFormat });

    // Host: required, bounded length, well-formed address.
    LengthValidation hostLength = new LengthValidation();
    hostLength.setMaxLength(255);
    getHost().ValidateEntity(new IValidation[] { new NotEmptyValidation(), hostLength, new HostAddressValidation() });

    // Port: required and within the valid TCP range.
    IntegerValidation portRange = new IntegerValidation();
    portRange.setMinimum(1);
    portRange.setMaximum(65535);
    getPort().ValidateEntity(new IValidation[] { new NotEmptyValidation(), portRange });

    getDataCenter().ValidateSelectedItem(new IValidation[] { new NotEmptyValidation() });
    getCluster().ValidateSelectedItem(new IValidation[] { new NotEmptyValidation() });

    // TODO: async name-uniqueness validation (ported check against
    // DataProvider.IsHostNameUnique is still pending).

    // Power-management fields are only validated when PM is enabled.
    if ((Boolean) getIsPm().getEntity())
    {
        ValidatePmModels();
    }

    boolean generalTabValid = getName().getIsValid() && getHost().getIsValid() && getPort().getIsValid()
            && getCluster().getIsValid();
    boolean pmTabValid = getManagementIp().getIsValid() && getPmUserName().getIsValid()
            && getPmPassword().getIsValid() && getPmType().getIsValid() && getPmPort().getIsValid()
            && getPmOptions().getIsValid();
    setIsGeneralTabValid(generalTabValid);
    setIsPowerManagementTabValid(pmTabValid);
    return generalTabValid && pmTabValid;
}
/**
 * Dispatches UI commands. Base-class commands are handled first; the only
 * command owned directly by this model is the power-management Test command.
 */
@Override
public void ExecuteCommand(UICommand command)
{
    super.ExecuteCommand(command);
    if (command != getTestCommand())
    {
        return;
    }
    Test();
}
}
| true
| true
|
/**
 * Runs a power-management connectivity test against the values currently
 * entered in the dialog. Input is validated first; on success an asynchronous
 * {@code GetNewVdsFenceStatus} query is issued and its outcome is surfaced
 * through {@code setMessage()}.
 */
public void Test()
{
    // Validate user input; a connectivity test with bad parameters is pointless.
    if ((Boolean) getIsPm().getEntity())
    {
        getCluster().setIsValid(true);
        getCluster().ValidateSelectedItem(new IValidation[] { new NotEmptyValidation() });
        ValidatePmModels();
    }
    if (!getManagementIp().getIsValid() || !getPmUserName().getIsValid() || !getPmPassword().getIsValid()
            || !getPmType().getIsValid() || !getPmPort().getIsValid() || !getPmOptions().getIsValid())
    {
        return;
    }
    setMessage("Testing in progress. It will take a few seconds. Please wait...");
    // Block re-entry while the query is in flight; the callbacks re-enable the command.
    getTestCommand().setIsExecutionAllowed(false);
    VDSGroup cluster = (VDSGroup) getCluster().getSelectedItem();
    GetNewVdsFenceStatusParameters param = new GetNewVdsFenceStatusParameters();
    if (getHostId() != null)
    {
        param.setVdsId(getHostId().getValue());
    }
    param.setManagementIp((String) getManagementIp().getEntity());
    param.setPmType((String) getPmType().getSelectedItem());
    param.setUser((String) getPmUserName().getEntity());
    param.setPassword((String) getPmPassword().getEntity());
    // Fall back to the empty GUID when the cluster has no storage pool attached.
    param.setStoragePoolId(cluster.getstorage_pool_id().getValue() != null ? cluster.getstorage_pool_id()
            .getValue()
            .getValue() : NGuid.Empty);
    param.setFencingOptions(new ValueObjectMap(getPmOptionsMap(), false));
    Frontend.RunQuery(VdcQueryType.GetNewVdsFenceStatus, param, new IFrontendQueryAsyncCallback() {
        @Override
        public void OnSuccess(FrontendQueryAsyncResult result) {
            if (result != null && result.getReturnValue() != null
                    && result.getReturnValue().getReturnValue() != null) {
                FenceStatusReturnValue fenceStatusReturnValue =
                        (FenceStatusReturnValue) result.getReturnValue().getReturnValue();
                setMessage(fenceStatusReturnValue.toString());
                getTestCommand().setIsExecutionAllowed(true);
            }
        }
        @Override
        public void OnFailure(FrontendQueryAsyncResult result) {
            // FIX: the original always reported "Test Failed (unknown error)."
            // and discarded the FenceStatusReturnValue carried in the result;
            // show the backend-supplied details when they are available.
            String message;
            if (result != null && result.getReturnValue() != null
                    && result.getReturnValue().getReturnValue() != null) {
                FenceStatusReturnValue fenceStatusReturnValue =
                        (FenceStatusReturnValue) result.getReturnValue().getReturnValue();
                message = fenceStatusReturnValue.toString();
            } else {
                message = "Test Failed (unknown error).";
            }
            setMessage(message);
            getTestCommand().setIsExecutionAllowed(true);
        }
    });
}
|
// Runs a power-management connectivity test using the dialog's current values.
// Input is validated first; then an asynchronous GetNewVdsFenceStatus query is
// issued and its outcome (or a generic failure message) is shown via setMessage().
public void Test()
{
    // Validate user input.
    if ((Boolean) getIsPm().getEntity())
    {
        getCluster().setIsValid(true);
        getCluster().ValidateSelectedItem(new IValidation[] { new NotEmptyValidation() });
        ValidatePmModels();
    }
    // Abort if any power-management field failed validation.
    if (!getManagementIp().getIsValid() || !getPmUserName().getIsValid() || !getPmPassword().getIsValid()
            || !getPmType().getIsValid() || !getPmPort().getIsValid() || !getPmOptions().getIsValid())
    {
        return;
    }
    setMessage("Testing in progress. It will take a few seconds. Please wait...");
    // Disable the Test command while the query is in flight; the callbacks re-enable it.
    getTestCommand().setIsExecutionAllowed(false);
    VDSGroup cluster = (VDSGroup) getCluster().getSelectedItem();
    GetNewVdsFenceStatusParameters param = new GetNewVdsFenceStatusParameters();
    if (getHostId() != null)
    {
        param.setVdsId(getHostId().getValue());
    }
    param.setManagementIp((String) getManagementIp().getEntity());
    param.setPmType((String) getPmType().getSelectedItem());
    param.setUser((String) getPmUserName().getEntity());
    param.setPassword((String) getPmPassword().getEntity());
    // Empty GUID when the selected cluster has no storage pool attached.
    param.setStoragePoolId(cluster.getstorage_pool_id().getValue() != null ? cluster.getstorage_pool_id()
            .getValue()
            .getValue() : NGuid.Empty);
    param.setFencingOptions(new ValueObjectMap(getPmOptionsMap(), false));
    Frontend.RunQuery(VdcQueryType.GetNewVdsFenceStatus, param, new IFrontendQueryAsyncCallback() {
        @Override
        public void OnSuccess(FrontendQueryAsyncResult result) {
            if (result != null && result.getReturnValue() != null
                    && result.getReturnValue().getReturnValue() != null) {
                FenceStatusReturnValue fenceStatusReturnValue =
                        (FenceStatusReturnValue) result.getReturnValue().getReturnValue();
                String message = fenceStatusReturnValue.toString();
                setMessage(message);
                // NOTE(review): a null result leaves the command disabled here -
                // presumably unreachable on success; verify against Frontend.RunQuery.
                getTestCommand().setIsExecutionAllowed(true);
            }
        }
        @Override
        public void OnFailure(FrontendQueryAsyncResult result) {
            // Show backend-provided failure details when available,
            // otherwise fall back to a generic message.
            String message;
            if (result != null && result.getReturnValue() != null
                    && result.getReturnValue().getReturnValue() != null) {
                FenceStatusReturnValue fenceStatusReturnValue =
                        (FenceStatusReturnValue) result.getReturnValue().getReturnValue();
                message = fenceStatusReturnValue.toString();
            } else {
                message = "Test Failed (unknown error).";
            }
            setMessage(message);
            getTestCommand().setIsExecutionAllowed(true);
        }
    });
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.