answer
stringlengths 17
10.2M
|
|---|
package org.cytoscape.ding.impl.cyannotator.annotations;
import java.awt.Color;
import java.awt.Graphics;
import java.awt.Paint;
import java.awt.Shape;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import org.cytoscape.ding.impl.DRenderingEngine;
import org.cytoscape.ding.impl.cyannotator.utils.ViewUtils;
import org.cytoscape.view.presentation.annotations.Annotation;
import org.cytoscape.view.presentation.annotations.ShapeAnnotation;
public class ShapeAnnotationImpl extends AbstractAnnotation implements ShapeAnnotation {
private ShapeType shapeType;
private double borderWidth = 1.0;
private Paint borderColor = Color.BLACK; // These are paint's so we can do gradients
private Paint fillColor; // These are paint's so we can do gradients
private double borderOpacity = 100.0;
private double fillOpacity = 100.0;
private Shape shape;
protected double factor = 1.0;
public ShapeAnnotationImpl(DRenderingEngine re, double width, double height, boolean usedForPreviews) {
super(re, usedForPreviews);
setSize(width, height);
shapeType = ShapeType.RECTANGLE;
borderWidth = 1.0;
}
public ShapeAnnotationImpl(ShapeAnnotationImpl c, double width, double height, boolean usedForPreviews) {
super(c, usedForPreviews);
setSize(width, height);
this.width = width;
this.height = height;
shapeType = GraphicsUtilities.getShapeType(c.getShapeType());
borderColor = c.getBorderColor();
borderWidth = c.getBorderWidth();
fillColor = c.getFillColor();
name = c.getName() != null ? c.getName() : getDefaultName();
if (shapeType == ShapeType.CUSTOM)
shape = GraphicsUtilities.copyCustomShape(c.getShape(), width, height);
else
shape = GraphicsUtilities.getShape(shapeType.shapeName(), 0.0, 0.0, width, height);
}
public ShapeAnnotationImpl(
DRenderingEngine re,
double x,
double y,
double rotation,
ShapeType shapeType,
double width,
double height,
Paint fillColor,
Paint edgeColor,
float edgeThickness
) {
super(re, x, y, rotation);
this.shapeType = shapeType;
this.fillColor = fillColor;
this.borderColor = edgeColor;
this.borderWidth = edgeThickness;
this.width = width;
this.height = height;
this.shape = GraphicsUtilities.getShape(shapeType.shapeName(), 0.0, 0.0, width, height);
}
public ShapeAnnotationImpl(DRenderingEngine re, Map<String, String> argMap) {
super(re, argMap);
double zoom = getLegacyZoom(argMap);
// If this is an old bounded text, we might not (yet) have a width or height
width = ViewUtils.getDouble(argMap, ShapeAnnotation.WIDTH, 100.0) / zoom;
height = ViewUtils.getDouble(argMap, ShapeAnnotation.HEIGHT, 100.0) / zoom;
fillColor = ViewUtils.getColor(argMap, FILLCOLOR, null);
fillOpacity = ViewUtils.getDouble(argMap, FILLOPACITY, 100.0);
borderWidth = ViewUtils.getDouble(argMap, EDGETHICKNESS, 1.0) / zoom;
borderColor = ViewUtils.getColor(argMap, EDGECOLOR, Color.BLACK);
borderOpacity = ViewUtils.getDouble(argMap, EDGEOPACITY, 100.0);
shapeType = GraphicsUtilities.getShapeType(argMap, SHAPETYPE, ShapeType.RECTANGLE);
if (shapeType != ShapeType.CUSTOM)
shape = GraphicsUtilities.getShape(shapeType.shapeName(), 0.0, 0.0, width, height);
else if (argMap.containsKey(CUSTOMSHAPE))
shape = GraphicsUtilities.deserializeShape(argMap.get(CUSTOMSHAPE));
}
@Override
public Class<? extends Annotation> getType() {
return ShapeAnnotation.class;
}
@Override
public Map<String, String> getArgMap() {
var argMap = super.getArgMap();
argMap.put(TYPE, ShapeAnnotation.class.getName());
if (fillColor != null)
argMap.put(FILLCOLOR, ViewUtils.convertColor(fillColor));
argMap.put(FILLOPACITY, Double.toString(fillOpacity));
if (borderColor != null)
argMap.put(EDGECOLOR, ViewUtils.convertColor(borderColor));
argMap.put(EDGETHICKNESS, Double.toString(borderWidth));
argMap.put(EDGEOPACITY, Double.toString(borderOpacity));
if (shapeType != null) {
argMap.put(SHAPETYPE, shapeType.name());
if (shapeType.equals(ShapeType.CUSTOM) && shape != null)
argMap.put(CUSTOMSHAPE, GraphicsUtilities.serializeShape(shape));
}
argMap.put(ShapeAnnotation.WIDTH, Double.toString(width));
argMap.put(ShapeAnnotation.HEIGHT, Double.toString(height));
return argMap;
}
/**
* Width and height are not applied, only colors, shape, etc.
*/
@Override
public void setStyle(Map<String, String> argMap) {
if (argMap != null) {
double zoom = getLegacyZoom(argMap);
setFillColor(ViewUtils.getColor(argMap, FILLCOLOR, null));
setFillOpacity(ViewUtils.getDouble(argMap, FILLOPACITY, 100.0));
setBorderWidth(ViewUtils.getDouble(argMap, EDGETHICKNESS, 1.0) / zoom);
setBorderColor(ViewUtils.getColor(argMap, EDGECOLOR, Color.BLACK));
setBorderOpacity(ViewUtils.getDouble(argMap, EDGEOPACITY, 100.0));
setShapeType(GraphicsUtilities.getShapeType(argMap, SHAPETYPE, ShapeType.RECTANGLE));
}
}
@Override
public List<String> getSupportedShapes() {
return GraphicsUtilities.getSupportedShapes();
}
@Override
public Shape getShape() {
return shape;
}
@Override
public String getShapeType() {
return shapeType.shapeName();
}
public ShapeType getShapeTypeEnum() {
return shapeType;
}
public void setShapeType(ShapeType type) {
if (shapeType != type) {
var oldValue = shapeType;
shapeType = type;
if (shapeType != ShapeType.CUSTOM)
shape = GraphicsUtilities.getShape(shapeType.shapeName(), 0.0, 0.0, width, height);
update();
firePropertyChange("shapeType", oldValue, type);
}
}
@Override
public void setShapeType(String type) {
var shapeType = getShapeFromString(type);
if (!Objects.equals(this.shapeType, shapeType))
setShapeType(shapeType);
}
@Override
public double getBorderWidth() {
return borderWidth;
}
@Override
public void setBorderWidth(double width) {
if (borderWidth != width) {
var oldValue = borderWidth;
borderWidth = width;
update();
firePropertyChange("borderWidth", oldValue, width);
}
}
@Override
public Paint getBorderColor() {
return borderColor;
}
@Override
public double getBorderOpacity() {
return borderOpacity;
}
@Override
public Paint getFillColor() {
return fillColor;
}
@Override
public double getFillOpacity() {
return fillOpacity;
}
@Override
public void setBorderColor(Paint color) {
if (!Objects.equals(borderColor, color)) {
var oldValue = borderColor;
borderColor = color;
update();
firePropertyChange("borderColor", oldValue, color);
}
}
@Override
public void setBorderOpacity(double opacity) {
if (borderOpacity != opacity) {
var oldValue = borderOpacity;
borderOpacity = opacity;
update();
firePropertyChange("borderOpacity", oldValue, opacity);
}
}
@Override
public void setFillColor(Paint color) {
if (!Objects.equals(fillColor, color)) {
var oldValue = fillColor;
fillColor = color;
update();
firePropertyChange("fillColor", oldValue, color);
}
}
@Override
public void setFillOpacity(double opacity) {
if (fillOpacity != opacity) {
var oldValue = fillOpacity;
fillOpacity = opacity;
update();
firePropertyChange("fillOpacity", oldValue, opacity);
}
}
@Override
public void setCustomShape(String stringShape) {
Shape shape = GraphicsUtilities.deserializeShape(stringShape);
setCustomShape(shape);
}
@Override
public void setCustomShape(Shape shape) {
if (!Objects.equals(this.shape, shape)) {
var oldValue = this.shape;
this.shapeType = ShapeType.CUSTOM;
this.shape = shape;
update();
firePropertyChange("shape", oldValue, shape);
}
}
@Override
public void paint(Graphics g, boolean showSelection) {
super.paint(g, showSelection);
if(this.borderOpacity == 0 && this.fillOpacity == 0) // not here as an optimization, avoids invisible artifacts when exporting PDF
return;
GraphicsUtilities.drawShape(g, getX(), getY(), getWidth(), getHeight(), getRotation(), this, false);
}
private ShapeType getShapeFromString(String shapeName) {
for (var type : ShapeType.values()) {
if (type.shapeName().equals(shapeName))
return type;
}
return ShapeType.RECTANGLE;
}
}
|
package org.ovirt.engine.ui.common.widget.table.column;
import org.ovirt.engine.ui.common.CommonApplicationResources;
import org.ovirt.engine.ui.uicommonweb.models.storage.LunModel;
import com.google.gwt.cell.client.AbstractCell;
import com.google.gwt.core.client.GWT;
import com.google.gwt.safehtml.shared.SafeHtmlBuilder;
import com.google.gwt.safehtml.shared.SafeHtmlUtils;
/**
 * Table cell that renders the selection state of a {@link LunModel}:
 * an "OK" icon for already-included LUNs, a warning icon for inaccessible
 * ones, and otherwise a radio button (or checkbox in multi-selection mode).
 */
public class LunSelectionCell extends AbstractCell<LunModel> {

    protected static final CommonApplicationResources resources = GWT.create(CommonApplicationResources.class);

    // When true a checkbox is rendered instead of a radio button.
    private boolean multiSelection;

    public LunSelectionCell() {
    }

    public LunSelectionCell(boolean multiSelection) {
        this.multiSelection = multiSelection;
    }

    @Override
    public void render(Context context, LunModel value, SafeHtmlBuilder sb) {
        // AbstractCell.render may be called with a null value (empty row);
        // previously this threw a NullPointerException.
        if (value == null) {
            return;
        }

        StyledImageResourceCell imageCell = new StyledImageResourceCell();
        imageCell.setStyle("text-align: center;"); //$NON-NLS-1$

        if (value.getIsIncluded()) {
            imageCell.render(context, resources.okSmallImage(), sb);
        } else if (!value.getIsAccessible()) {
            imageCell.render(context, resources.logWarningImage(), sb);
        } else {
            sb.append(SafeHtmlUtils.fromTrustedString("<span style=\"padding-left: 1px;\">")); //$NON-NLS-1$
            String type = multiSelection ? "type='checkbox' " : "type='radio' "; //$NON-NLS-1$ //$NON-NLS-2$
            String checked = value.getIsSelected() ? "checked='checked' " : ""; //$NON-NLS-1$ //$NON-NLS-2$
            String disabled = value.getIsGrayedOut() ? "disabled='disabled' " : ""; //$NON-NLS-1$ //$NON-NLS-2$
            String input = "<input " + type + checked + disabled + " tabindex='-1'/>"; //$NON-NLS-1$ //$NON-NLS-2$
            sb.append(SafeHtmlUtils.fromTrustedString(input));
            sb.append(SafeHtmlUtils.fromTrustedString("</span>")); //$NON-NLS-1$
        }
    }
}
|
package org.jtalks.jcommune.web.validation;
import org.springframework.mock.web.MockMultipartFile;
import org.springframework.web.multipart.MultipartFile;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import javax.validation.ConstraintViolation;
import javax.validation.Validation;
import javax.validation.Validator;
import javax.validation.ValidatorFactory;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Set;
/**
* @author Eugeny Batov
*/
/**
 * Tests for the {@code @ImageDimension} constraint: an avatar image must be
 * exactly 4x4 pixels. Fixtures are small PNG/ICO files embedded as byte arrays.
 *
 * @author Eugeny Batov
 */
public class ImageDimensionValidatorTest {
    /**
     * Class for testing constraint.
     */
    public class TestObject {
        @ImageDimension(width = 4, height = 4)
        private MultipartFile avatar;
        public TestObject(MockMultipartFile avatar) {
            this.avatar = avatar;
        }
    }
    private static Validator validator;
    @BeforeClass
    public static void setUp() {
        ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
        validator = factory.getValidator();
    }
    // 4x4 PNG: matches the declared dimension exactly, so no violations expected.
    @Test
    public void testValidatorNormalDimension() {
        Set<ConstraintViolation<TestObject>> constraintViolations =
                validator.validate(new TestObject(new MockMultipartFile("test_avatar", "test_avatar",
                        "image/png", normalAvatarByteArray)));
        Assert.assertEquals(constraintViolations.size(), 0, "Validation errors");
    }
    // 3x3 PNG: smaller than 4x4, should produce exactly one violation.
    @Test
    public void testValidatorLittleDimension() {
        Set<ConstraintViolation<TestObject>> constraintViolations =
                validator.validate(new TestObject(new MockMultipartFile("test_avatar", "test_avatar",
                        "image/png", littleAvatarByteArray)));
        Assert.assertEquals(constraintViolations.size(), 1, "Validation without errors");
        Assert.assertNotNull(constraintViolations.iterator().next().getMessage());
    }
    // 5x5 PNG: larger than 4x4, should produce exactly one violation.
    @Test
    public void testValidatorBigDimension() {
        Set<ConstraintViolation<TestObject>> constraintViolations =
                validator.validate(new TestObject(new MockMultipartFile("test_avatar", "test_avatar",
                        "image/png", bigAvatarByteArray)));
        Assert.assertEquals(constraintViolations.size(), 1, "Validation without errors");
        Assert.assertNotNull(constraintViolations.iterator().next().getMessage());
    }
    // Empty file content: the validator apparently treats an absent image as valid
    // (0 violations) -- presumably so that "no avatar" passes; TODO confirm intent.
    @Test
    public void testValidatorImageNull() {
        Set<ConstraintViolation<TestObject>> constraintViolations =
                validator.validate(new TestObject(new MockMultipartFile("test_avatar", "",
                        "application/octet-stream",
                        new byte[0])));
        Assert.assertEquals(constraintViolations.size(), 0, "Validation errors");
    }
    // Non-image content (text/plain): should be rejected with one violation.
    @Test
    public void testValidatorNotImage() {
        Set<ConstraintViolation<TestObject>> constraintViolations =
                validator.validate(new TestObject(new MockMultipartFile("test_avatar", "",
                        "text/plain",
                        new byte[1024])));
        Assert.assertEquals(constraintViolations.size(), 1, "Validation without errors");
        Assert.assertNotNull(constraintViolations.iterator().next().getMessage());
    }
    // ICO image: unsupported format, should be rejected with one violation.
    @Test
    public void testValidatorImageIco() {
        Set<ConstraintViolation<TestObject>> constraintViolations =
                validator.validate(new TestObject(new MockMultipartFile("test_avatar", "",
                        "image/ico",
                        icoAvatarByteArray)));
        Assert.assertEquals(constraintViolations.size(), 1, "Validation without errors");
        Assert.assertNotNull(constraintViolations.iterator().next().getMessage());
    }
    // 4x4 PNG fixture (raw bytes of a complete PNG file).
    private byte[] normalAvatarByteArray = new byte[]{-119, 80, 78, 71, 13, 10, 26, 10, 0, 0, 0, 13, 73, 72, 68, 82, 0,
            0, 0, 4, 0, 0, 0, 4, 1, 0, 0, 0, 0, -127, -118, -93, -45, 0, 0, 0, 9, 112, 72, 89, 115, 0, 0, 1,
            -118, 0, 0, 1, -118, 1, 51, -105, 48, 88, 0, 0, 0, 32, 99, 72, 82, 77, 0, 0, 122, 37, 0, 0,
            -128, -125, 0, 0, -7, -1, 0, 0, -128, -23, 0, 0, 117, 48, 0, 0, -22, 96, 0, 0, 58, -104, 0, 0,
            23, 111, -110, 95, -59, 70, 0, 0, 0, 22, 73, 68, 65, 84, 120, -38, 98, -40, -49, -60, -64, -92,
            -64, -60, 0, 0, 0, 0, -1, -1, 3, 0, 5, -71, 0, -26, -35, -7, 32, 96, 0, 0, 0, 0, 73, 69, 78, 68,
            -82, 66, 96, -126
    };
    // 3x3 PNG fixture (too small).
    private byte[] littleAvatarByteArray = new byte[]{-119, 80, 78, 71, 13, 10, 26, 10, 0, 0, 0, 13, 73, 72, 68, 82, 0,
            0, 0, 3, 0, 0, 0, 3, 8, 0, 0, 0, 0, 115, 67, -22, 99, 0, 0, 0, 9, 112, 72, 89, 115, 0, 0, 1, -118, 0,
            0, 1, -118, 1, 51, -105, 48, 88, 0, 0, 0, 32, 99, 72, 82, 77, 0, 0, 122, 37, 0, 0, -128, -125,
            0, 0, -7, -1, 0, 0, -128, -23, 0, 0, 117, 48, 0, 0, -22, 96, 0, 0, 58, -104, 0, 0, 23, 111,
            -110, 95, -59, 70, 0, 0, 0, 27, 73, 68, 65, 84, 120, -38, 98, 100, -8, -49, -64, -16, -97,
            -31, 63, -61, -1, -1, 12, 0, 0, 0, 0, -1, -1, 3, 0, 26, -3, 4, -3, 23, 76, -83, 113, 0, 0, 0,
            0, 73, 69, 78, 68, -82, 66, 96, -126
    };
    // 5x5 PNG fixture (too big).
    private byte[] bigAvatarByteArray = new byte[]{-119, 80, 78, 71, 13, 10, 26, 10, 0, 0, 0, 13, 73, 72, 68, 82, 0, 0,
            0, 5, 0, 0, 0, 5, 8, 2, 0, 0, 0, 2, 13, -79, -78, 0, 0, 0, 9, 112, 72, 89, 115, 0, 0, 1, -118, 0,
            0, 1, -118, 1, 51, -105, 48, 88, 0, 0, 0, 32, 99, 72, 82, 77, 0, 0, 122, 37, 0, 0, -128, -125,
            0, 0, -7, -1, 0, 0, -128, -23, 0, 0, 117, 48, 0, 0, -22, 96, 0, 0, 58, -104, 0, 0, 23, 111,
            -110, 95, -59, 70, 0, 0, 0, 54, 73, 68, 65, 84, 120, -38, 76, -55, -79, 21, -128, 32, 0, -60,
            -48, 28, 14, 32, -52, -30, -2, -93, 121, -79, -112, -126, 116, -1, 37, 42, 71, 3, -72, -41, 4,
            -110, -88, -88, 42, 79, -37, 110, 3, 109, -81, 12, -33, -26, -1, 73, -88, 36, -33, 0, -62, -31,
            36, 71, 49, 115, -89, 85, 0, 0, 0, 0, 73, 69, 78, 68, -82, 66, 96, -126
    };
    // 16x16 ICO fixture (unsupported format for avatars).
    private byte[] icoAvatarByteArray = new byte[]{0, 0, 1, 0, 1, 0, 16, 16, 0, 0, 1, 0, 24, 0, 104, 3, 0, 0, 22, 0,
            0, 0, 40, 0, 0, 0, 16, 0, 0, 0, 32, 0, 0, 0, 1, 0, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, 96, 0, 0, 0, 96, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 114, 66, 41, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0,0, 114, 66, 41, -89, 99, 75, -89, 99, 75, -89, 99, 75, -89, 99, 75, -89, 99, 75, -89, 99, 75, -89,
            99, 75, -89, 99, 75, -89, 99, 75, -89, 99, 75, -89, 99, 75, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 114,
            66, 41, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 114, 66, 41, -89, 99, 75, -89, 99, 75, -89, 99, 75, -89, 99,
            75, -89, 99, 75, -89, 99, 75, -89, 99, 75, -89, 99, 75, -89, 99, 75, -89, 99, 75, -89, 99, 75, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 114, 66, 41, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 114, 66, 41, -89, 99, 75, -89,
            99, 75, -89, 99, 75, -89, 99, 75, -89, 99, 75, -89, 99, 75, -89, 99, 75, -89, 99, 75, -89, 99, 75, -89,
            99, 75, -89, 99, 75, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 114, 66, 41, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -68, -69, -70, -111, -111, -111,
            -111, -111, -111, -111, -111, -111, -111, -111, -111, -111, -111, -111, -111, -111, -111, -111, -111,
            -111, -68, -69, -70, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -111, -111, -111,
            -29, -29, -29, -29, -29, -29, -29, -29, -29, -29, -29, -29, -29, -29, -29, -29, -29, -29, -29, -29,
            -29, -111, -111, -111, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -111, -111, -111,
            -29, -29, -29, -29, -29, -29, -29, -29, -29, 114, 66, 41, -29, -29, -29, -29, -29, -29, -29, -29, -29,
            -111, -111, -111, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -111, -111, -111, -20,
            -19, -19, -20, -19, -19, -20, -19, -19, 114, 66, 41, -20, -19, -19, -20, -19, -19, -20, -19, -19, -111,
            -111, -111, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -111, -111, -111, -5, -5,
            -6, -89, 99, 75, -89, 99, 75, -89, 99, 75, -89, 99, 75, -89, 99, 75, -5, -5, -6, -111, -111, -111, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -111, -111, -111, -5, -5, -6, -5, -5, -6,
            -5, -5, -6, 114, 66, 41, -5, -5, -6, -5, -5, -6, -5, -5, -6, -111, -111, -111, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -111, -111, -111, -4, -4, -4, -4, -4, -4, -4, -4, -4, 114, 66,
            41, -4, -4, -4, -4, -4, -4, -4, -4, -4, -111, -111, -111, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, -111, -111, -111, -4, -4, -4, -4, -4, -4, -4, -4, -4, -4, -4, -4, -4, -4, -4, -4, -4,
            -4, -4, -4, -4, -111, -111, -111, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -68,
            -69, -70, -111, -111, -111, -111, -111, -111, -111, -111, -111, -111, -111, -111, -111, -111, -111,
            -111, -111, -111, -111, -111, -111, -68, -69, -70, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, -9, -1, 0, 0, -16, 0, 0, 0, -9, -1, 0, 0, -16, 0, 0, 0, -9, -1, 0, 0, -16, 0, 0, 0, -9, -1,
            0, 0, 0, 127, 0, 0, 0, 127, 0, 0, 0, 127, 0, 0, 0, 127, 0, 0, 0, 127, 0, 0, 0, 127, 0, 0, 0, 127, 0,
            0, 0, 127, 0, 0, 0, 127, 0, 0,
    };
}
|
package org.sagebionetworks.repo.model.dbo.dao.semaphore;
import static org.junit.Assert.*;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.sagebionetworks.repo.model.dao.semaphore.ExclusiveOrSharedSemaphoreDao;
import org.sagebionetworks.repo.model.exception.LockUnavilableException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = { "classpath:jdomodels-test-context.xml" })
/**
 * Integration tests for {@link ExclusiveOrSharedSemaphoreDao}: a reader/writer
 * lock implemented over a database. Writers acquire a short-lived "precursor"
 * token first (blocking new readers), then the actual exclusive lock once all
 * shared locks are released or expire.
 */
public class ExclusiveOrSharedSemaphoreDaoImplTest {

    @Autowired
    ExclusiveOrSharedSemaphoreDao exclusiveOrSharedSemaphoreDao;

    @Before
    public void before(){
        // release all locks
        exclusiveOrSharedSemaphoreDao.releaseAllLocks();
    }

    @After
    public void after(){
        // release all locks
        exclusiveOrSharedSemaphoreDao.releaseAllLocks();
    }

    /** Basic acquire/release of a shared (read) lock. */
    @Test
    public void testHappyReadLock(){
        long start = System.currentTimeMillis();
        String key = "123";
        // Should be able to get a read lock
        String token = exclusiveOrSharedSemaphoreDao.acquireSharedLock(key, 1000);
        assertNotNull(token);
        // We should be able to release the lock
        exclusiveOrSharedSemaphoreDao.releaseSharedLock(key, token);
        System.out.println("Shared lock timing: "+(System.currentTimeMillis()-start));
    }

    /** Basic precursor-then-lock acquire/release of an exclusive (write) lock, twice. */
    @Test
    public void testHappyWriteLock(){
        long start = System.currentTimeMillis();
        String key = "123";
        // First get the lock-precursor token
        String precursorToken = exclusiveOrSharedSemaphoreDao.acquireExclusiveLockPrecursor(key);
        assertNotNull(precursorToken);
        // Use it to get the actual token
        String lockToken = exclusiveOrSharedSemaphoreDao.acquireExclusiveLock(key, precursorToken, 1000);
        assertNotNull(lockToken);
        // We should be able to release the lock
        exclusiveOrSharedSemaphoreDao.releaseExclusiveLock(key, lockToken);
        System.out.println("Exclusive lock timing: "+(System.currentTimeMillis()-start));
        // We should now be able to get the lock again
        // First get the lock-precursor token
        precursorToken = exclusiveOrSharedSemaphoreDao.acquireExclusiveLockPrecursor(key);
        assertNotNull(precursorToken);
        // Use it to get the actual token
        lockToken = exclusiveOrSharedSemaphoreDao.acquireExclusiveLock(key, precursorToken, 1000);
        // We should be able to release the lock
        exclusiveOrSharedSemaphoreDao.releaseExclusiveLock(key, lockToken);
    }

    /** An outstanding write-lock-precursor must block NEW readers until it expires. */
    @Test
    public void testAcquireSharedLockWithOutstandingWritePrecursor() throws InterruptedException{
        // first get a read lock.
        String key = "123";
        // Should be able to get a read lock
        String readLockToken = exclusiveOrSharedSemaphoreDao.acquireSharedLock(key, 1000);
        assertNotNull(readLockToken);
        // Now acquire the write-lock-precursor
        String writeLockPrecursor = exclusiveOrSharedSemaphoreDao.acquireExclusiveLockPrecursor(key);
        assertNotNull(writeLockPrecursor);
        // Now we should not be should not be able to get a new read lock
        try{
            exclusiveOrSharedSemaphoreDao.acquireSharedLock(key, 1000);
            fail("Attempting to get a new read-lock when there is an outstanding write-lock-precursor should have failed.");
        }catch(LockUnavilableException e){
            // expected
        }
        // Now let the precursor expire and try again.
        Thread.sleep(ExclusiveOrSharedSemaphoreDaoImpl.WRITE_LOCK_PRECURSOR_TIMEOUT+10);
        // This time it should work
        String readLockTwo = exclusiveOrSharedSemaphoreDao.acquireSharedLock(key, 1000);
        assertNotNull(readLockTwo);
    }

    /** A write lock cannot be acquired while a read lock is outstanding, and vice versa. */
    @Test
    public void testAcquireSharedLockWithOutstandingWriteLock() throws InterruptedException{
        // first get a read lock.
        String key = "123";
        // Should be able to get a read lock
        String readLockToken = exclusiveOrSharedSemaphoreDao.acquireSharedLock(key, 1000);
        assertNotNull(readLockToken);
        // Now acquire the write-lock-precursor
        String writeLockPrecursor = exclusiveOrSharedSemaphoreDao.acquireExclusiveLockPrecursor(key);
        assertNotNull(writeLockPrecursor);
        // Now attempt to acquire the actual write-lock.
        String writeLockToken = exclusiveOrSharedSemaphoreDao.acquireExclusiveLock(key, writeLockPrecursor, 1000);
        assertEquals("Should not be able to get the actual write-lock when there is an outstanding read-lock",null, writeLockToken);
        // Release the read-lock so we can get the write-lock
        exclusiveOrSharedSemaphoreDao.releaseSharedLock(key, readLockToken);
        // Now get the write-lock
        writeLockToken = exclusiveOrSharedSemaphoreDao.acquireExclusiveLock(key, writeLockPrecursor, 1000);
        assertNotNull("Should have been able to get the actual write-lock as there are no more outstanding read-lock", writeLockToken);
        // Now we should not be should not be able to get a new read lock
        try{
            exclusiveOrSharedSemaphoreDao.acquireSharedLock(key, 1000);
            fail("Attempting to get a new read-lock when there is an outstanding write-lock should have failed.");
        }catch(LockUnavilableException e){
            // expected
        }
        // Now release the write lock and try again
        exclusiveOrSharedSemaphoreDao.releaseExclusiveLock(key, writeLockToken);
        // This time it should work
        String readLockTwo = exclusiveOrSharedSemaphoreDao.acquireSharedLock(key, 1000);
        assertNotNull(readLockTwo);
    }

    /** Only one write-lock-precursor may be outstanding per key at a time. */
    @Test
    public void testAcquireSecondWriteLockPrecursor() throws InterruptedException{
        String key = "123";
        // Now acquire the write-lock-precursor
        String writeLockPrecursor = exclusiveOrSharedSemaphoreDao.acquireExclusiveLockPrecursor(key);
        assertNotNull(writeLockPrecursor);
        // Trying to get a precursor again should fail
        try{
            exclusiveOrSharedSemaphoreDao.acquireExclusiveLockPrecursor(key);
            // FIX: message typo "when on is" -> "when one is"
            fail("Attempting to get a second write-lock-precursor should fail when one is already outstanding.");
        }catch(LockUnavilableException e){
            // expected
        }
        // Now let the precursor expire and try again.
        Thread.sleep(ExclusiveOrSharedSemaphoreDaoImpl.WRITE_LOCK_PRECURSOR_TIMEOUT+10);
        // This time it should work
        writeLockPrecursor = exclusiveOrSharedSemaphoreDao.acquireExclusiveLockPrecursor(key);
        assertNotNull(writeLockPrecursor);
    }

    /** Expired read locks are forcibly released so a waiting writer can proceed. */
    @Test
    public void testForcedReadLockRelease() throws InterruptedException{
        String key = "123";
        long timeoutOne = 2000;
        long timeoutTwo = 4000;
        long maxWaitMS = (timeoutOne+timeoutTwo)*2;
        // Get two read locks on that expires after two seconds and another the expires after 4
        String readLockOne = exclusiveOrSharedSemaphoreDao.acquireSharedLock(key, timeoutOne);
        assertNotNull(readLockOne);
        String readLockTwo = exclusiveOrSharedSemaphoreDao.acquireSharedLock(key, timeoutTwo);
        assertNotNull(readLockTwo);
        // Get the precursor
        String writeLockPrecursor = exclusiveOrSharedSemaphoreDao.acquireExclusiveLockPrecursor(key);
        assertNotNull(writeLockPrecursor);
        long start = System.currentTimeMillis();
        String writeLock = null;
        do{
            // try to get writeLock
            writeLock = exclusiveOrSharedSemaphoreDao.acquireExclusiveLock(key, writeLockPrecursor, 1000);
            assertTrue("Timed-out waiting for read-locks to expire", (System.currentTimeMillis()-start) < maxWaitMS);
            if(writeLock == null){
                System.out.println("Waiting for read-locks to expire...");
                Thread.sleep(1000);
            }
        }while(writeLock == null);
        // We should now have the write lock
        assertNotNull(writeLock);
    }

    /** Expired write locks are forcibly released so waiting readers can proceed. */
    @Test
    public void testForcedWriteLockRelease() throws InterruptedException{
        String key = "123";
        long timeoutOne = 4000;
        long maxWaitMS = timeoutOne*2;
        // First acquire a precursor
        String writeLockPrecursor = exclusiveOrSharedSemaphoreDao.acquireExclusiveLockPrecursor(key);
        assertNotNull(writeLockPrecursor);
        String writeLock = exclusiveOrSharedSemaphoreDao.acquireExclusiveLock(key, writeLockPrecursor, timeoutOne);
        assertNotNull(writeLock);
        long start = System.currentTimeMillis();
        String readLock = null;
        do{
            // try to get the readLock (comment previously said "writeLock" -- copy-paste error)
            try {
                readLock = exclusiveOrSharedSemaphoreDao.acquireSharedLock(key, timeoutOne);
            } catch (LockUnavilableException e) {
                // This will occur as long as the write-lock is active.
                readLock = null;
            }
            assertTrue("Timed-out waiting for write-locks to expire", (System.currentTimeMillis()-start) < maxWaitMS);
            if(readLock == null){
                System.out.println("Waiting for write-locks to expire...");
                Thread.sleep(1000);
            }
        }while(readLock == null);
        // We should now have the read lock
        assertNotNull(readLock);
    }
}
|
package org.parallelj.ssh.publickey;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.math.BigInteger;
import java.nio.charset.StandardCharsets;
import java.security.KeyFactory;
import java.security.NoSuchAlgorithmException;
import java.security.PublicKey;
import java.security.spec.DSAPublicKeySpec;
import java.security.spec.InvalidKeySpecException;
import java.security.spec.RSAPublicKeySpec;
import java.util.Arrays;

import org.apache.sshd.server.PublickeyAuthenticator;
import org.apache.sshd.server.session.ServerSession;
import org.bouncycastle.util.encoders.Base64;
/**
 * SSH public-key authenticator that checks a presented key against the entries
 * of an OpenSSH-style authorized_keys file (one "ssh-rsa"/"ssh-dss" line each).
 *
 * NOTE(review): the {@code bytes}/{@code pos} decode cursor is instance state,
 * so this class is not thread-safe -- confirm callers serialize authentication.
 */
public class URLPublicKeyAuthentificator implements PublickeyAuthenticator {

    // Decode cursor over the Base64-decoded key blob (SSH wire format).
    private byte[] bytes;
    private int pos;
    private String authorizedKeysFile;

    /**
     * @param authorizedKeysFile path to the authorized_keys file to check against
     */
    public URLPublicKeyAuthentificator(String authorizedKeysFile) {
        this.authorizedKeysFile = authorizedKeysFile;
    }

    /**
     * Accepts the session if the presented key's encoded form matches any key
     * listed in the authorized_keys file. Any parse or I/O error rejects.
     */
    @Override
    public boolean authenticate(String username, PublicKey key,
            ServerSession session) {
        String strLine;
        boolean isOk = false;
        // try-with-resources: the reader was previously never closed (leak).
        try (BufferedReader br = new BufferedReader(
                new FileReader(new File(this.authorizedKeysFile)))) {
            while ((strLine = br.readLine()) != null && !isOk) {
                PublicKey pKey = decodePublicKey(strLine);
                byte[] keyBytes = key.getEncoded();
                byte[] pKeyBytes = pKey.getEncoded();
                // Arrays.equals handles null and length checks and replaces the
                // manual loop that kept scanning after the first mismatch.
                isOk = keyBytes != null && pKeyBytes != null
                        && Arrays.equals(keyBytes, pKeyBytes);
            }
        } catch (Exception e) {
            ExtensionSshMessageKind.ISH0002.format(e);
            isOk = false;
        }
        return isOk;
    }

    /** Reads a length-prefixed string (the key type token, e.g. "ssh-rsa"). */
    private String decodeType() {
        int len = decodeInt();
        // Key type tokens are ASCII; pin the charset rather than relying on the
        // platform default.
        String type = new String(bytes, pos, len, StandardCharsets.US_ASCII);
        pos += len;
        return type;
    }

    /**
     * Decodes one authorized_keys line into a {@link PublicKey}.
     *
     * @throws IllegalArgumentException if no Base64 part is found or the key type is unknown
     * @throws InvalidKeySpecException  if the decoded numbers do not form a valid key
     * @throws NoSuchAlgorithmException if the RSA/DSA factories are unavailable
     */
    public PublicKey decodePublicKey(String keyLine) throws IllegalArgumentException, InvalidKeySpecException, NoSuchAlgorithmException {
        bytes = null;
        pos = 0;
        // look for the Base64 encoded part of the line to decode
        // both ssh-rsa and ssh-dss begin with "AAAA" due to the length bytes
        for (String part : keyLine.split(" ")) {
            if (part.startsWith("AAAA")) {
                byte[] bytePart = part.getBytes(StandardCharsets.US_ASCII);
                bytes = Base64.decode(bytePart);
                break;
            }
        }
        if (bytes == null) {
            throw new IllegalArgumentException("no Base64 part to decode");
        }
        String type = decodeType();
        if (type.equals("ssh-rsa")) {
            // RFC 4253 order: exponent e, then modulus m.
            BigInteger e = decodeBigInt();
            BigInteger m = decodeBigInt();
            RSAPublicKeySpec spec = new RSAPublicKeySpec(m, e);
            return KeyFactory.getInstance("RSA").generatePublic(spec);
        } else if (type.equals("ssh-dss")) {
            // RFC 4253 order: p, q, g, then y.
            BigInteger p = decodeBigInt();
            BigInteger q = decodeBigInt();
            BigInteger g = decodeBigInt();
            BigInteger y = decodeBigInt();
            DSAPublicKeySpec spec = new DSAPublicKeySpec(y, p, q, g);
            return KeyFactory.getInstance("DSA").generatePublic(spec);
        } else {
            throw new IllegalArgumentException("unknown type " + type);
        }
    }

    /** Reads a length-prefixed big-endian integer from the decode cursor. */
    private BigInteger decodeBigInt() {
        int len = decodeInt();
        byte[] bigIntBytes = new byte[len];
        System.arraycopy(bytes, pos, bigIntBytes, 0, len);
        pos += len;
        return new BigInteger(bigIntBytes);
    }

    /** Reads a 4-byte big-endian length prefix from the decode cursor. */
    private int decodeInt() {
        return ((bytes[pos++] & 0xFF) << 24) | ((bytes[pos++] & 0xFF) << 16)
                | ((bytes[pos++] & 0xFF) << 8) | (bytes[pos++] & 0xFF);
    }
}
|
package com.codenvy.ide.factory.client.utils;
import com.codenvy.ide.factory.client.FactoryLocalizationConstant;
import com.codenvy.ide.factory.client.accept.Authenticator;
import com.codenvy.ide.factory.shared.Constants;
import com.google.gwt.user.client.rpc.AsyncCallback;
import com.google.inject.Inject;
import org.eclipse.che.api.core.UnauthorizedException;
import org.eclipse.che.api.core.rest.shared.dto.ServiceError;
import org.eclipse.che.api.factory.shared.dto.Factory;
import org.eclipse.che.api.project.gwt.client.ProjectServiceClient;
import org.eclipse.che.api.project.shared.dto.ProjectDescriptor;
import org.eclipse.che.api.workspace.shared.dto.ProjectConfigDto;
import org.eclipse.che.ide.api.notification.Notification;
import org.eclipse.che.ide.api.project.wizard.ImportProjectNotificationSubscriber;
import org.eclipse.che.ide.dto.DtoFactory;
import org.eclipse.che.ide.ext.ssh.client.SshKeyService;
import org.eclipse.che.ide.rest.AsyncRequestCallback;
import org.eclipse.che.ide.rest.DtoUnmarshallerFactory;
import org.eclipse.che.ide.ui.dialogs.DialogFactory;
import org.eclipse.che.ide.websocket.rest.RequestCallback;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import static com.codenvy.ide.factory.client.accept.Authenticator.AuthCallback;
/**
 * Imports the projects declared by an accepted {@link Factory} into the
 * current workspace, reporting progress through a {@link Notification} and
 * retrying with OAuth authentication when the source repository rejects
 * anonymous access.
 *
 * @author Sergii Leschenko
 * @author Valeriy Svydenko
 */
public class FactoryProjectImporter {
    // Injected collaborators.
    private final ProjectServiceClient projectServiceClient;
    private final DtoUnmarshallerFactory dtoUnmarshallerFactory;
    private final FactoryLocalizationConstant localization;
    private final DtoFactory dtoFactory;
    private final Authenticator authenticator;
    private final DialogFactory dialogFactory;
    private final SshKeyService sshKeyService;
    private final ImportProjectNotificationSubscriber notificationSubscriber;

    // State of the import currently in flight; set by startImporting().
    private Factory factory;
    private Notification notification;
    private AsyncCallback<ProjectDescriptor> callback;

    @Inject
    public FactoryProjectImporter(ProjectServiceClient projectServiceClient,
                                  DtoUnmarshallerFactory dtoUnmarshallerFactory,
                                  FactoryLocalizationConstant localization,
                                  Authenticator authenticator,
                                  DtoFactory dtoFactory,
                                  DialogFactory dialogFactory,
                                  SshKeyService sshKeyService,
                                  ImportProjectNotificationSubscriber notificationSubscriber) {
        this.projectServiceClient = projectServiceClient;
        this.dtoUnmarshallerFactory = dtoUnmarshallerFactory;
        this.localization = localization;
        this.dtoFactory = dtoFactory;
        this.authenticator = authenticator;
        this.dialogFactory = dialogFactory;
        this.sshKeyService = sshKeyService;
        this.notificationSubscriber = notificationSubscriber;
    }

    /**
     * Starts importing every project of the given factory's workspace.
     *
     * @param notification notification used to report progress to the user
     * @param factory      factory whose workspace projects are imported
     * @param callback     invoked on failure. NOTE(review): on success this
     *                     callback is currently never invoked — see the
     *                     commented-out code in importProjects(); confirm
     *                     whether that is intentional.
     */
    public void startImporting(Notification notification, Factory factory, AsyncCallback<ProjectDescriptor> callback) {
        this.callback = callback;
        this.notification = notification;
        this.factory = factory;
        importProjects();
    }

    /**
     * Imports source to project
     *
     * <p>Each project of the factory workspace is imported via the project
     * service; an {@link UnauthorizedException} triggers an OAuth retry.
     */
    private void importProjects() {
        for (ProjectConfigDto projectConfig : factory.getWorkspace().getProjects()) {
            notification.setMessage(localization.cloningSource());
            notificationSubscriber.subscribe(projectConfig.getName(), notification);
            projectServiceClient.importProject(projectConfig.getName(), true, projectConfig.getSource(),
                                               new RequestCallback<Void>() {
                                                   @Override
                                                   protected void onSuccess(Void result) {
                                                       // NOTE(review): success is silently dropped here —
                                                       // callback.onSuccess(...) is never called and the
                                                       // notification subscriber is never completed.
                                                       // Earlier logic (kept for reference) updated factory
                                                       // attributes before reporting success:
                                                       // if (importedProject.getProjectDescriptor().getMixins()
                                                       //     .contains(Constants.FACTORY_PROJECT_TYPE_ID)) {
                                                       //     updateProjectAttributes(importedProject.getProjectDescriptor());
                                                       // } else {
                                                       //     callback.onSuccess(importedProject.getProjectDescriptor());
                                                       // TODO(review): update project attributes / report success.
                                                   }

                                                   @Override
                                                   protected void onFailure(Throwable exception) {
                                                       if (exception instanceof UnauthorizedException) {
                                                           // Private repository: authenticate and retry.
                                                           rerunWithAuthImport(projectConfig.getSource().getLocation());
                                                       } else {
                                                           // Server errors arrive as serialized ServiceError JSON.
                                                           callback.onFailure(
                                                                   new Exception("Unable to import source of project. " + dtoFactory
                                                                           .createDtoFromJson(exception.getMessage(), ServiceError.class)
                                                                           .getMessage()));
                                                       }
                                                   }
                                               });
        }
    }

    /**
     * Shows the OAuth window for the given repository location and, once the
     * user is authenticated, restarts the whole import.
     *
     * @param location location of the source repository that rejected access
     */
    private void rerunWithAuthImport(String location) {
        notification.setMessage(localization.needToAuthorizeBeforeAcceptMessage());
        authenticator.showOAuthWindow(location,
                                      new Authenticator.AuthCallback() {
                                          @Override
                                          public void onAuthenticated() {
                                              notification.setMessage(localization.oauthSuccess());
                                              importProjects();
                                          }

                                          @Override
                                          public void onError(String message) {
                                              notification.setMessage(localization.oauthFailed() + " " + message);
                                              notification.setType(Notification.Type.ERROR);
                                              notification.setStatus(Notification.Status.FINISHED);
                                          }
                                      });
    }

    /**
     * Stamps the factory id onto the project and pushes the update to the
     * server, completing the callback with the result.
     *
     * <p>NOTE(review): currently unreachable — its only caller is the
     * commented-out block in importProjects(). Also note withMixins(...) is
     * applied three times and withPath(...) receives getContentRoot();
     * verify both before re-enabling.
     */
    private void updateProjectAttributes(ProjectDescriptor projectDescriptor) {
        Map<String, List<String>> attributes = projectDescriptor.getAttributes();
        attributes.put(Constants.FACTORY_ID_ATTRIBUTE_NAME, Collections.singletonList(factory.getId()));
        ProjectConfigDto update = dtoFactory.createDto(ProjectConfigDto.class)
                                            .withType(projectDescriptor.getType())
                                            .withMixins(projectDescriptor.getMixins())
                                            .withAttributes(attributes)
                                            .withPath(projectDescriptor.getContentRoot())
                                            .withMixinTypes(projectDescriptor.getMixins())
                                            .withContentRoot(projectDescriptor.getContentRoot())
                                            .withMixins(projectDescriptor.getMixins())
                                            .withDescription(projectDescriptor.getDescription());
        projectServiceClient.updateProject(projectDescriptor.getPath(), update,
                                           new AsyncRequestCallback<ProjectDescriptor>(
                                                   dtoUnmarshallerFactory.newUnmarshaller(ProjectDescriptor.class)) {
                                               @Override
                                               protected void onSuccess(ProjectDescriptor projectDescriptor) {
                                                   callback.onSuccess(projectDescriptor);
                                               }

                                               @Override
                                               protected void onFailure(Throwable exception) {
                                                   callback.onFailure(new Exception("Unable to set properties of project. " + dtoFactory
                                                           .createDtoFromJson(exception.getMessage(), ServiceError.class)
                                                           .getMessage()));
                                               }
                                           }
        );
    }

    /** @return the factory currently being imported */
    public Factory getFactory() {
        return factory;
    }
}
|
package com.github.TKnudsen.timeseries.operations.preprocessing.univariate.normalization;
import java.util.Arrays;
import java.util.List;
import com.github.TKnudsen.ComplexDataObject.model.processors.IDataProcessor;
import com.github.TKnudsen.ComplexDataObject.model.processors.complexDataObject.DataProcessingCategory;
import com.github.TKnudsen.ComplexDataObject.model.tools.MathFunctions;
import com.github.TKnudsen.timeseries.data.univariate.ITimeSeriesUnivariate;
import com.github.TKnudsen.timeseries.operations.preprocessing.univariate.ITimeSeriesUnivariatePreprocessor;
import com.github.TKnudsen.timeseries.operations.tools.TimeSeriesTools;
/**
 * Min-max normalization for univariate time series: every value is linearly
 * rescaled into [0, 1].
 *
 * <p>Three modes, chosen by construction:
 * <ul>
 * <li>default: per-series (local) min/max,</li>
 * <li>{@code globalMinMax == true}: one min/max computed over all series,</li>
 * <li>explicit bounds: caller-supplied global min/max.</li>
 * </ul>
 */
public class MinMaxNormalization implements ITimeSeriesUnivariatePreprocessor {

	private boolean globalMinMax;

	// Explicit global bounds; NaN means "not set, derive from the data".
	private double globalMin = Double.NaN;
	private double globalMax = Double.NaN;

	/** Local (per-series) normalization. */
	public MinMaxNormalization() {
		this.globalMinMax = false;
	}

	/**
	 * @param globalMinMax if true, a single min/max over all processed series
	 *                     is used instead of per-series bounds
	 */
	public MinMaxNormalization(boolean globalMinMax) {
		this.globalMinMax = globalMinMax;
	}

	/**
	 * Normalization against fixed, caller-supplied global bounds.
	 *
	 * @param globalMin lower bound mapped to 0.0
	 * @param globalMax upper bound mapped to 1.0
	 */
	public MinMaxNormalization(double globalMin, double globalMax) {
		this.globalMinMax = true;
		this.globalMin = globalMin;
		this.globalMax = globalMax;
	}

	/**
	 * Rescales every value of every series in place.
	 *
	 * @param data the time series to normalize (modified in place)
	 */
	@Override
	public void process(List<ITimeSeriesUnivariate> data) {
		double globalMin = Double.POSITIVE_INFINITY;
		double globalMax = Double.NEGATIVE_INFINITY;
		if (!Double.isNaN(this.globalMin) && !Double.isNaN(this.globalMax)) {
			// Explicit bounds win over anything derived from the data.
			globalMin = this.globalMin;
			globalMax = this.globalMax;
		} else if (globalMinMax)
			for (ITimeSeriesUnivariate timeSeries : data) {
				globalMin = Math.min(globalMin, TimeSeriesTools.getMinValue(timeSeries));
				globalMax = Math.max(globalMax, TimeSeriesTools.getMaxValue(timeSeries));
			}
		for (ITimeSeriesUnivariate timeSeries : data) {
			double min = TimeSeriesTools.getMinValue(timeSeries);
			double max = TimeSeriesTools.getMaxValue(timeSeries);
			for (int i = 0; i < timeSeries.size(); i++)
				if (globalMinMax) // also true when explicit bounds were set
					timeSeries.replaceValue(i, MathFunctions.linearScale(globalMin, globalMax, timeSeries.getValue(i)));
				else
					timeSeries.replaceValue(i, MathFunctions.linearScale(min, max, timeSeries.getValue(i)));
		}
	}

	@Override
	public DataProcessingCategory getPreprocessingCategory() {
		return DataProcessingCategory.DATA_NORMALIZATION;
	}

	public boolean isGlobalMinMax() {
		return globalMinMax;
	}

	public void setGlobalMinMax(boolean globalMinMax) {
		this.globalMinMax = globalMinMax;
	}

	@Override
	public List<IDataProcessor<ITimeSeriesUnivariate>> getAlternativeParameterizations(int count) {
		// NOTE(review): count is ignored; only the inverted-flag variant is offered.
		return Arrays.asList(new MinMaxNormalization(!globalMinMax));
	}

	/**
	 * Equality on the mode flag and the explicit bounds. Two unset (NaN)
	 * bounds compare equal; set bounds compare numerically (so 0.0 == -0.0).
	 */
	@Override
	public boolean equals(Object o) {
		if (o == this)
			return true;
		if (!(o instanceof MinMaxNormalization))
			return false;
		MinMaxNormalization other = (MinMaxNormalization) o;
		return other.globalMinMax == globalMinMax
				&& sameBound(other.globalMin, globalMin)
				&& sameBound(other.globalMax, globalMax);
	}

	// Bound equality used by equals(): both NaN, or numerically equal.
	private static boolean sameBound(double a, double b) {
		return (Double.isNaN(a) && Double.isNaN(b)) || a == b;
	}

	/**
	 * FIX: equals() was overridden without hashCode(), breaking the
	 * Object contract (equal instances could land in different hash buckets).
	 * Consistent with equals(): -0.0 hashes like 0.0, NaN canonically.
	 */
	@Override
	public int hashCode() {
		int result = globalMinMax ? 1231 : 1237;
		result = 31 * result + hashBound(globalMin);
		result = 31 * result + hashBound(globalMax);
		return result;
	}

	// Hash of one bound; "+ 0.0" folds -0.0 into 0.0 to stay equals-consistent.
	private static int hashBound(double v) {
		long bits = Double.doubleToLongBits(v + 0.0);
		return (int) (bits ^ (bits >>> 32));
	}
}
|
package dk.statsbiblioteket.mediaplatform.workflowstatemonitor;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import java.util.Date;
import java.util.List;
/**
 * This class is annotated to be exposed as a REST webservice.
 *
 * <p>Exposes the inherited state-manager operations as a JAX-RS resource.
 * Date query parameters arrive as ISO-8601 strings and are converted to
 * {@link Date} before delegating to {@link HibernatedStateManager}.
 */
@Path("/")
public class HibernatedStateManagerWebservice extends HibernatedStateManager {

    @Override
    @POST
    @Path("states/{entityName}/")
    @Consumes("text/xml")
    public void addState(@PathParam("entityName") String entityName, State state) {
        super.addState(entityName, state);
    }

    @Override
    @GET
    @Path("entities/")
    @Produces({"text/xml", "application/json"})
    public List<Entity> listEntities() {
        return super.listEntities();
    }

    /**
     * Lists states of one entity.
     *
     * @param startDateString optional ISO-8601 lower bound; null/empty means unbounded
     * @param endDateString   optional ISO-8601 upper bound; null/empty means unbounded
     */
    @GET
    @Path("states/{entityName}/")
    @Produces({"text/xml", "application/json"})
    public List<State> listStates(@PathParam("entityName") String entityName,
                                  @QueryParam("onlyLast") boolean onlyLast,
                                  @QueryParam("includes") List<String> includes,
                                  @QueryParam("excludes") List<String> excludes,
                                  @QueryParam("startDate") String startDateString,
                                  @QueryParam("endDate") String endDateString) {
        return super.listStates(entityName, onlyLast, includes, excludes,
                                parseDate(startDateString), parseDate(endDateString));
    }

    /** Lists states across all entities; parameters as in the per-entity variant. */
    @GET
    @Path("states/")
    @Produces({"text/xml", "application/json"})
    public List<State> listStates(@QueryParam("onlyLast") boolean onlyLast,
                                  @QueryParam("includes") List<String> includes,
                                  @QueryParam("excludes") List<String> excludes,
                                  @QueryParam("startDate") String startDateString,
                                  @QueryParam("endDate") String endDateString) {
        return super.listStates(onlyLast, includes, excludes,
                                parseDate(startDateString), parseDate(endDateString));
    }

    /**
     * Converts an ISO-8601 date/time string to a {@link Date}. Extracted to
     * remove the fourfold duplication of this parsing logic.
     *
     * @param dateString raw query-parameter value; may be null or empty
     * @return the parsed date, or null when no value was supplied
     * @throws IllegalArgumentException if the string is not a valid ISO-8601 date/time
     */
    private static Date parseDate(String dateString) {
        if (dateString == null || dateString.isEmpty()) {
            return null;
        }
        return javax.xml.bind.DatatypeConverter.parseDateTime(dateString).getTime();
    }
}
|
package quizum.app.ui;
import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.FlowLayout;
import java.awt.GridLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.SwingConstants;
import javax.swing.border.EmptyBorder;
import quizum.QuizumUtils;
import quizum.beans.Question;
import quizum.beans.UserInfo;
public class QuizFrame extends JFrame {
private List<Question> questionList;
private JLabel imageLabel, label;
private UserInfo userInfo;
private int iterator;
private List<QuestionPanel> questionPanelList;
private JButton buttonNext;
private JButton buttonPrev;
public QuizFrame(String fileName, UserInfo userInfo) {
if (fileName == null || fileName.isEmpty()) {
JOptionPane.showMessageDialog(null, "Nie można odczytać konfiguracji", "Błąd konfiguracji", JOptionPane.ERROR_MESSAGE);
System.exit(0);
}
questionList = QuizumUtils.loadQuestionList(new File(fileName));
this.userInfo = userInfo;
initialize();
}
private void initialize() {
setTitle("Quizum");
setLocation(100, 100);
setMinimumSize(new Dimension(1200, 700));
setMaximumSize(new Dimension(1400, 900));
setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
getContentPane().setLayout(new BorderLayout(10, 10));
iterator = -1;
JPanel imagePanel = new JPanel();
imagePanel.setBorder(new EmptyBorder(5, 5, 5, 5));
imageLabel = new JLabel();
imagePanel.setLayout(new FlowLayout(FlowLayout.CENTER, 0, 0));
imagePanel.add(imageLabel);
questionPanelList = new ArrayList<QuestionPanel>();
questionList.forEach(question -> questionPanelList.add(new QuestionPanel(question)));
Collections.shuffle(questionPanelList);
int width = questionPanelList.stream().max((qPanel1, qPanel2) -> Integer.compare(qPanel1.getSize().width, qPanel2.getSize().width)).get().getWidth();
int height = questionPanelList.stream().max((qPanel1, qPanel2) -> Integer.compare(qPanel1.getSize().height, qPanel2.getSize().height)).get().getHeight();
setSize(width, height);
JPanel panel_1 = new JPanel();
panel_1.setLayout(new GridLayout(1, 3, 0, 5));
panel_1.add(new JPanel());
JPanel panel_2 = new JPanel();
panel_1.add(panel_2);
buttonPrev = new JButton("Poprzednie");
buttonPrev.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
QuestionPanel panel = getPrevQuestionPanel();
if(panel != null){
changePanel(panel);
}
}
});
panel_2.add(buttonPrev);
label = new JLabel("");
label.setHorizontalTextPosition(SwingConstants.CENTER);
panel_2.add(label);
buttonNext = new JButton("Następne");
buttonNext.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
QuestionPanel panel = getNextQuestionPanel();
if (panel != null) {
changePanel(panel);
}
}
});
panel_2.add(buttonNext);
changePanel(getNextQuestionPanel());
JPanel panel_3 = new JPanel();
FlowLayout flowLayout = (FlowLayout) panel_3.getLayout();
flowLayout.setHgap(20);
flowLayout.setAlignment(FlowLayout.RIGHT);
panel_1.add(panel_3);
JButton btnNewButton_1 = new JButton("Zakończ");
btnNewButton_1.setHorizontalAlignment(SwingConstants.RIGHT);
btnNewButton_1.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
questionPanelList.forEach(questionPanel -> {
userInfo.resolveQuestion(questionPanel.question,
questionPanel.getAnswer() != null ? questionPanel.getAnswer() : null);
});
QuizumUtils.generateReport(userInfo);
JOptionPane.showMessageDialog(null, "Poprawnych odpowiedzi: "+userInfo.getTotal()+" z "+questionPanelList.size()+" ("+(int)((double)userInfo.getTotal()/questionPanelList.size()*100)+"%)", "Wynik", JOptionPane.INFORMATION_MESSAGE);
System.exit(0);
}
});
panel_3.add(btnNewButton_1);
getContentPane().add(panel_1, BorderLayout.SOUTH);
getContentPane().add(imagePanel, BorderLayout.EAST);
setVisible(true);
}
public void changePanel(QuestionPanel panel) {
if (panel == null)
return;
if (getContentPane().getComponents().length > 0)
getContentPane().remove(0);
getContentPane().add(panel, 0);
if (panel.question.getPictureFileName() != null) {
setQuestionImage(QuizumUtils.getImageByFilename(panel.question.getPictureFileName()));
} else {
setQuestionImage(null);
}
label.setText((iterator+1)+" / "+questionPanelList.size());
if(iterator > 0) {
revalidateButton(buttonPrev, true);
} else {
revalidateButton(buttonPrev, false);
}
if(iterator+1 < questionPanelList.size()) {
revalidateButton(buttonNext, true);
} else {
revalidateButton(buttonNext, false);
}
revalidate();
repaint();
pack();
}
public QuestionPanel getNextQuestionPanel() {
if (questionPanelList.size() - 1 > iterator) {
iterator++;
return questionPanelList.get(iterator);
}
return null;
}
public QuestionPanel getPrevQuestionPanel() {
if (iterator > 0) {
iterator
return questionPanelList.get(iterator);
}
return null;
}
private void revalidateButton(JButton button, boolean toEnable) {
if((button.isEnabled() && !toEnable) || (!button.isEnabled() && toEnable)) {
button.setEnabled(toEnable);
}
}
private void setQuestionImage(ImageIcon icon) {
if (icon != null) {
icon = QuizumUtils.fitIcon(icon, 600, getHeight());
}
imageLabel.setIcon(icon);
imageLabel.repaint();
}
}
|
package org.eclipse.birt.report.designer.internal.ui.dialogs;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.eclipse.birt.core.data.ExpressionUtil;
import org.eclipse.birt.core.exception.BirtException;
import org.eclipse.birt.data.engine.api.aggregation.AggregationManager;
import org.eclipse.birt.data.engine.api.aggregation.IAggrFunction;
import org.eclipse.birt.data.engine.api.aggregation.IParameterDefn;
import org.eclipse.birt.report.data.adapter.api.AdapterException;
import org.eclipse.birt.report.data.adapter.api.DataAdapterUtil;
import org.eclipse.birt.report.designer.core.model.SessionHandleAdapter;
import org.eclipse.birt.report.designer.data.ui.dataset.DataSetUIUtil;
import org.eclipse.birt.report.designer.data.ui.util.DataUtil;
import org.eclipse.birt.report.designer.internal.ui.dialogs.expression.ExpressionButton;
import org.eclipse.birt.report.designer.internal.ui.extension.ExtendedDataModelUIAdapterHelper;
import org.eclipse.birt.report.designer.internal.ui.extension.IExtendedDataModelUIAdapter;
import org.eclipse.birt.report.designer.internal.ui.swt.custom.CLabel;
import org.eclipse.birt.report.designer.internal.ui.util.ExceptionHandler;
import org.eclipse.birt.report.designer.internal.ui.util.ExpressionButtonUtil;
import org.eclipse.birt.report.designer.internal.ui.util.UIUtil;
import org.eclipse.birt.report.designer.internal.ui.util.WidgetUtil;
import org.eclipse.birt.report.designer.nls.Messages;
import org.eclipse.birt.report.designer.ui.ReportPlatformUIImages;
import org.eclipse.birt.report.designer.ui.dialogs.BindingExpressionProvider;
import org.eclipse.birt.report.designer.ui.views.attributes.providers.ChoiceSetFactory;
import org.eclipse.birt.report.designer.util.AlphabeticallyComparator;
import org.eclipse.birt.report.designer.util.DEUtil;
import org.eclipse.birt.report.model.api.AggregationArgumentHandle;
import org.eclipse.birt.report.model.api.CachedMetaDataHandle;
import org.eclipse.birt.report.model.api.ComputedColumnHandle;
import org.eclipse.birt.report.model.api.DataItemHandle;
import org.eclipse.birt.report.model.api.DataSetHandle;
import org.eclipse.birt.report.model.api.DesignElementHandle;
import org.eclipse.birt.report.model.api.Expression;
import org.eclipse.birt.report.model.api.ExpressionHandle;
import org.eclipse.birt.report.model.api.ExpressionType;
import org.eclipse.birt.report.model.api.GridHandle;
import org.eclipse.birt.report.model.api.GroupHandle;
import org.eclipse.birt.report.model.api.IResourceLocator;
import org.eclipse.birt.report.model.api.LibraryHandle;
import org.eclipse.birt.report.model.api.ListGroupHandle;
import org.eclipse.birt.report.model.api.ListHandle;
import org.eclipse.birt.report.model.api.ListingHandle;
import org.eclipse.birt.report.model.api.ReportItemHandle;
import org.eclipse.birt.report.model.api.StructureFactory;
import org.eclipse.birt.report.model.api.TableGroupHandle;
import org.eclipse.birt.report.model.api.TableHandle;
import org.eclipse.birt.report.model.api.activity.SemanticException;
import org.eclipse.birt.report.model.api.elements.DesignChoiceConstants;
import org.eclipse.birt.report.model.api.elements.structures.AggregationArgument;
import org.eclipse.birt.report.model.api.elements.structures.ComputedColumn;
import org.eclipse.birt.report.model.api.elements.structures.ResultSetColumn;
import org.eclipse.birt.report.model.api.metadata.IChoice;
import org.eclipse.birt.report.model.api.metadata.IChoiceSet;
import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.jface.window.Window;
import org.eclipse.swt.SWT;
import org.eclipse.swt.accessibility.AccessibleAdapter;
import org.eclipse.swt.accessibility.AccessibleEvent;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.SelectionListener;
import org.eclipse.swt.events.TraverseEvent;
import org.eclipse.swt.events.TraverseListener;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Combo;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Layout;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Text;
import org.eclipse.ui.ISharedImages;
import org.eclipse.ui.PlatformUI;
/**
 * Dialog helper for creating and editing column bindings (plain data columns
 * and aggregations) on report items, including bindings obtained through a
 * data-binding reference to another report item.
 */
public class BindingDialogHelper extends AbstractBindingDialogHelper
{

	// Externalized UI labels.
	protected static final String NAME = Messages.getString( "BindingDialogHelper.text.Name" ); //$NON-NLS-1$
	protected static final String DATA_TYPE = Messages.getString( "BindingDialogHelper.text.DataType" ); //$NON-NLS-1$
	protected static final String FUNCTION = Messages.getString( "BindingDialogHelper.text.Function" ); //$NON-NLS-1$
	protected static final String DATA_FIELD = Messages.getString( "BindingDialogHelper.text.DataField" ); //$NON-NLS-1$
	protected static final String FILTER_CONDITION = Messages.getString( "BindingDialogHelper.text.Filter" ); //$NON-NLS-1$
	protected static final String AGGREGATE_ON = Messages.getString( "BindingDialogHelper.text.AggOn" ); //$NON-NLS-1$
	protected static final String TABLE = Messages.getString( "BindingDialogHelper.text.Table" ); //$NON-NLS-1$
	protected static final String LIST = Messages.getString( "BindingDialogHelper.text.List" ); //$NON-NLS-1$
	protected static final String GRID = Messages.getString( "BindingDialogHelper.text.Grid" ); //$NON-NLS-1$
	protected static final String GROUP = Messages.getString( "BindingDialogHelper.text.Group" ); //$NON-NLS-1$
	protected static final String EXPRESSION = Messages.getString( "BindingDialogHelper.text.Expression" ); //$NON-NLS-1$
	protected static final String DISPLAY_NAME = Messages.getString( "BindingDialogHelper.text.displayName" ); //$NON-NLS-1$
	protected static final String ALLOW_EXPORT_LABEL = Messages.getString( "BindingDialogHelper.text.allowExport" ); //$NON-NLS-1$
	protected static final String ALLOW_EXPORT_BUTTON = Messages.getString( "BindingDialogHelper.text.allowExport.button" ); //$NON-NLS-1$
	protected static final String DISPLAY_NAME_ID = Messages.getString( "BindingDialogHelper.text.displayNameID" ); //$NON-NLS-1$
	protected static final String DEFAULT_ITEM_NAME = Messages.getString( "BindingDialogHelper.bindingName.dataitem" ); //$NON-NLS-1$
	protected static final String DEFAULT_AGGREGATION_NAME = Messages.getString( "BindingDialogHelper.bindingName.aggregation" ); //$NON-NLS-1$
	protected static final String NAME_LABEL = Messages.getString( "BindingDialogHelper.error.text.Name" ); //$NON-NLS-1$

	// Allowed data types for a computed column, taken from the ROM metadata.
	protected static final IChoiceSet DATA_TYPE_CHOICE_SET = DEUtil.getMetaDataDictionary( )
			.getStructure( ComputedColumn.COMPUTED_COLUMN_STRUCT )
			.getMember( ComputedColumn.DATA_TYPE_MEMBER )
			.getAllowedChoices( );
	protected static final IChoice[] DATA_TYPE_CHOICES = DATA_TYPE_CHOICE_SET.getChoices( null );
	protected String[] dataTypes = ChoiceSetFactory.getDisplayNamefromChoiceSet( DATA_TYPE_CHOICE_SET );

	protected Button btnTable;

	// Widgets of the dialog body (created in createContent()).
	private Text txtName, txtFilter, txtExpression;
	private Combo cmbType, cmbFunction, cmbGroup;
	private Button btnGroup, btnDisplayNameID, btnRemoveDisplayNameID;
	// Container for the dynamically (re)built aggregate-function arguments.
	private Composite paramsComposite;
	// Argument name -> input control / remembered values for the current function.
	private Map<String, Control> paramsMap = new LinkedHashMap<String, Control>( );
	private Map<String, String[]> paramsValueMap = new HashMap<String, String[]>( );
	private Composite composite;
	private Text txtDisplayName, txtDisplayNameID;
	// Freshly created column when the dialog is used to create a new binding.
	private ComputedColumn newBinding;
	private CLabel messageLine;
	private Combo cmbName, cmbDataField;
	private Label lbName, lbDisplayNameID;
	// isCreate: the dialog creates a new binding; isRef: the holder references
	// another report item's bindings.
	private boolean isCreate;
	private boolean isRef;
	private Object container;
	private static final String EMPTY_STRING = ""; //$NON-NLS-1$
	/**
	 * Builds the dialog body: name (a text field, or a read-only combo when
	 * editing through a data-binding reference), localized display-name key,
	 * display name, data type, "allow export" check box, then either the
	 * aggregation or the common expression section plus the message line.
	 *
	 * @param parent dialog area; its layout is switched to four columns
	 */
	public void createContent( Composite parent )
	{
		isCreate = getBinding( ) == null;
		// Editing through a report-item reference: bindings are picked, not typed.
		isRef = getBindingHolder( ).getDataBindingType( ) == ReportItemHandle.DATABINDING_TYPE_REPORT_ITEM_REF;
		composite = parent;
		( (GridLayout) composite.getLayout( ) ).numColumns = 4;

		lbName = new Label( composite, SWT.NONE );
		lbName.setText( NAME );

		// NOTE(review): this GridData instance is reused for several controls
		// below (txtDisplayName, cmbType); sharing layout data between SWT
		// controls is usually discouraged — confirm this is intentional.
		GridData gd = new GridData( GridData.FILL_HORIZONTAL );
		gd.horizontalSpan = 3;
		gd.widthHint = 200;
		if ( isRef )
		{
			cmbName = new Combo( composite, SWT.BORDER | SWT.READ_ONLY );
			cmbName.setLayoutData( gd );
			cmbName.setVisibleItemCount( 30 );
			cmbName.addSelectionListener( new SelectionAdapter( ) {

				public void widgetSelected( SelectionEvent e )
				{
					modifyDialogContent( );
					// Re-initialize the whole dialog with the binding picked by name.
					String bindingName = cmbName.getItem( cmbName.getSelectionIndex( ) );
					for ( Iterator iterator = getBindingHolder( ).getDataBindingReference( )
							.getColumnBindings( )
							.iterator( ); iterator.hasNext( ); )
					{
						ComputedColumnHandle computedColumn = (ComputedColumnHandle) iterator.next( );
						if ( computedColumn.getName( ).equals( bindingName ) )
						{
							setBinding( computedColumn );
							initDialog( );
							return;
						}
					}
				}
			} );
		}
		else
		{
			txtName = new Text( composite, SWT.BORDER );
			txtName.setLayoutData( gd );
			txtName.addModifyListener( new ModifyListener( ) {

				public void modifyText( ModifyEvent e )
				{
					modifyDialogContent( );
					validate( );
				}
			} );
		}
		// WidgetUtil.createGridPlaceholder( composite, 1, false );

		// Display-name-ID label supports mnemonic traversal into the browse dialog.
		lbDisplayNameID = new Label( composite, SWT.NONE );
		lbDisplayNameID.setText( DISPLAY_NAME_ID );
		lbDisplayNameID.addTraverseListener( new TraverseListener( ) {

			public void keyTraversed( TraverseEvent e )
			{
				if ( e.detail == SWT.TRAVERSE_MNEMONIC && e.doit )
				{
					e.detail = SWT.TRAVERSE_NONE;
					if ( btnDisplayNameID.isEnabled( ) )
					{
						openKeySelectionDialog( );
					}
				}
			}
		} );

		txtDisplayNameID = new Text( composite, SWT.BORDER | SWT.READ_ONLY );
		txtDisplayNameID.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );

		btnDisplayNameID = new Button( composite, SWT.NONE );
		// Browsing for a key is only possible when resource bundles are reachable.
		btnDisplayNameID.setEnabled( getAvailableResourceUrls( ) != null
				&& getAvailableResourceUrls( ).length > 0 ? true : false );
		btnDisplayNameID.setText( "..." ); //$NON-NLS-1$
		btnDisplayNameID.setToolTipText( Messages.getString( "ResourceKeyDescriptor.button.browse.tooltip" ) ); //$NON-NLS-1$
		btnDisplayNameID.addSelectionListener( new SelectionAdapter( ) {

			public void widgetSelected( SelectionEvent event )
			{
				openKeySelectionDialog( );
			}
		} );

		btnRemoveDisplayNameID = new Button( composite, SWT.NONE );
		btnRemoveDisplayNameID.setImage( ReportPlatformUIImages.getImage( ISharedImages.IMG_TOOL_DELETE ) );
		btnRemoveDisplayNameID.setToolTipText( Messages.getString( "ResourceKeyDescriptor.button.reset.tooltip" ) ); //$NON-NLS-1$
		btnRemoveDisplayNameID.addSelectionListener( new SelectionAdapter( ) {

			public void widgetSelected( SelectionEvent event )
			{
				// Clearing the key also clears the resolved display name.
				txtDisplayNameID.setText( EMPTY_STRING );
				txtDisplayName.setText( EMPTY_STRING );
				modifyDialogContent( );
				updateRemoveBtnState( );
			}
		} );

		new Label( composite, SWT.NONE ).setText( DISPLAY_NAME );
		txtDisplayName = new Text( composite, SWT.BORDER );
		txtDisplayName.addModifyListener( new ModifyListener( ) {

			public void modifyText( ModifyEvent e )
			{
				modifyDialogContent( );
			}
		} );
		txtDisplayName.setLayoutData( gd );
		// WidgetUtil.createGridPlaceholder( composite, 1, false );

		new Label( composite, SWT.NONE ).setText( DATA_TYPE );
		cmbType = new Combo( composite, SWT.BORDER | SWT.READ_ONLY );
		cmbType.setLayoutData( gd );
		cmbType.setVisibleItemCount( 30 );
		cmbType.addSelectionListener( new SelectionListener( ) {

			public void widgetDefaultSelected( SelectionEvent arg0 )
			{
				validate( );
			}

			public void widgetSelected( SelectionEvent arg0 )
			{
				modifyDialogContent( );
				validate( );
			}
		} );

		Label allowExportLabel = new Label( composite, SWT.NONE );
		allowExportLabel.setText( ALLOW_EXPORT_LABEL );
		btnAllowExport = new Button( composite, SWT.CHECK );
		btnAllowExport.setText( ALLOW_EXPORT_BUTTON );
		btnAllowExport.setSelection( true );

		GridData gd1 = new GridData( GridData.FILL_HORIZONTAL );
		gd1.horizontalSpan = 3;
		gd1.widthHint = 200;
		// Keep the check-box row the same height as the combo row above it.
		gd1.heightHint = cmbType.computeSize( SWT.DEFAULT, SWT.DEFAULT ).y;
		btnAllowExport.setLayoutData( gd1 );
		btnAllowExport.addSelectionListener( new SelectionAdapter( ) {

			public void widgetSelected( SelectionEvent e )
			{
				modifyDialogContent( );
			}
		} );
		// WidgetUtil.setExcludeGridData( allowExportLabel, true );
		// WidgetUtil.setExcludeGridData( btnAllowExport, true );

		if ( isAggregate( ) )
		{
			createAggregateSection( composite );
		}
		else
		{
			createCommonSection( composite );
		}
		createMessageSection( composite );

		gd = new GridData( GridData.FILL_BOTH );
		composite.setLayoutData( gd );
		setContentSize( composite );
	}
private void openKeySelectionDialog( )
{
ResourceEditDialog dlg = new ResourceEditDialog( composite.getShell( ),
Messages.getString( "ResourceKeyDescriptor.title.SelectKey" ) ); //$NON-NLS-1$
dlg.setResourceURLs( getResourceURLs( ) );
if ( dlg.open( ) == Window.OK )
{
String[] result = (String[]) dlg.getDetailResult( );
if ( result != null && result.length > 1 )
{
txtDisplayNameID.setText( DEUtil.resolveNull( result[0] ) );
txtDisplayName.setText( DEUtil.resolveNull( result[1] ) );
modifyDialogContent( );
updateRemoveBtnState( );
}
}
}
	// Set once at the end of initDialog(); guards logic elsewhere that must
	// not run before initialization has completed.
	private boolean hasInitDialog = false;

	/**
	 * Populates every control from the current binding, or with defaults for
	 * a newly created one; when editing through a data-binding reference the
	 * name combo is filled with compatible bindings instead.
	 */
	public void initDialog( )
	{
		cmbType.setItems( dataTypes );
		// txtDisplayName.setFocus( );

		// initiate function firstly then data type field.
		// Give focus to the expression field when it exists; the null/disposed
		// check was added to fix TED 52776 (NPE thrown).
		if ( txtExpression != null && !txtExpression.isDisposed( ) )
		{
			txtExpression.setFocus( );
		}
		else
		{
			txtDisplayName.setFocus( );
		}

		if ( isAggregate( ) )
		{
			initFunction( );
			initFilter( );
			initGroups( );
		}

		if ( isCreate )// create
		{
			if ( isRef )
			{
				if ( getBinding( ) == null )
				{
					// Offer only bindings of the matching kind: aggregations
					// when creating an aggregation, plain columns otherwise.
					for ( Iterator iterator = getBindingHolder( ).getDataBindingReference( )
							.getColumnBindings( )
							.iterator( ); iterator.hasNext( ); )
					{
						ComputedColumnHandle computedColumn = (ComputedColumnHandle) iterator.next( );
						if ( isAggregate( ) )
						{
							if ( computedColumn.getAggregateFunction( ) == null
									|| computedColumn.getAggregateFunction( )
											.equals( "" ) ) //$NON-NLS-1$
								continue;
						}
						else
						{
							if ( computedColumn.getAggregateFunction( ) != null
									&& !computedColumn.getAggregateFunction( )
											.equals( "" ) ) //$NON-NLS-1$
								continue;
						}
						cmbName.add( computedColumn.getName( ) );
					}
				}
				else
				{
					// A referenced binding is already chosen: mirror its values.
					setDisplayName( getBinding( ).getDisplayName( ) );
					setDisplayNameID( getBinding( ).getDisplayNameID( ) );
					setAllowExport( getBinding( ).allowExport( ) );
					for ( int i = 0; i < DATA_TYPE_CHOICES.length; i++ )
					{
						if ( DATA_TYPE_CHOICES[i].getName( )
								.equals( getBinding( ).getDataType( ) ) )
						{
							setTypeSelect( DATA_TYPE_CHOICES[i].getDisplayName( ) );
							break;
						}
					}
					setDataFieldExpression( getBinding( ) );
				}
			}
			else
			{
				// Fresh binding: derive a unique default name from the holder.
				this.newBinding = StructureFactory.newComputedColumn( getBindingHolder( ),
						isAggregate( ) ? DEFAULT_AGGREGATION_NAME
								: DEFAULT_ITEM_NAME );
				setName( this.newBinding.getName( ) );
				setAllowExport( this.newBinding.allowExport( ) );
				if ( !isAggregate( ) )
				{
					setTypeSelect( getDataTypeDisplayName( DesignChoiceConstants.COLUMN_DATA_TYPE_STRING ) );
				}
			}
		}
		else
		{
			if ( isRef )
			{
				// Fill the combo with compatible bindings and pre-select the
				// one currently being edited.
				int i = 0;
				for ( Iterator iterator = getBindingHolder( ).getDataBindingReference( )
						.getColumnBindings( )
						.iterator( ); iterator.hasNext( ); )
				{
					ComputedColumnHandle computedColumn = (ComputedColumnHandle) iterator.next( );
					if ( isAggregate( ) )
					{
						if ( computedColumn.getAggregateFunction( ) == null
								|| computedColumn.getAggregateFunction( )
										.equals( "" ) ) //$NON-NLS-1$
							continue;
					}
					else
					{
						if ( computedColumn.getAggregateFunction( ) != null
								&& !computedColumn.getAggregateFunction( )
										.equals( "" ) ) //$NON-NLS-1$
							continue;
					}
					cmbName.add( computedColumn.getName( ) );
					if ( getBinding( ).getName( )
							.equals( computedColumn.getName( ) ) )
						cmbName.select( i );
					i++;
				}
				setDisplayName( getBinding( ).getDisplayName( ) );
				setDisplayNameID( getBinding( ).getDisplayNameID( ) );
				setAllowExport( getBinding( ).allowExport( ) );
				for ( i = 0; i < DATA_TYPE_CHOICES.length; i++ )
				{
					if ( DATA_TYPE_CHOICES[i].getName( )
							.equals( getBinding( ).getDataType( ) ) )
					{
						setTypeSelect( DATA_TYPE_CHOICES[i].getDisplayName( ) );
						break;
					}
				}
				setDataFieldExpression( getBinding( ) );
			}
			else
			{
				setName( getBinding( ).getName( ) );
				setDisplayName( getBinding( ).getDisplayName( ) );
				setDisplayNameID( getBinding( ).getDisplayNameID( ) );
				setAllowExport( getBinding( ).allowExport( ) );
				if ( getBinding( ).getDataType( ) != null )
				{
					if ( DATA_TYPE_CHOICE_SET.findChoice( getBinding( ).getDataType( ) ) != null )
						setTypeSelect( DATA_TYPE_CHOICE_SET.findChoice( getBinding( ).getDataType( ) )
								.getDisplayName( ) );
					else
						// the old type 'any' has no display name any more
						cmbType.setText( "" ); //$NON-NLS-1$
				}
				setDataFieldExpression( getBinding( ) );
			}
		}

		if ( !isCreate )
		{
			// An existing binding cannot be renamed; a referenced one can
			// still be swapped via the combo.
			if ( isRef )
			{
				this.cmbName.setEnabled( true );
			}
			else
			{
				this.txtName.setEnabled( false );
			}
		}

		validate( );
		hasInitDialog = true;
	}
private void initExpressionButton( ExpressionHandle expressionHandle,
Text text )
{
ExpressionButtonUtil.initExpressionButtonControl( text,
expressionHandle );
}
private void initFilter( )
{
if ( binding != null )
{
ExpressionHandle expressionHandle = binding.getExpressionProperty( ComputedColumn.FILTER_MEMBER );
initExpressionButton( expressionHandle, txtFilter );
}
}
/**
 * Populates the aggregation-function combo and, when editing an existing
 * binding, restores its function selection and stored argument values.
 */
private void initFunction( )
{
cmbFunction.setItems( getFunctionDisplayNames( ) );
// cmbFunction.add( NULL, 0 );
// Creating a new binding: default to the first function and let
// handleFunctionSelectEvent( ) build its parameter controls.
if ( binding == null )
{
cmbFunction.select( 0 );
handleFunctionSelectEvent( );
return;
}
try
{
// Map the model-level aggregation name to its display name to locate
// the matching combo entry.
String functionString = getFunctionDisplayName( DataAdapterUtil.adaptModelAggregationType( binding.getAggregateFunction( ) ) );
int itemIndex = getItemIndex( getFunctionDisplayNames( ),
functionString );
cmbFunction.select( itemIndex );
handleFunctionSelectEvent( );
}
catch ( AdapterException e )
{
ExceptionHandler.handle( e );
}
// List args = getFunctionArgs( functionString );
// bindingColumn.argumentsIterator( )
// FIXME backforward compatible with binding getExpression
// Push each stored argument value into the parameter control created by
// handleFunctionSelectEvent( ) above; unknown arguments are ignored.
for ( Iterator iterator = binding.argumentsIterator( ); iterator.hasNext( ); )
{
AggregationArgumentHandle arg = (AggregationArgumentHandle) iterator.next( );
String argName = DataAdapterUtil.adaptArgumentName( arg.getName( ) );
if ( paramsMap.containsKey( argName ) )
{
if ( arg.getValue( ) != null )
{
Control control = paramsMap.get( argName );
if ( control instanceof Text )
{
( (Text) control ).setText( arg.getValue( ) );
}
else if ( control instanceof Combo )
{
( (Combo) control ).setText( arg.getValue( ) );
}
}
}
}
}
/**
 * Returns the display names of all available aggregation functions,
 * sorted alphabetically.
 *
 * @return a sorted array of display names; empty when none are available
 */
private String[] getFunctionDisplayNames( )
{
    IAggrFunction[] functions = getFunctions( );
    if ( functions == null )
    {
        return new String[0];
    }
    String[] names = new String[functions.length];
    for ( int idx = 0; idx < functions.length; idx++ )
    {
        names[idx] = functions[idx].getDisplayName( );
    }
    java.util.Arrays.sort( names, new AlphabeticallyComparator( ) );
    return names;
}
/**
 * Looks up an aggregation function by its display name.
 *
 * @param displayName the localized function name shown in the combo
 * @return the matching function, or null when none matches
 */
private IAggrFunction getFunctionByDisplayName( String displayName )
{
    IAggrFunction[] functions = getFunctions( );
    if ( functions == null )
    {
        return null;
    }
    for ( IAggrFunction function : functions )
    {
        if ( function.getDisplayName( ).equals( displayName ) )
        {
            return function;
        }
    }
    return null;
}
/**
 * Resolves the display name of an aggregation function from its internal
 * name.
 *
 * @param function the internal (DTE) aggregation function name
 * @return the display name, or null when the lookup fails
 */
private String getFunctionDisplayName( String function )
{
    try
    {
        IAggrFunction aggrFunction = DataUtil.getAggregationManager( )
                .getAggregation( function );
        return aggrFunction.getDisplayName( );
    }
    catch ( BirtException e )
    {
        ExceptionHandler.handle( e );
        return null;
    }
}
/**
 * Fetches all tabular aggregation functions from the aggregation manager.
 *
 * @return the available functions; empty when the lookup fails
 */
private IAggrFunction[] getFunctions( )
{
    try
    {
        List aggregations = DataUtil.getAggregationManager( )
                .getAggregations( AggregationManager.AGGR_TABULAR );
        return (IAggrFunction[]) aggregations.toArray( new IAggrFunction[0] );
    }
    catch ( BirtException e )
    {
        ExceptionHandler.handle( e );
        return new IAggrFunction[0];
    }
}
/**
 * Restores the value of a non-data-field function parameter into its text
 * control, preferring a value the user entered earlier in this dialog
 * session over the value persisted on the binding.
 *
 * @param txtParam the text control for the parameter
 * @param param the parameter definition being initialized
 */
private void initTextField( Text txtParam, IParameterDefn param )
{
// Values cached from a previous function selection win over the model.
if ( paramsValueMap.containsKey( param.getName( ) ) )
{
txtParam.setText( paramsValueMap.get( param.getName( ) )[0] );
txtParam.setData( ExpressionButtonUtil.EXPR_TYPE,
paramsValueMap.get( param.getName( ) )[1] );
ExpressionButton button = (ExpressionButton) txtParam.getData( ExpressionButtonUtil.EXPR_BUTTON );
if ( button != null )
button.refresh( );
return;
}
// Otherwise fall back to the matching argument stored on the binding.
if ( binding != null )
{
for ( Iterator iterator = binding.argumentsIterator( ); iterator.hasNext( ); )
{
AggregationArgumentHandle arg = (AggregationArgumentHandle) iterator.next( );
if ( arg.getName( ).equals( param.getName( ) ) )
{
ExpressionButtonUtil.initExpressionButtonControl( txtParam,
arg,
AggregationArgument.VALUE_MEMBER );
return;
}
}
}
}
/**
 * Fills the data-field combo with the binding holder's column bindings and
 * restores its current value: first from the dialog's cached edits, then
 * from the binding's stored argument, finally from the binding expression
 * itself (old-style bindings).
 *
 * @param cmbDataField the combo to fill
 * @param param the data-field parameter being initialized
 */
private void initDataFields( Combo cmbDataField, IParameterDefn param )
{
cmbDataField.setItems( getColumnBindings( ) );
// Values cached from a previous function selection win over the model.
if ( paramsValueMap.containsKey( param.getName( ) ) )
{
cmbDataField.setText( paramsValueMap.get( param.getName( ) )[0] );
cmbDataField.setData( ExpressionButtonUtil.EXPR_TYPE,
paramsValueMap.get( param.getName( ) )[1] );
ExpressionButton button = (ExpressionButton) cmbDataField.getData( ExpressionButtonUtil.EXPR_BUTTON );
if ( button != null )
button.refresh( );
return;
}
if ( binding != null )
{
ExpressionHandle expressionHandle = null;
// Look for an argument on the binding that matches this parameter.
for ( Iterator iterator = binding.argumentsIterator( ); iterator.hasNext( ); )
{
AggregationArgumentHandle arg = (AggregationArgumentHandle) iterator.next( );
if ( arg.getName( ).equals( param.getName( ) ) )
{
ExpressionHandle value = arg.getExpressionProperty( AggregationArgument.VALUE_MEMBER );
expressionHandle = value;
break;
}
}
// No stored argument: fall back to the binding's own expression.
if ( expressionHandle == null )
expressionHandle = binding.getExpressionProperty( ComputedColumn.EXPRESSION_MEMBER );
ExpressionButtonUtil.initExpressionButtonControl( cmbDataField,
expressionHandle );
}
// Column-binding suggestions only apply to JavaScript expressions; clear
// them when the expression button does not offer that type.
Object button = cmbDataField.getData( ExpressionButtonUtil.EXPR_BUTTON );
if ( button instanceof ExpressionButton )
{
if ( !( (ExpressionButton) button ).isSupportType( ExpressionType.JAVASCRIPT ) )
{
cmbDataField.removeAll( );
}
}
}
/**
 * Returns the names of all column bindings visible on the binding holder.
 *
 * @return an array of binding names, possibly empty
 */
private String[] getColumnBindings( )
{
    List visibleBindings = DEUtil.getVisiableColumnBindingsList( getBindingHolder( ) );
    String[] names = new String[visibleBindings.size( )];
    for ( int idx = 0; idx < names.length; idx++ )
    {
        names[idx] = ( (ComputedColumnHandle) visibleBindings.get( idx ) ).getName( );
    }
    return names;
}
/**
 * Initializes the "aggregate on" radio buttons and group combo: selects
 * the group stored on the binding, or derives a default from the dialog's
 * container, or falls back to aggregating over the whole table.
 */
private void initGroups( )
{
String[] groups = getGroups( );
if ( groups.length > 0 )
{
cmbGroup.setItems( groups );
// An existing binding aggregated on a group: restore that selection.
if ( binding != null && binding.getAggregateOn( ) != null )
{
btnGroup.setSelection( true );
btnTable.setSelection( false );
if ( !isRef )
cmbGroup.setEnabled( true );
for ( int i = 0; i < groups.length; i++ )
{
if ( groups[i].equals( binding.getAggregateOn( ) ) )
{
cmbGroup.select( i );
return;
}
}
}
else
{
// BUG 201963
// Default to the enclosing group when the dialog was opened from
// inside a table group or a list group.
if ( this.container instanceof DesignElementHandle
&& ( (DesignElementHandle) this.container ).getContainer( )
.getContainer( ) instanceof TableGroupHandle )
{
TableGroupHandle groupHandle = (TableGroupHandle) ( (DesignElementHandle) this.container ).getContainer( )
.getContainer( );
for ( int i = 0; i < groups.length; i++ )
{
if ( groups[i].equals( groupHandle.getName( ) ) )
{
cmbGroup.select( i );
}
}
btnTable.setSelection( false );
btnGroup.setSelection( true );
}
else if ( this.container instanceof ListGroupHandle )
{
ListGroupHandle groupHandle = (ListGroupHandle) this.container;
for ( int i = 0; i < groups.length; i++ )
{
if ( groups[i].equals( groupHandle.getName( ) ) )
{
cmbGroup.select( i );
}
}
btnTable.setSelection( false );
btnGroup.setSelection( true );
}
else
{
// Not inside a group: aggregate over the whole table by default.
btnTable.setSelection( true );
btnGroup.setSelection( false );
cmbGroup.select( 0 );
cmbGroup.setEnabled( false );
}
}
}
else
{
// No groups exist: only table-level aggregation is possible.
btnGroup.setEnabled( false );
cmbGroup.setEnabled( false );
btnTable.setSelection( true );
}
}
/**
 * Collects the names of all groups defined on the binding holder.
 *
 * @return the group names; empty when the holder is not a listing element
 */
private String[] getGroups( )
{
    if ( !( getBindingHolder( ) instanceof ListingHandle ) )
    {
        return new String[0];
    }
    ListingHandle listing = (ListingHandle) getBindingHolder( );
    List names = new ArrayList( );
    int groupCount = listing.getGroups( ).getCount( );
    for ( int idx = 0; idx < groupCount; idx++ )
    {
        String groupName = ( (GroupHandle) listing.getGroups( ).get( idx ) ).getName( );
        // Unnamed groups cannot be aggregated on; skip them.
        if ( groupName != null )
        {
            names.add( groupName );
        }
    }
    return (String[]) names.toArray( new String[0] );
}
/**
 * Shows the binding's expression in the expression text field, if the
 * field is still alive.
 *
 * @param binding the binding whose expression should be displayed
 */
private void setDataFieldExpression( ComputedColumnHandle binding )
{
    if ( binding == null )
        return;
    if ( txtExpression == null || txtExpression.isDisposed( ) )
        return;
    ExpressionButtonUtil.initExpressionButtonControl( txtExpression,
            binding,
            ComputedColumn.EXPRESSION_MEMBER );
}
/**
 * Shows the binding name in the name field; ignores null values and a
 * missing control.
 */
private void setName( String name )
{
    if ( name == null || txtName == null )
        return;
    txtName.setText( name );
}
/**
 * Shows the binding display name; ignores null values and a missing
 * control.
 */
private void setDisplayName( String displayName )
{
    if ( displayName == null || txtDisplayName == null )
        return;
    txtDisplayName.setText( displayName );
}
/**
 * Shows the binding display-name resource key; ignores null values and a
 * missing control.
 */
private void setDisplayNameID( String displayNameID )
{
    if ( displayNameID == null || txtDisplayNameID == null )
        return;
    txtDisplayNameID.setText( displayNameID );
}
/**
 * Reflects the binding's allow-export flag in the checkbox, when present.
 */
private void setAllowExport( boolean allowExport )
{
    if ( btnAllowExport == null )
        return;
    btnAllowExport.setSelection( allowExport );
}
/**
 * Selects the given data-type display name in the type combo, defaulting
 * to the first entry when no name is supplied.
 *
 * @param typeSelect the display name to select, may be null
 */
private void setTypeSelect( String typeSelect )
{
    if ( dataTypes == null || cmbType == null )
        return;
    int index = ( typeSelect == null ) ? 0
            : getItemIndex( cmbType.getItems( ), typeSelect );
    cmbType.select( index );
}
/**
 * Returns the index of the given item in the array, or -1 when absent.
 *
 * @param items the array to search
 * @param item the value to look for
 * @return the first matching index, or -1
 */
private int getItemIndex( String[] items, String item )
{
    for ( int idx = 0; idx < items.length; idx++ )
    {
        if ( items[idx].equals( item ) )
        {
            return idx;
        }
    }
    return -1;
}
/**
 * Builds the aggregation-specific part of the dialog: function combo,
 * dynamic parameter area, filter condition, and the "aggregate on"
 * table/group radio buttons. Controls are disabled when editing a
 * read-only reference binding.
 *
 * @param composite the parent composite with a 4-column grid layout
 */
protected void createAggregateSection( Composite composite )
{
new Label( composite, SWT.NONE ).setText( FUNCTION );
cmbFunction = new Combo( composite, SWT.BORDER | SWT.READ_ONLY );
GridData gd = new GridData( GridData.FILL_HORIZONTAL );
gd.horizontalSpan = 3;
cmbFunction.setLayoutData( gd );
cmbFunction.setVisibleItemCount( 30 );
// WidgetUtil.createGridPlaceholder( composite, 1, false );
// Selecting a function rebuilds the parameter area and re-validates.
cmbFunction.addSelectionListener( new SelectionAdapter( ) {
public void widgetSelected( SelectionEvent e )
{
modifyDialogContent( );
handleFunctionSelectEvent( );
validate( );
}
} );
// Placeholder composite for the per-function parameter controls; starts
// excluded (zero-size) until a function with parameters is selected.
paramsComposite = new Composite( composite, SWT.NONE );
GridData gridData = new GridData( GridData.FILL_HORIZONTAL );
gridData.horizontalIndent = 0;
gridData.horizontalSpan = 4;
gridData.exclude = true;
paramsComposite.setLayoutData( gridData );
GridLayout layout = new GridLayout( );
// layout.horizontalSpacing = layout.verticalSpacing = 0;
layout.marginWidth = layout.marginHeight = 0;
layout.numColumns = 3;
// Keep horizontal spacing consistent with the parent grid.
Layout parentLayout = paramsComposite.getParent( ).getLayout( );
if ( parentLayout instanceof GridLayout )
layout.horizontalSpacing = ( (GridLayout) parentLayout ).horizontalSpacing;
paramsComposite.setLayout( layout );
createFilterCondition(composite, gd);
final Label lblAggOn = new Label( composite, SWT.NONE );
lblAggOn.setText( AGGREGATE_ON );
gridData = new GridData( );
gridData.verticalAlignment = GridData.BEGINNING;
lblAggOn.setLayoutData( gridData );
Composite aggOnComposite = new Composite( composite, SWT.NONE );
gridData = new GridData( GridData.FILL_HORIZONTAL );
gridData.horizontalSpan = 3;
aggOnComposite.setLayoutData( gridData );
layout = new GridLayout( );
layout.horizontalSpacing = layout.verticalSpacing = 0;
layout.marginWidth = layout.marginHeight = 0;
layout.numColumns = 2;
aggOnComposite.setLayout( layout );
// "Whole element" radio; label depends on the holder's element type.
btnTable = new Button( aggOnComposite, SWT.RADIO );
if ( getBindingHolder( ) instanceof TableHandle )
btnTable.setText( TABLE );
else if ( getBindingHolder( ) instanceof ListHandle )
btnTable.setText( LIST );
else if ( getBindingHolder( ) instanceof GridHandle )
btnTable.setText( GRID );
btnTable.addSelectionListener( new SelectionListener( ) {
public void widgetDefaultSelected( SelectionEvent e )
{
}
public void widgetSelected( SelectionEvent e )
{
modifyDialogContent( );
cmbGroup.setEnabled( false );
}
} );
btnTable.getAccessible( )
.addAccessibleListener( new AccessibleAdapter( ) {
public void getName( AccessibleEvent e )
{
e.result = UIUtil.stripMnemonic( lblAggOn.getText( ) )
+ UIUtil.stripMnemonic( btnTable.getText( ) );
}
} );
WidgetUtil.createGridPlaceholder( aggOnComposite, 1, false );
// "Group" radio plus the group-name combo beside it.
btnGroup = new Button( aggOnComposite, SWT.RADIO );
btnGroup.setText( GROUP );
btnGroup.addSelectionListener( new SelectionListener( ) {
public void widgetDefaultSelected( SelectionEvent e )
{
}
public void widgetSelected( SelectionEvent e )
{
modifyDialogContent( );
cmbGroup.setEnabled( true );
}
} );
btnGroup.getAccessible( )
.addAccessibleListener( new AccessibleAdapter( ) {
public void getName( AccessibleEvent e )
{
e.result = UIUtil.stripMnemonic( lblAggOn.getText( ) )
+ UIUtil.stripMnemonic( btnGroup.getText( ) );
}
} );
cmbGroup = new Combo( aggOnComposite, SWT.BORDER | SWT.READ_ONLY );
cmbGroup.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
cmbGroup.setVisibleItemCount( 30 );
cmbGroup.addSelectionListener( new SelectionAdapter( ) {
public void widgetSelected( SelectionEvent e )
{
modifyDialogContent( );
}
} );
// NOTE(review): cmbFunction already got a selection listener above that
// calls modifyDialogContent( ); this second listener makes it run twice
// per selection — confirm whether this duplication is intentional.
cmbFunction.addSelectionListener( new SelectionAdapter( ) {
public void widgetSelected( SelectionEvent e )
{
modifyDialogContent( );
}
} );
// Reference bindings are read-only: disable every input control.
if ( isRef )
{
txtDisplayName.setEnabled( false );
txtDisplayNameID.setEnabled( false );
btnDisplayNameID.setEnabled( false );
cmbType.setEnabled( false );
cmbFunction.setEnabled( false );
// cmbDataField.setEnabled( false );
txtFilter.setEnabled( false );
paramsComposite.setEnabled( false );
cmbGroup.setEnabled( false );
btnTable.setEnabled( false );
btnGroup.setEnabled( false );
}
}
/**
 * Creates the filter-condition label, text field and expression button.
 * The incoming GridData is not reused; a fresh layout object is built for
 * the filter field.
 *
 * @param composite the parent composite
 * @param gd unused layout data kept for signature compatibility
 */
private void createFilterCondition( Composite composite, GridData gd )
{
    new Label( composite, SWT.NONE ).setText( FILTER_CONDITION );
    txtFilter = new Text( composite, SWT.BORDER | SWT.MULTI );
    // Size the multi-line text to a single line height, excluding borders.
    GridData filterData = new GridData( GridData.FILL_HORIZONTAL );
    filterData.heightHint = txtFilter.computeSize( SWT.DEFAULT, SWT.DEFAULT ).y
            - txtFilter.getBorderWidth( ) * 2;
    filterData.horizontalSpan = 2;
    txtFilter.setLayoutData( filterData );
    txtFilter.addModifyListener( new ModifyListener( ) {
        public void modifyText( ModifyEvent event )
        {
            modifyDialogContent( );
            validate( );
        }
    } );
    createExpressionButton( composite, txtFilter );
}
/**
 * Builds the plain (non-aggregate) expression row: label, expression text
 * field and expression-builder button. Inputs are disabled for read-only
 * reference bindings.
 *
 * @param composite the parent composite
 */
private void createCommonSection( Composite composite )
{
    new Label( composite, SWT.NONE ).setText( EXPRESSION );
    txtExpression = new Text( composite, SWT.BORDER | SWT.MULTI );
    GridData exprData = new GridData( GridData.FILL_HORIZONTAL );
    exprData.horizontalSpan = 2;
    // Size the multi-line text to a single line height, excluding borders.
    exprData.heightHint = txtExpression.computeSize( SWT.DEFAULT, SWT.DEFAULT ).y
            - txtExpression.getBorderWidth( ) * 2;
    txtExpression.setLayoutData( exprData );
    createExpressionButton( composite, txtExpression );
    txtExpression.addModifyListener( new ModifyListener( ) {
        public void modifyText( ModifyEvent event )
        {
            modifyDialogContent( );
            validate( );
        }
    } );
    // Reference bindings are read-only: disable every input control.
    if ( isRef )
    {
        txtDisplayName.setEnabled( false );
        txtDisplayNameID.setEnabled( false );
        btnDisplayNameID.setEnabled( false );
        cmbType.setEnabled( false );
        txtExpression.setEnabled( false );
    }
}
/**
 * Creates the message line used for status and error text at the bottom
 * of the dialog.
 *
 * @param composite the parent composite with a 4-column grid
 */
private void createMessageSection( Composite composite )
{
    messageLine = new CLabel( composite, SWT.LEFT );
    GridData messageData = new GridData( GridData.FILL_HORIZONTAL );
    messageData.horizontalSpan = 4;
    messageLine.setLayoutData( messageData );
}
/**
 * Shows an informational message (no icon) on the message line.
 *
 * @param message the text to display
 */
public void setMessage( String message )
{
    messageLine.setText( message );
    messageLine.setImage( null );
}
/**
 * Shows an error message with the workbench error icon on the message
 * line.
 *
 * @param message the error text to display
 */
public void setErrorMessage( String message )
{
    messageLine.setText( message );
    messageLine.setImage( PlatformUI.getWorkbench( )
            .getSharedImages( )
            .getImage( ISharedImages.IMG_OBJS_ERROR_TSK ) );
}
/**
 * Validates the dialog input and updates the wizard's finish state,
 * surfacing an error message when a required field is empty, the name
 * collides with an existing binding, or a mandatory aggregation parameter
 * has no value.
 */
private void verifyInput( )
{
// A reference binding only needs a selected name.
if ( isRef )
{
if ( cmbName.getText( ) == null || cmbName.getText( ).equals( "" ) ) //$NON-NLS-1$
{
dialog.setCanFinish( false );
}
else
{
dialogCanFinish( );
}
return;
}
if ( txtName != null
&& ( txtName.getText( ) == null || txtName.getText( )
.trim( )
.equals( "" ) ) ) //$NON-NLS-1$
{
setErrorMessage( Messages.getFormattedString( "BindingDialogHelper.error.empty", //$NON-NLS-1$
new Object[]{
NAME_LABEL
} ) );
dialog.setCanFinish( false );
return;
}
if ( cmbType.getText( ) == null || cmbType.getText( ).equals( "" ) ) //$NON-NLS-1$
{
dialog.setCanFinish( false );
return;
}
if ( this.binding == null )// create bindnig, we should check if the
// binding name already exists.
{
for ( Iterator iterator = this.bindingHolder.getColumnBindings( )
.iterator( ); iterator.hasNext( ); )
{
ComputedColumnHandle computedColumn = (ComputedColumnHandle) iterator.next( );
if ( computedColumn.getName( ).equals( txtName.getText( ) ) )
{
dialog.setCanFinish( false );
setErrorMessage( Messages.getFormattedString( "BindingDialogHelper.error.nameduplicate", //$NON-NLS-1$
new Object[]{
txtName.getText( )
} ) );
return;
}
}
}
// Clear any earlier error before the remaining checks.
setMessage( "" ); //$NON-NLS-1$
if ( txtExpression != null
&& ( txtExpression.getText( ) == null || txtExpression.getText( )
.trim( )
.equals( "" ) ) ) //$NON-NLS-1$
{
// This is a special calse if the item is data item,and the
// container is LibraryHandle,allow the empty expression
if ( !isAllowEmyptExpression( ) )
{
dialog.setCanFinish( false );
return;
}
}
// check non optional parameter is not empty
if ( isAggregate( ) )
{
try
{
// NOTE(review): getFunctionByDisplayName( ) can return null for an
// unknown display name, which would NPE below — confirm the combo
// only ever holds valid function names.
IAggrFunction aggregation = DataUtil.getAggregationManager( )
.getAggregation( getFunctionByDisplayName( cmbFunction.getText( ) ).getName( ) );
if ( aggregation.getParameterDefn( ).length > 0 )
{
IParameterDefn[] parameters = aggregation.getParameterDefn( );
for ( IParameterDefn param : parameters )
{
if ( !param.isOptional( ) )
{
Control control = paramsMap.get( param.getName( ) );
String paramValue = null;
if ( control instanceof Text )
{
paramValue = ( (Text) control ).getText( );
}
if ( control instanceof Combo )
{
paramValue = ( (Combo) control ).getText( );
}
if ( paramValue == null
|| paramValue.trim( ).equals( "" ) ) //$NON-NLS-1$
{
dialog.setCanFinish( false );
// Strip mnemonic decorations like "(&N)" and "&" from
// the display name before showing it to the user.
setErrorMessage( Messages.getFormattedString( "BindingDialogHelper.error.empty", //$NON-NLS-1$
new String[]{
param.getDisplayName( ).replaceAll("\\(&[a-zA-Z0-9]\\)", "").replaceAll("&", "")
} ));
return;
}
}
}
}
}
catch ( BirtException e )
{
// TODO show error message in message panel
}
}
dialogCanFinish( );
}
/**
 * Reports whether an empty expression is acceptable: only for a data item
 * without a data set that lives directly in a library.
 *
 * @return true when the expression field may be left blank
 */
private boolean isAllowEmyptExpression( )
{
    ReportItemHandle holder = getBindingHolder( );
    if ( !( holder instanceof DataItemHandle ) )
        return false;
    if ( holder.getDataSet( ) != null )
        return false;
    return holder.getContainer( ) instanceof LibraryHandle;
}
/**
 * Allows the dialog to finish unless it is an unmodified modal edit of a
 * binding that requires an expression.
 */
private void dialogCanFinish( )
{
    boolean blocked = !isAllowEmyptExpression( )
            && !hasModified
            && isEditModal( );
    dialog.setCanFinish( !blocked );
}
/**
 * Rebuilds the function-parameter area for the currently selected
 * aggregation function. Data-field parameters get a combo pre-filled with
 * the holder's column bindings; all other parameters get a text field.
 * Also updates the suggested data type for the selected function.
 * (Fix: removed a stray empty statement after modifyDialogContent().)
 */
private void handleFunctionSelectEvent( )
{
    // Referenced (library) bindings are read-only; nothing to rebuild.
    if ( isRef )
        return;
    // Dispose the controls built for the previously selected function.
    Control[] children = paramsComposite.getChildren( );
    for ( int i = 0; i < children.length; i++ )
    {
        children[i].dispose( );
    }
    IAggrFunction function = getFunctionByDisplayName( cmbFunction.getText( ) );
    if ( function != null )
    {
        paramsMap.clear( );
        IParameterDefn[] params = function.getParameterDefn( );
        if ( params.length > 0 )
        {
            ( (GridData) paramsComposite.getLayoutData( ) ).exclude = false;
            ( (GridData) paramsComposite.getLayoutData( ) ).heightHint = SWT.DEFAULT;
            // Align parameter labels with the widest single-span label on
            // the parent composite.
            int width = 0;
            if ( paramsComposite.getParent( ).getLayout( ) instanceof GridLayout )
            {
                Control[] controls = paramsComposite.getParent( )
                        .getChildren( );
                for ( int i = 0; i < controls.length; i++ )
                {
                    if ( controls[i] instanceof Label
                            && ( (GridData) controls[i].getLayoutData( ) ).horizontalSpan == 1 )
                    {
                        int labelWidth = controls[i].getBounds( ).width
                                - controls[i].getBorderWidth( ) * 2;
                        if ( labelWidth > width )
                            width = labelWidth;
                    }
                }
            }
            for ( final IParameterDefn param : params )
            {
                Label lblParam = new Label( paramsComposite, SWT.NONE );
                lblParam.setText( param.getDisplayName( )
                        + Messages.getString( "BindingDialogHelper.text.Colon" ) ); //$NON-NLS-1$
                GridData gd = new GridData( );
                gd.widthHint = lblParam.computeSize( SWT.DEFAULT,
                        SWT.DEFAULT ).x;
                if ( gd.widthHint < width )
                    gd.widthHint = width;
                lblParam.setLayoutData( gd );
                if ( param.isDataField( ) )
                {
                    // Data-field parameter: combo of column bindings.
                    cmbDataField = new Combo( paramsComposite, SWT.BORDER );
                    cmbDataField.setLayoutData( new GridData( GridData.FILL_HORIZONTAL
                            | GridData.GRAB_HORIZONTAL ) );
                    cmbDataField.setVisibleItemCount( 30 );
                    createExpressionButton( paramsComposite, cmbDataField );
                    initDataFields( cmbDataField, param );
                    cmbDataField.addModifyListener( new ModifyListener( ) {
                        public void modifyText( ModifyEvent e )
                        {
                            modifyDialogContent( );
                            validate( );
                            // Cache the edit so it survives re-selecting
                            // the function.
                            paramsValueMap.put( param.getName( ),
                                    new String[]{
                                            cmbDataField.getText( ),
                                            (String) cmbDataField.getData( ExpressionButtonUtil.EXPR_TYPE )
                                    } );
                        }
                    } );
                    cmbDataField.addSelectionListener( new SelectionAdapter( ) {
                        public void widgetSelected( SelectionEvent e )
                        {
                            // Replace a picked binding name with its
                            // binding expression.
                            String expr = getColumnBindingExpressionByName( cmbDataField.getText( ) );
                            if ( expr != null )
                            {
                                cmbDataField.setText( expr );
                            }
                            // cmbDataField.setData(
                            // ExpressionButtonUtil.EXPR_TYPE,
                            // ExpressionType.JAVASCRIPT );
                            // ExpressionButton button = (ExpressionButton)
                            // cmbDataField.getData(
                            // ExpressionButtonUtil.EXPR_BUTTON );
                            // if ( button != null )
                            // button.refresh( );
                        }
                    } );
                    paramsMap.put( param.getName( ), cmbDataField );
                }
                else
                {
                    // Plain parameter: multi-line text sized to one line.
                    final Text txtParam = new Text( paramsComposite,
                            SWT.BORDER | SWT.MULTI );
                    txtParam.addModifyListener( new ModifyListener( ) {
                        public void modifyText( ModifyEvent e )
                        {
                            modifyDialogContent( );
                            validate( );
                            paramsValueMap.put( param.getName( ),
                                    new String[]{
                                            txtParam.getText( ),
                                            (String) txtParam.getData( ExpressionButtonUtil.EXPR_TYPE )
                                    } );
                        }
                    } );
                    GridData gridData = new GridData( GridData.FILL_HORIZONTAL );
                    gridData.heightHint = txtParam.computeSize( SWT.DEFAULT,
                            SWT.DEFAULT ).y
                            - txtParam.getBorderWidth( ) * 2;
                    gridData.horizontalIndent = 0;
                    txtParam.setLayoutData( gridData );
                    createExpressionButton( paramsComposite, txtParam );
                    paramsMap.put( param.getName( ), txtParam );
                    initTextField( txtParam, param );
                }
            }
        }
        else
        {
            // No parameters: collapse the parameter area.
            ( (GridData) paramsComposite.getLayoutData( ) ).heightHint = 0;
            ( (GridData) paramsComposite.getLayoutData( ) ).exclude = true;
        }
        try
        {
            // Suggest the data type returned by the selected function.
            cmbType.setText( getDataTypeDisplayName( DataAdapterUtil.adapterToModelDataType( DataUtil.getAggregationManager( )
                    .getAggregation( function.getName( ) )
                    .getDataType( ) ) ) );
        }
        catch ( BirtException e )
        {
            ExceptionHandler.handle( e );
        }
    }
    else
    {
        // Unknown function: collapse the parameter area.
        ( (GridData) paramsComposite.getLayoutData( ) ).heightHint = 0;
        ( (GridData) paramsComposite.getLayoutData( ) ).exclude = true;
        // new Label( argsComposite, SWT.NONE ).setText( "no args" );
    }
    paramsComposite.layout( true, true );
    paramsComposite.getParent( ).layout( true, true );
    setContentSize( composite );
}
private void createExpressionButton( Composite parent, final Control control )
{
Listener listener = new Listener( ) {
public void handleEvent( Event event )
{
modifyDialogContent( );
validate( );
}
};
if ( expressionProvider == null )
{
IExtendedDataModelUIAdapter adapter = ExtendedDataModelUIAdapterHelper.getInstance( ).getAdapter( );
if(adapter != null && adapter.getBoundExtendedData( this.bindingHolder ) != null)
{
expressionProvider = adapter.getBindingExpressionProvider( this.bindingHolder, this.binding );
}
else
{
expressionProvider = new BindingExpressionProvider( this.bindingHolder,
this.binding );
}
}
ExpressionButton button = ExpressionButtonUtil.createExpressionButton( parent,
control,
expressionProvider,
this.bindingHolder,
listener );
if ( isRef )
{
button.setEnabled( false );
}
}
/**
 * Resolves a visible column binding by name and returns its binding
 * expression in the current expression syntax.
 *
 * @param name the column binding name
 * @return the binding expression, or null when no such binding exists
 */
private String getColumnBindingExpressionByName( String name )
{
    List visibleBindings = DEUtil.getVisiableColumnBindingsList( this.bindingHolder );
    for ( Iterator iterator = visibleBindings.iterator( ); iterator.hasNext( ); )
    {
        ComputedColumnHandle columnBinding = (ComputedColumnHandle) iterator.next( );
        if ( columnBinding.getName( ).equals( name ) )
        {
            return ExpressionButtonUtil.getCurrentExpressionConverter( cmbDataField )
                    .getBindingExpression( name );
        }
    }
    return null;
}
/**
 * Resolves an aggregation-function argument's internal name from its
 * display name.
 *
 * @param function the aggregation function name
 * @param argument the argument display name to look up
 * @return the internal argument name, or null when not found or on error
 */
private String getArgumentByDisplayName( String function, String argument )
{
    try
    {
        IAggrFunction aggrInfo = DataUtil.getAggregationManager( )
                .getAggregation( function );
        for ( IParameterDefn param : aggrInfo.getParameterDefn( ) )
        {
            if ( param.getDisplayName( ).equals( argument ) )
            {
                return param.getName( );
            }
        }
    }
    catch ( BirtException e )
    {
        ExceptionHandler.handle( e );
    }
    return null;
}
/**
 * Resolves an aggregation-function argument's display name from its
 * internal name.
 *
 * @param function the aggregation function name
 * @param argument the internal argument name to look up
 * @return the display name, or null when not found or on error
 */
private String getArgumentDisplayNameByName( String function,
        String argument )
{
    try
    {
        IAggrFunction aggrInfo = DataUtil.getAggregationManager( )
                .getAggregation( function );
        for ( IParameterDefn param : aggrInfo.getParameterDefn( ) )
        {
            if ( param.getName( ).equals( argument ) )
            {
                return param.getDisplayName( );
            }
        }
    }
    catch ( BirtException e )
    {
        ExceptionHandler.handle( e );
    }
    return null;
}
/**
 * Re-validates the dialog input and refreshes dependent button state.
 */
public void validate( )
{
    verifyInput( );
    updateRemoveBtnState( );
}
/**
 * Reports whether the dialog's current input differs from the stored
 * binding, so callers can skip saving when nothing changed.
 *
 * @param binding the persisted binding to compare against
 * @return true when any user-editable property would change on save
 */
public boolean differs( ComputedColumnHandle binding )
{
if ( isAggregate( ) )
{
if ( txtName != null
&& !strEquals( txtName.getText( ), binding.getName( ) ) )
return true;
if ( cmbName != null
&& !strEquals( cmbName.getText( ), binding.getName( ) ) )
return true;
if ( btnAllowExport.getSelection( ) != binding.allowExport( ) )
return true;
if ( !strEquals( binding.getDisplayName( ),
txtDisplayName.getText( ) ) )
return true;
if ( !strEquals( binding.getDisplayNameID( ),
txtDisplayNameID.getText( ) ) )
return true;
if ( !strEquals( binding.getDataType( ), getDataType( ) ) )
return true;
try
{
if ( !strEquals( DataAdapterUtil.adaptModelAggregationType( binding.getAggregateFunction( ) ),
getFunctionByDisplayName( cmbFunction.getText( ) ).getName( ) ) )
return true;
}
catch ( AdapterException e )
{
// Adapter failure is treated as "no function change"; the
// remaining checks still run.
}
if ( !expressionEquals( binding.getExpressionProperty( ComputedColumn.FILTER_MEMBER ),
txtFilter ) )
return true;
// "Table" selected must pair with a null aggregate-on; equality here
// means the radio state and the stored target disagree.
if ( btnTable.getSelection( ) == ( binding.getAggregateOn( ) != null ) )
return true;
if ( !btnTable.getSelection( )
&& !binding.getAggregateOn( ).equals( cmbGroup.getText( ) ) )
return true;
// Compare every stored argument with the current control values.
boolean hasArguments = false;
for ( Iterator iterator = binding.argumentsIterator( ); iterator.hasNext( ); )
{
AggregationArgumentHandle handle = (AggregationArgumentHandle) iterator.next( );
if ( paramsMap.containsKey( handle.getName( ) ) )
{
String[] paramValue = getControlValue( paramsMap.get( handle.getName( ) ) );
if ( !expressionEquals( handle.getExpressionProperty( AggregationArgument.VALUE_MEMBER ),
paramValue ) )
{
return true;
}
}
else
{
// A stored argument has no matching control: function changed.
return true;
}
hasArguments = true;
}
// Parameter controls exist but the binding stored no arguments.
if ( !hasArguments && !paramsMap.isEmpty( ) )
return true;
}
else
{
if ( txtName != null
&& !strEquals( txtName.getText( ), binding.getName( ) ) )
return true;
if ( cmbName != null
&& !strEquals( cmbName.getText( ), binding.getName( ) ) )
return true;
if ( !strEquals( txtDisplayName.getText( ),
binding.getDisplayName( ) ) )
return true;
if ( !strEquals( txtDisplayNameID.getText( ),
binding.getDisplayNameID( ) ) )
return true;
if ( btnAllowExport.getSelection( ) != binding.allowExport( ) )
return true;
if ( !strEquals( getDataType( ), binding.getDataType( ) ) )
return true;
if ( !expressionEquals( binding.getExpressionProperty( ComputedColumn.EXPRESSION_MEMBER ),
txtExpression ) )
return true;
}
return false;
}
/**
 * Compares a stored expression property with the contents of a text
 * control; a null property matches a blank control.
 *
 * @param expressionHandle the stored expression, may be null
 * @param text the control holding the current text and expression type
 * @return true when both text and expression type match
 */
private boolean expressionEquals( ExpressionHandle expressionHandle,
        Text text )
{
    if ( expressionHandle == null )
    {
        // No stored expression: equal only when the control is blank.
        return text.getText( ).trim( ).length( ) == 0;
    }
    return strEquals( expressionHandle.getStringExpression( ), text.getText( ) )
            && strEquals( expressionHandle.getType( ),
                    (String) text.getData( ExpressionButtonUtil.EXPR_TYPE ) );
}
/**
 * Compares a stored expression property with a {value, type} pair; a null
 * property matches a null pair or a blank value.
 *
 * @param expressionHandle the stored expression, may be null
 * @param strs a two-element array of expression text and type, may be null
 * @return true when both text and expression type match
 */
private boolean expressionEquals( ExpressionHandle expressionHandle,
        String[] strs )
{
    if ( expressionHandle == null )
    {
        return strs == null || strs[0].trim( ).length( ) == 0;
    }
    return strs != null
            && strEquals( expressionHandle.getStringExpression( ), strs[0] )
            && strEquals( expressionHandle.getType( ), strs[1] );
}
/**
 * Reads the current {value, expression type} pair from a parameter
 * control.
 *
 * @param control a Text or Combo parameter control
 * @return a two-element array, or null for an unsupported control type
 */
private String[] getControlValue( Control control )
{
    String value = null;
    if ( control instanceof Text )
        value = ( (Text) control ).getText( );
    else if ( control instanceof Combo )
        value = ( (Combo) control ).getText( );
    if ( value == null )
        return null;
    return new String[]{
            value,
            (String) control.getData( ExpressionButtonUtil.EXPR_TYPE )
    };
}
/**
 * Null-tolerant string comparison in which null and the empty string are
 * considered equal.
 *
 * @param left first value, may be null
 * @param right second value, may be null
 * @return true when both normalize to the same string
 */
private boolean strEquals( String left, String right )
{
    String normalizedLeft = ( left == null ) ? "" : left; //$NON-NLS-1$
    String normalizedRight = ( right == null ) ? "" : right; //$NON-NLS-1$
    return normalizedLeft.equals( normalizedRight );
}
/**
 * Maps an internal data-type name to its display name.
 *
 * @param dataType the internal data-type name
 * @return the display name, or an empty string when unknown
 */
private String getDataTypeDisplayName( String dataType )
{
    for ( int idx = 0; idx < DATA_TYPE_CHOICES.length; idx++ )
    {
        if ( dataType.equals( DATA_TYPE_CHOICES[idx].getName( ) ) )
        {
            return DATA_TYPE_CHOICES[idx].getDisplayName( );
        }
    }
    return ""; //$NON-NLS-1$
}
/**
 * Maps the data-type display name currently selected in the type combo
 * back to its internal name.
 *
 * @return the internal data-type name, or an empty string when unknown
 */
private String getDataType( )
{
    String selectedDisplayName = cmbType.getText( );
    for ( int idx = 0; idx < DATA_TYPE_CHOICES.length; idx++ )
    {
        if ( DATA_TYPE_CHOICES[idx].getDisplayName( )
                .equals( selectedDisplayName ) )
        {
            return DATA_TYPE_CHOICES[idx].getName( );
        }
    }
    return ""; //$NON-NLS-1$
}
/**
 * Writes the dialog's current input back into the given binding. For an
 * aggregation this also saves the function, filter, aggregate-on target
 * and rebuilds the argument list; otherwise only the plain properties and
 * expression are saved.
 *
 * @param binding the binding to update
 * @return the updated binding (or the referenced column when isRef)
 * @throws SemanticException when the model rejects a property value
 */
public ComputedColumnHandle editBinding( ComputedColumnHandle binding )
throws SemanticException
{
if ( isRef )
return getBindingColumn( );
if ( isAggregate( ) )
{
binding.setDisplayName( txtDisplayName.getText( ) );
binding.setDisplayNameID( txtDisplayNameID.getText( ) );
for ( int i = 0; i < DATA_TYPE_CHOICES.length; i++ )
{
if ( DATA_TYPE_CHOICES[i].getDisplayName( )
.equals( cmbType.getText( ) ) )
{
binding.setDataType( DATA_TYPE_CHOICES[i].getName( ) );
break;
}
}
binding.setAllowExport( btnAllowExport.getSelection( ) );
// binding.setExpression( cmbDataField.getText( ) );
binding.setAggregateFunction( getFunctionByDisplayName( cmbFunction.getText( ) ).getName( ) );
ExpressionButtonUtil.saveExpressionButtonControl( txtFilter,
binding,
ComputedColumn.FILTER_MEMBER );
// Table-level aggregation is represented by a null aggregate-on.
if ( btnTable.getSelection( ) )
{
binding.setAggregateOn( null );
}
else
{
binding.setAggregateOn( cmbGroup.getText( ) );
}
// remove expression created in old version.
binding.setExpression( null );
// Rebuild the argument list from the current parameter controls.
binding.clearArgumentList( );
for ( Iterator iterator = paramsMap.keySet( ).iterator( ); iterator.hasNext( ); )
{
String arg = (String) iterator.next( );
String[] value = getControlValue( paramsMap.get( arg ) );
if ( value != null )
{
AggregationArgument argHandle = StructureFactory.createAggregationArgument( );
argHandle.setName( arg );
argHandle.setExpressionProperty( AggregationArgument.VALUE_MEMBER,
new Expression( value[0], value[1] ) );
binding.addArgument( argHandle );
}
}
}
else
{
for ( int i = 0; i < DATA_TYPE_CHOICES.length; i++ )
{
if ( DATA_TYPE_CHOICES[i].getDisplayName( )
.equals( cmbType.getText( ) ) )
{
binding.setDataType( DATA_TYPE_CHOICES[i].getName( ) );
break;
}
}
binding.setDisplayName( txtDisplayName.getText( ) );
binding.setDisplayNameID( txtDisplayNameID.getText( ) );
binding.setAllowExport( btnAllowExport.getSelection( ) );
ExpressionButtonUtil.saveExpressionButtonControl( txtExpression,
binding,
ComputedColumn.EXPRESSION_MEMBER );
}
return binding;
}
/**
 * Creates a new computed column on the holder (named from the name field
 * when no name is supplied) and fills it from the dialog input.
 *
 * @param bindingHolder the element that receives the new binding
 * @param name the binding name, or null to use the dialog's name field
 * @return the newly created and populated binding
 * @throws SemanticException when the model rejects the new column
 */
public ComputedColumnHandle newBinding( ReportItemHandle bindingHolder,
        String name ) throws SemanticException
{
    if ( isRef )
        return getBindingColumn( );
    String columnName = ( name == null ) ? txtName.getText( ) : name;
    ComputedColumn column = StructureFactory.newComputedColumn( bindingHolder,
            columnName );
    ComputedColumnHandle created = DEUtil.addColumn( bindingHolder,
            column,
            true );
    return editBinding( created );
}
/**
 * Records the UI object that hosts this dialog helper; consulted later to
 * preselect the aggregate-on group.
 *
 * @param container the hosting element
 */
public void setContainer( Object container )
{
    this.container = container;
}
/**
 * Checks the current aggregation input for data-type mismatches and,
 * when one is found, asks the user whether to proceed anyway.
 *
 * @return true to continue saving, false when the user cancels
 */
public boolean canProcessWithWarning( )
{
if ( !isAggregate( ) )
{
return true;
}
try
{
// check function type
// if datatype in DTE is any, here will return '', for any is
// deprecated.
String type = getDataTypeDisplayName( DataAdapterUtil.adapterToModelDataType( DataUtil.getAggregationManager( )
.getAggregation( getFunctionByDisplayName( cmbFunction.getText( ) ).getName( ) )
.getDataType( ) ) );
if ( type != null && !type.equals( "" ) //$NON-NLS-1$
&& !type.equals( cmbType.getText( ) ) )
{
if ( !canProcessFunctionTypeError( cmbFunction.getText( ),
cmbType.getText( ),
type ) )
{
return false;
}
}
// check expression is vaid for parameter type
// first get expression column or binding.
IAggrFunction function = getFunctionByDisplayName( cmbFunction.getText( ) );
if ( function != null )
{
// Collect the data set's cached result columns (refreshing the
// cache when absent) and all column bindings in scope.
DataSetHandle dataSetHandle = DEUtil.getFirstDataSet( this.bindingHolder );
List<ResultSetColumn> columnList = null;
if ( dataSetHandle != null )
{
CachedMetaDataHandle meta = dataSetHandle.getCachedMetaDataHandle( );
if ( meta == null )
{
DataSetUIUtil.updateColumnCache( dataSetHandle );
meta = dataSetHandle.getCachedMetaDataHandle( );
}
columnList = meta.getResultSet( ).getListValue( );
}
List<ComputedColumnHandle> bindingList = DEUtil.getAllColumnBindingList( this.bindingHolder,
true );
// For each data-field parameter, resolve the referenced binding or
// dataset column and verify its type is supported by the parameter.
loop: for ( IParameterDefn param : function.getParameterDefn( ) )
{
if ( param.isDataField( ) )
{
String[] expression = getControlValue( paramsMap.get( param.getName( ) ) );
if ( expression != null )
{
if ( bindingList != null )
{
String bindingName = ExpressionUtil.getColumnBindingName( expression[0] );
if ( bindingName != null )
for ( ComputedColumnHandle bindingHandle : bindingList )
{
if ( bindingHandle.getName( )
.equals( bindingName ) )
{
if ( !param.supportDataType( DataAdapterUtil.adaptModelDataType( bindingHandle.getDataType( ) ) ) )
{
if ( !canProcessParamTypeError( expression[0],
param.getDisplayName( ) ) )
{
return false;
}
continue loop;
}
}
}
}
if ( columnList != null )
{
String columnName = ExpressionUtil.getColumnName( expression[0] );
if ( columnName != null )
for ( ResultSetColumn column : columnList )
{
if ( column.getColumnName( )
.equals( columnName ) )
{
if ( !param.supportDataType( DataAdapterUtil.adaptModelDataType( column.getDataType( ) ) ) )
{
if ( !canProcessParamTypeError( expression[0],
param.getDisplayName( ) ) )
{
return false;
}
continue loop;
}
}
}
}
}
}
}
}
}
catch ( BirtException e )
{
// Best-effort check: type-validation failures never block saving.
}
return true;
}
/**
 * Warns the user that the selected data type does not match the type
 * recommended for the chosen aggregation function and asks whether to
 * continue.
 * (Fix: the yes/no button labels were looked up twice — the nested
 * Messages.getString call used the already-translated text as a resource
 * key, as the sibling canProcessParamTypeError shows the correct form.)
 *
 * @param function the selected function display name (unused in the text)
 * @param type the data type currently selected (unused in the text)
 * @param recommended the type recommended for the function
 * @return true when the user chooses to continue
 */
private boolean canProcessFunctionTypeError( String function, String type,
        String recommended )
{
    MessageDialog dialog = new MessageDialog( UIUtil.getDefaultShell( ),
            Messages.getString( "Warning" ), //$NON-NLS-1$
            null,
            Messages.getFormattedString( "BindingDialogHelper.warning.function", //$NON-NLS-1$
                    new String[]{
                        recommended
                    } ),
            MessageDialog.WARNING,
            new String[]{
                    Messages.getString( "BindingDialogHelper.warning.button.yes" ), Messages.getString( "BindingDialogHelper.warning.button.no" ) //$NON-NLS-1$ //$NON-NLS-2$
            },
            0 );
    // Index 0 is the "yes" button.
    return dialog.open( ) == 0;
}
/**
 * Asks the user whether to continue when an expression references a
 * binding/column whose data type is not supported by the function
 * parameter.
 *
 * @param expression the offending expression text
 * @param parameter  display name of the parameter being validated
 * @return true if the user chose the first button ("yes"), false otherwise
 */
private boolean canProcessParamTypeError( String expression,
		String parameter )
{
	String title = Messages.getString( "Warning" ); //$NON-NLS-1$
	String warning = Messages.getFormattedString( "BindingDialogHelper.warning.parameter", //$NON-NLS-1$
			new String[]{
					expression, parameter
			} );
	String[] buttonLabels = new String[]{
			Messages.getString( "BindingDialogHelper.warning.button.yes" ), Messages.getString( "BindingDialogHelper.warning.button.no" ) //$NON-NLS-1$ //$NON-NLS-2$
	};
	MessageDialog dialog = new MessageDialog( UIUtil.getDefaultShell( ),
			title,
			null,
			warning,
			MessageDialog.WARNING,
			buttonLabels,
			0 );
	return dialog.open( ) == 0;
}
/**
 * Returns the base names of all message files included in the current
 * report design.
 *
 * @return the include-resource base names, or null when the design
 *         declares none
 */
private String[] getBaseNames( )
{
	List<String> resources = SessionHandleAdapter.getInstance( )
			.getReportDesignHandle( )
			.getIncludeResources( );
	if ( resources == null )
	{
		return null;
	}
	return resources.toArray( new String[0] );
}
/**
 * Resolves every include-resource base name to a message-file URL,
 * keeping only those that can actually be found.
 *
 * @return the resolvable resource URLs; empty (never null) when there are
 *         no base names or none resolve
 */
private URL[] getAvailableResourceUrls( )
{
	List<URL> urls = new ArrayList<URL>( );
	String[] baseNames = getBaseNames( );
	if ( baseNames != null )
	{
		for ( String baseName : baseNames )
		{
			URL url = SessionHandleAdapter.getInstance( )
					.getReportDesignHandle( )
					.findResource( baseName,
							IResourceLocator.MESSAGE_FILE );
			if ( url != null )
				urls.add( url );
		}
	}
	return urls.toArray( new URL[0] );
}
/**
 * Resolves every include-resource base name to a message-file URL,
 * preserving position (an entry may be null when the resource cannot be
 * found).
 *
 * @return one URL slot per base name, or null when there are no base names
 */
private URL[] getResourceURLs( )
{
	String[] baseNames = getBaseNames( );
	if ( baseNames == null )
	{
		return null;
	}
	URL[] urls = new URL[baseNames.length];
	for ( int i = 0; i < baseNames.length; i++ )
	{
		urls[i] = SessionHandleAdapter.getInstance( )
				.getReportDesignHandle( )
				.findResource( baseNames[i],
						IResourceLocator.MESSAGE_FILE );
	}
	return urls;
}
/**
 * Enables the "remove display name ID" button only while the display name
 * ID text field is non-empty.
 */
private void updateRemoveBtnState( )
{
	// Simplified from the redundant "equals( EMPTY_STRING ) ? false : true".
	btnRemoveDisplayNameID.setEnabled( !txtDisplayNameID.getText( )
			.equals( EMPTY_STRING ) );
}
// Whether this dialog was opened in "edit modal" mode; see modifyDialogContent().
private boolean isEditModal = false;

/**
 * Sets whether the dialog operates in edit-modal mode.
 *
 * @param isEditModal true to enable edit-modal behavior
 */
public void setEditModal( boolean isEditModal )
{
	this.isEditModal = isEditModal;
}
/**
 * @return true if the dialog is operating in edit-modal mode
 */
public boolean isEditModal( )
{
	return isEditModal;
}
/**
 * Marks the dialog content as modified and re-validates, but only once per
 * edit-modal session and only after the dialog has finished initializing.
 */
private void modifyDialogContent( )
{
	// "!hasModified" replaces the anti-idiom "hasModified == false".
	if ( hasInitDialog && isEditModal( ) && !hasModified )
	{
		hasModified = true;
		validate( );
	}
}

// Set once the user first modifies the dialog content in edit-modal mode.
private boolean hasModified = false;

// Checkbox controlling whether the binding may be exported.
private Button btnAllowExport;
}
|
package com.ikanow.aleph2.data_import_manager.stream_enrichment.actors;
import java.io.File;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import scala.PartialFunction;
import scala.Tuple2;
import scala.runtime.BoxedUnit;
import akka.actor.AbstractActor;
import akka.actor.ActorRef;
import akka.actor.ActorSystem;
import akka.japi.pf.ReceiveBuilder;
import com.ikanow.aleph2.data_import.services.StreamingEnrichmentContext;
import com.ikanow.aleph2.data_import.stream_enrichment.storm.PassthroughTopology;
import com.ikanow.aleph2.data_import_manager.services.DataImportActorContext;
import com.ikanow.aleph2.data_import_manager.stream_enrichment.services.IStormController;
import com.ikanow.aleph2.data_import_manager.stream_enrichment.utils.StreamErrorUtils;
import com.ikanow.aleph2.data_import_manager.utils.ClassloaderUtils;
import com.ikanow.aleph2.data_import_manager.utils.JarCacheUtils;
import com.ikanow.aleph2.data_import_manager.utils.StormControllerUtil;
import com.ikanow.aleph2.data_model.interfaces.data_import.IEnrichmentStreamingModule;
import com.ikanow.aleph2.data_model.interfaces.data_import.IEnrichmentStreamingTopology;
import com.ikanow.aleph2.data_model.interfaces.data_import.IHarvestContext;
import com.ikanow.aleph2.data_model.interfaces.data_import.IHarvestTechnologyModule;
import com.ikanow.aleph2.data_model.interfaces.data_services.IManagementDbService;
import com.ikanow.aleph2.data_model.interfaces.data_services.IStorageService;
import com.ikanow.aleph2.data_model.objects.data_import.DataBucketBean;
import com.ikanow.aleph2.data_model.objects.shared.BasicMessageBean;
import com.ikanow.aleph2.data_model.objects.shared.GlobalPropertiesBean;
import com.ikanow.aleph2.data_model.objects.shared.SharedLibraryBean;
import com.ikanow.aleph2.data_model.utils.BeanTemplateUtils;
import com.ikanow.aleph2.data_model.utils.CrudUtils;
import com.ikanow.aleph2.data_model.utils.ErrorUtils;
import com.ikanow.aleph2.data_model.utils.Optionals;
import com.ikanow.aleph2.data_model.utils.Patterns;
import com.ikanow.aleph2.data_model.utils.Tuples;
import com.ikanow.aleph2.data_model.utils.BeanTemplateUtils.MethodNamingHelper;
import com.ikanow.aleph2.data_model.utils.CrudUtils.QueryComponent;
import com.ikanow.aleph2.data_model.utils.CrudUtils.SingleQueryComponent;
import com.ikanow.aleph2.distributed_services.services.ICoreDistributedServices;
import com.ikanow.aleph2.management_db.data_model.BucketActionMessage;
import com.ikanow.aleph2.management_db.data_model.BucketActionReplyMessage;
import com.ikanow.aleph2.management_db.data_model.BucketActionMessage.BucketActionOfferMessage;
import com.ikanow.aleph2.management_db.data_model.BucketActionReplyMessage.BucketActionHandlerMessage;
import fj.data.Either;
import fj.data.Validation;
/** This actor is responsible for supervising the job of handling changes to data
 * buckets on the "data import manager" end - specifically vs streaming enrichment (see harvest.DataBucketChangeActor for harvest related control)
 * @author acp
 */
@SuppressWarnings("unused")
public class DataBucketChangeActor extends AbstractActor {
	private static final Logger _logger = LogManager.getLogger();

	// Services
	protected final DataImportActorContext _context;
	protected final IManagementDbService _management_db;
	protected final ICoreDistributedServices _core_distributed_services;
	protected final ActorSystem _actor_system;
	protected final GlobalPropertiesBean _globals;
	protected final IStorageService _fs;
	protected final IStormController _storm_controller;

	/** The actor constructor - at some point all these things should be inserted by injection
	 */
	public DataBucketChangeActor() {
		_context = DataImportActorContext.get();
		_core_distributed_services = _context.getDistributedServices();
		_actor_system = _core_distributed_services.getAkkaSystem();
		_management_db = _context.getServiceContext().getCoreManagementDbService().readOnlyVersion();
		_globals = _context.getGlobalProperties();
		_fs = _context.getServiceContext().getStorageService();
		_storm_controller = _context.getStormController();
	}

	// Stateless actor

	/** Builds the actor's message handler: ignores messages addressed to other
	 *  hosts, answers offer messages based on local Storm availability, and
	 *  dispatches bucket-action messages to the streaming-enrichment pipeline.
	 * @see akka.actor.AbstractActor#receive()
	 */
	@Override
	public PartialFunction<Object, BoxedUnit> receive() {
		return ReceiveBuilder
			.match(BucketActionMessage.class,
				m -> !m.handling_clients().isEmpty() && !m.handling_clients().contains(_context.getInformationService().getHostname()),
				__ -> {}) // (do nothing if it's not for me)
			.match(BucketActionOfferMessage.class,
				m -> {
					_logger.info(ErrorUtils.get("Actor {0} received offer message {1} from {2}", this.self(), m.getClass().getSimpleName(), this.sender()));

					final ActorRef closing_sender = this.sender();
					final ActorRef closing_self = this.self();

					final String hostname = _context.getInformationService().getHostname();

					// (this isn't async so doesn't require any futures)
					// Accept the offer only if a local Storm config is present on this node:
					final boolean accept_or_ignore = new File(_globals.local_yarn_config_dir() + File.separator + "storm.yaml").exists();

					final BucketActionReplyMessage reply =
						accept_or_ignore
							? new BucketActionReplyMessage.BucketActionWillAcceptMessage(hostname)
							: new BucketActionReplyMessage.BucketActionIgnoredMessage(hostname);

					closing_sender.tell(reply, closing_self);
				})
			.match(BucketActionMessage.class,
				m -> {
					_logger.info(ErrorUtils.get("Actor {0} received message {1} from {2}", this.self(), m.getClass().getSimpleName(), this.sender()));

					final ActorRef closing_sender = this.sender();
					final ActorRef closing_self = this.self();

					final String hostname = _context.getInformationService().getHostname();

					// (cacheJars can't throw checked or unchecked in this thread, only from within exceptions)
					cacheJars(m.bucket(), _management_db, _globals, _fs, hostname, m)
						.thenComposeAsync(err_or_map -> {
							final StreamingEnrichmentContext e_context = _context.getNewStreamingEnrichmentContext();

							final Validation<BasicMessageBean, IEnrichmentStreamingTopology> err_or_tech_module =
								getStreamingTopology(m.bucket(), m, hostname, err_or_map);

							final CompletableFuture<BucketActionReplyMessage> ret = talkToStream(_storm_controller, m.bucket(), m, err_or_tech_module, err_or_map, hostname, e_context, _globals.local_yarn_config_dir());
							return ret;
						})
						.thenAccept(reply -> { // (reply can contain an error or successful reply, they're the same bean type)
							closing_sender.tell(reply, closing_self);
						})
						.exceptionally(e -> { // another bit of error handling that shouldn't ever be called but is a useful backstop
							final BasicMessageBean error_bean =
								StreamErrorUtils.buildErrorMessage(hostname, m,
									ErrorUtils.getLongForm(StreamErrorUtils.STREAM_UNKNOWN_ERROR, e, m.bucket().full_name())
								);
							closing_sender.tell(new BucketActionHandlerMessage(hostname, error_bean), closing_self);
							return null;
						})
						;
				})
			.build();
	}

	// Functional code - control logic

	/** Dispatches a bucket-action message to the Storm controller: start/stop/
	 *  restart the streaming job according to the concrete message type, or
	 *  reply with an error bean when the user topology could not be loaded.
	 * @param storm_controller controller used to manage the Storm job
	 * @param bucket the bucket being acted on
	 * @param m the action message being handled
	 * @param err_or_user_topology the user topology, or an error bean
	 * @param err_or_map the cached shared-library map, or an error bean
	 * @param source hostname used as the reply source
	 * @param context streaming enrichment context passed to the job
	 * @param yarn_config_dir local YARN config directory
	 * @return a future reply (success or error - same bean type)
	 */
	protected static CompletableFuture<BucketActionReplyMessage> talkToStream(
			final IStormController storm_controller,
			final DataBucketBean bucket,
			final BucketActionMessage m,
			final Validation<BasicMessageBean, IEnrichmentStreamingTopology> err_or_user_topology,
			final Validation<BasicMessageBean, Map<String, Tuple2<SharedLibraryBean, String>>> err_or_map,
			final String source,
			final StreamingEnrichmentContext context,
			final String yarn_config_dir
			)
	{
		try {
			//handle getting the user libs
			final List<String> user_lib_paths = err_or_map.<List<String>>validation(
					fail -> Collections.emptyList() // (going to die soon anyway)
					,
					success -> success.values().stream().map(tuple -> tuple._2).collect(Collectors.toList())
					);

			return err_or_user_topology.<CompletableFuture<BucketActionReplyMessage>>validation(
					//ERROR getting enrichment topology
					error -> {
						return CompletableFuture.completedFuture(new BucketActionHandlerMessage(source, error));
					},
					//NORMAL grab enrichment topology
					// (fixed "enrichment_toplogy" typo in the lambda parameter name)
					enrichment_topology -> {
						context.setBucket(bucket);
						// (use the lambda parameter directly instead of re-extracting it via err_or_user_topology.success())
						context.setUserTopologyEntryPoint(enrichment_topology.getClass().getName());
						return Patterns.match(m).<CompletableFuture<BucketActionReplyMessage>>andReturn()
							.when(BucketActionMessage.DeleteBucketActionMessage.class, msg -> {
								return StormControllerUtil.stopJob(storm_controller, bucket);
							})
							.when(BucketActionMessage.NewBucketActionMessage.class, msg -> {
								return StormControllerUtil.startJob(storm_controller, bucket, context, user_lib_paths, enrichment_topology);
							})
							.when(BucketActionMessage.UpdateBucketActionMessage.class, msg -> {
								if ( msg.is_enabled() )
									return StormControllerUtil.restartJob(storm_controller, bucket, context, user_lib_paths, enrichment_topology);
								else
									return StormControllerUtil.stopJob(storm_controller, bucket);
							})
							.when(BucketActionMessage.UpdateBucketStateActionMessage.class, msg -> {
								if ( msg.is_suspended() )
									return StormControllerUtil.stopJob(storm_controller, bucket);
								else
									return StormControllerUtil.startJob(storm_controller, bucket, context, user_lib_paths, enrichment_topology);
							})
							.otherwise(msg -> {
								return CompletableFuture.completedFuture(
										new BucketActionHandlerMessage(source, new BasicMessageBean(new Date(), false, null, "Unknown message", 0, "Unknown message", null)));
							});
					});
		} catch (Throwable e) { // (trying to use Validation to avoid this, but just in case...)
			return CompletableFuture.completedFuture(
					new BucketActionHandlerMessage(source, new BasicMessageBean(new Date(), false, null, ErrorUtils.getLongForm("Error loading streaming class: {0}", e), 0, ErrorUtils.getLongForm("Error loading streaming class: {0}", e), null)));
		}
	}

	// Functional code - Utility

	/** Talks to the topology module - this top level function just sets the classloader up and creates the module,
	 * then calls talkToStream to do the talking
	 * @param bucket the bucket whose topology is being loaded
	 * @param m the action message (used for error reporting)
	 * @param source hostname used as the error source
	 * @param err_or_libs the cached shared-library map, or an error bean
	 * @return the instantiated user topology (or the default passthrough), or an error bean
	 */
	protected static Validation<BasicMessageBean, IEnrichmentStreamingTopology> getStreamingTopology(
			final DataBucketBean bucket,
			final BucketActionMessage m,
			final String source,
			final Validation<BasicMessageBean, Map<String, Tuple2<SharedLibraryBean, String>>> err_or_libs // "pipeline element"
			)
	{
		try {
			return err_or_libs.<Validation<BasicMessageBean, IEnrichmentStreamingTopology>>validation(
					//Error:
					error -> Validation.fail(error)
					,
					// Normal
					libs -> {
						// Easy case, if libs is empty then use the default streaming topology
						if (libs.isEmpty()) {
							return Validation.success(new PassthroughTopology());
						}

						// Pick the first library that declares a streaming-enrichment (or misc) entry point:
						final Tuple2<SharedLibraryBean, String> libbean_path = libs.values().stream()
								.filter(t2 -> (null != t2._1()) &&
										(null != Optional.ofNullable(t2._1().streaming_enrichment_entry_point()).orElse(t2._1().misc_entry_point())))
								.findFirst()
								.orElse(null);
						if ((null == libbean_path) || (null == libbean_path._2())) { // Nice easy error case, probably can't ever happen
							return Validation.fail(
									StreamErrorUtils.buildErrorMessage(source, m,
											StreamErrorUtils.SHARED_LIBRARY_NAME_NOT_FOUND, bucket.full_name(), "(unknown)"));
						}

						final Validation<BasicMessageBean, IEnrichmentStreamingTopology> ret_val =
								ClassloaderUtils.getFromCustomClasspath(IEnrichmentStreamingTopology.class,
										Optional.ofNullable(libbean_path._1().streaming_enrichment_entry_point()).orElse(libbean_path._1().misc_entry_point()),
										Optional.of(libbean_path._2()),
										libs.values().stream().map(lp -> lp._2()).collect(Collectors.toList()),
										source, m);
						return ret_val;
					});
		}
		catch (Throwable t) {
			return Validation.fail(
					StreamErrorUtils.buildErrorMessage(source, m,
							ErrorUtils.getLongForm(StreamErrorUtils.ERROR_LOADING_CLASS, t, bucket.harvest_technology_name_or_id())));
		}
	}

	/** Given a bucket ...returns either - a future containing the first error encountered, _or_ a map (both name and id as keys) of path names
	 * (and guarantee that the file has been cached when the future completes)
	 * @param bucket the bucket whose shared libraries should be cached
	 * @param management_db read-only management DB used to look up library beans
	 * @param globals global properties (provides the local cache dir)
	 * @param fs storage service the jars are fetched from
	 * @param handler_for_errors hostname used as the error source
	 * @param msg_for_errors original message, echoed into error beans
	 * @return a future containing the first error encountered, _or_ a map (both name and id as keys) of path names
	 */
	@SuppressWarnings("unchecked")
	protected static <M> CompletableFuture<Validation<BasicMessageBean, Map<String, Tuple2<SharedLibraryBean, String>>>>
		cacheJars(
				final DataBucketBean bucket,
				final IManagementDbService management_db,
				final GlobalPropertiesBean globals,
				final IStorageService fs,
				final String handler_for_errors,
				final M msg_for_errors
				)
	{
		try {
			MethodNamingHelper<SharedLibraryBean> helper = BeanTemplateUtils.from(SharedLibraryBean.class);
			final Optional<QueryComponent<SharedLibraryBean>> spec = getQuery(bucket);
			if (!spec.isPresent()) {
				return CompletableFuture.completedFuture(Validation.<BasicMessageBean, Map<String, Tuple2<SharedLibraryBean, String>>>success(Collections.emptyMap()));
			}

			return management_db.getSharedLibraryStore().getObjectsBySpec(
					spec.get(),
					Arrays.asList(
							helper.field(SharedLibraryBean::_id),
							helper.field(SharedLibraryBean::path_name),
							helper.field(SharedLibraryBean::misc_entry_point),
							helper.field(SharedLibraryBean::streaming_enrichment_entry_point)
							),
					true)
					.thenComposeAsync(cursor -> {
						// This is a map of futures from the cache call - either an error or the path name
						// note we use a tuple of (id, name) as the key and then flatten out later
						final Map<Tuple2<String, String>, Tuple2<SharedLibraryBean, CompletableFuture<Validation<BasicMessageBean, String>>>> map_of_futures =
							StreamSupport.stream(cursor.spliterator(), true)
								.filter(lib -> {
									// (placeholder filter - currently accepts every library)
									return true;
								})
								.collect(Collectors.<SharedLibraryBean, Tuple2<String, String>, Tuple2<SharedLibraryBean, CompletableFuture<Validation<BasicMessageBean, String>>>>
									toMap(
										// want to keep both the name and id versions - will flatten out below
										lib -> Tuples._2T(lib.path_name(), lib._id()), //(key)
										// spin off a future in which the file is being copied - save the shared library bean also
										lib -> Tuples._2T(lib, // (value)
												JarCacheUtils.getCachedJar(globals.local_cached_jar_dir(), lib, fs, handler_for_errors, msg_for_errors))));

						// denest from map of futures to future of maps, also handle any errors here:
						// (some sort of "lift" function would be useful here - this are a somewhat inelegant few steps)

						final CompletableFuture<Validation<BasicMessageBean, String>>[] futures =
							(CompletableFuture<Validation<BasicMessageBean, String>>[]) map_of_futures
								.values()
								.stream().map(t2 -> t2._2()).collect(Collectors.toList())
								.toArray(new CompletableFuture[0]);

						// (have to embed this thenApply instead of bringing it outside as part of the toCompose chain, because otherwise we'd lose map_of_futures scope)
						return CompletableFuture.allOf(futures).<Validation<BasicMessageBean, Map<String, Tuple2<SharedLibraryBean, String>>>>thenApply(f -> {
							try {
								final Map<String, Tuple2<SharedLibraryBean, String>> almost_there = map_of_futures.entrySet().stream()
									.flatMap(kv -> {
										final Validation<BasicMessageBean, String> ret = kv.getValue()._2().join(); // (must have already returned if here
										return ret.<Stream<Tuple2<String, Tuple2<SharedLibraryBean, String>>>>
											validation(
												//Error:
												err -> { throw new RuntimeException(err.message()); } // (not ideal, but will do)
												,
												// Normal:
												s -> {
													return Arrays.asList(
															Tuples._2T(kv.getKey()._1(), Tuples._2T(kv.getValue()._1(), s)), // result object with path_name
															Tuples._2T(kv.getKey()._2(), Tuples._2T(kv.getValue()._1(), s))) // result object with id
															.stream();
												});
									})
									.collect(Collectors.<Tuple2<String, Tuple2<SharedLibraryBean, String>>, String, Tuple2<SharedLibraryBean, String>>
										toMap(
											idname_path -> idname_path._1(), //(key)
											idname_path -> idname_path._2() // (value)
											))
									;

								return Validation.<BasicMessageBean, Map<String, Tuple2<SharedLibraryBean, String>>>success(almost_there);
							}
							catch (Exception e) { // handle the exception thrown above containing the message bean from whatever the original error was!
								// (removed the redundant .toString() - handler_for_errors is already a String)
								return Validation.<BasicMessageBean, Map<String, Tuple2<SharedLibraryBean, String>>>fail(
										StreamErrorUtils.buildErrorMessage(handler_for_errors, msg_for_errors,
												e.getMessage()));
							}
						});
					});
		}
		catch (Throwable e) { // (can only occur if the DB call errors)
			// (removed the redundant .toString() - handler_for_errors is already a String)
			return CompletableFuture.completedFuture(
					Validation.fail(StreamErrorUtils.buildErrorMessage(handler_for_errors, msg_for_errors,
							ErrorUtils.getLongForm(StreamErrorUtils.ERROR_CACHING_SHARED_LIBS, e, bucket.full_name())
							)));
		}
	}

	/** Creates a query component to get all the shared library beans i need
	 * @param bucket the bucket whose streaming-enrichment library ids/names are queried
	 * @return a query matching each library by _id or path_name, or Optional.empty() when the bucket declares none
	 */
	protected static Optional<QueryComponent<SharedLibraryBean>> getQuery(
			final DataBucketBean bucket)
	{
		final Stream<SingleQueryComponent<SharedLibraryBean>> libs =
			Optionals.ofNullable(
					Optional.ofNullable(bucket.streaming_enrichment_topology())
						.map(t -> t.library_ids_or_names())
						.orElse(Collections.emptyList()))
				.stream()
				.map(name -> {
					return CrudUtils.anyOf(SharedLibraryBean.class)
							.when(SharedLibraryBean::_id, name)
							.when(SharedLibraryBean::path_name, name);
				});

		final CrudUtils.MultiQueryComponent<SharedLibraryBean> mqc = CrudUtils.<SharedLibraryBean>anyOf(libs);
		return mqc.getElements().isEmpty() ? Optional.empty() : Optional.of(mqc);
	}
}
|
package org.deviceconnect.android.deviceplugin.host.profile;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.deviceconnect.android.message.MessageUtils;
import org.deviceconnect.android.profile.SettingsProfile;
import org.deviceconnect.message.DConnectMessage;
import android.content.Context;
import android.content.Intent;
import android.media.AudioManager;
import android.provider.Settings;
import android.util.Log;
/**
 * Settings profile for the Host device plug-in: exposes sound volume,
 * date, display brightness and screen-off timeout over Device Connect.
 *
 * @author NTT DOCOMO, INC.
 */
public class HostSettingsProfile extends SettingsProfile {

    /** Log tag (was a placeholder "ABC" before). */
    private static final String TAG = "HostSettingsProfile";

    /** Maximum Android screen-brightness value (SCREEN_BRIGHTNESS is 0-255). */
    private static final int MAX_LIGHT_LEVEL = 255;

    /** Pre-compiled service-id pattern (previously recompiled on every check). */
    private static final Pattern SERVICE_ID_PATTERN =
            Pattern.compile(HostServiceDiscoveryProfile.SERVICE_ID);

    // NOTE(review): SimpleDateFormat is not thread-safe; assumes this profile
    // is only invoked from a single handler thread - confirm before sharing.
    private SimpleDateFormat mDateFormat =
            new SimpleDateFormat("yyyy'-'MM'-'dd' 'kk':'mm':'ss'+0900'",
                    Locale.getDefault());

    /**
     * Maps a volume kind to the corresponding Android audio stream type.
     *
     * @param kind volume kind from the request
     * @return the {@code AudioManager.STREAM_*} constant, or null when the
     *         kind has no corresponding stream (OTHER or unknown)
     */
    private Integer getStreamType(final VolumeKind kind) {
        if (kind == VolumeKind.ALARM) {
            return AudioManager.STREAM_ALARM;
        } else if (kind == VolumeKind.CALL) {
            return AudioManager.STREAM_VOICE_CALL;
        } else if (kind == VolumeKind.RINGTONE || kind == VolumeKind.MAIL) {
            // MAIL intentionally shares the ringtone stream.
            return AudioManager.STREAM_RING;
        } else if (kind == VolumeKind.MEDIA_PLAYER) {
            return AudioManager.STREAM_MUSIC;
        }
        return null;
    }

    /**
     * Sets the appropriate error for a volume kind that has no audio stream:
     * "not supported" for OTHER, "invalid parameter" otherwise.
     *
     * @param response response Intent to populate
     * @param kind the unmapped volume kind
     */
    private void setVolumeKindError(final Intent response, final VolumeKind kind) {
        if (kind == VolumeKind.OTHER) {
            MessageUtils.setNotSupportAttributeError(response,
                    "volume type is not support.");
        } else {
            MessageUtils.setInvalidRequestParameterError(response,
                    "type is invalid.");
        }
    }

    /**
     * Returns the current volume of the requested kind as a 0.0-1.0 level.
     */
    @Override
    protected boolean onGetSoundVolume(final Intent request, final Intent response, final String serviceId,
            final VolumeKind kind) {
        if (serviceId == null) {
            createEmptyServiceId(response);
        } else if (!checkServiceId(serviceId)) {
            createNotFoundService(response);
        } else {
            AudioManager manager = (AudioManager) getContext()
                    .getSystemService(Context.AUDIO_SERVICE);
            Integer streamType = getStreamType(kind);
            if (streamType == null) {
                setVolumeKindError(response, kind);
            } else {
                double volume = manager.getStreamVolume(streamType);
                double maxVolume = manager.getStreamMaxVolume(streamType);
                setResult(response, DConnectMessage.RESULT_OK);
                setVolumeLevel(response, volume / maxVolume);
            }
        }
        return true;
    }

    /**
     * Returns the current device date/time formatted with a +0900 offset.
     */
    @Override
    protected boolean onGetDate(final Intent request, final Intent response, final String serviceId) {
        if (serviceId == null) {
            createEmptyServiceId(response);
        } else if (!checkServiceId(serviceId)) {
            createNotFoundService(response);
        } else {
            setDate(response, mDateFormat.format(new Date()));
            setResult(response, DConnectMessage.RESULT_OK);
        }
        return true;
    }

    /**
     * Returns the current screen brightness as a 0.0-1.0 level.
     */
    @Override
    protected boolean onGetDisplayLight(final Intent request, final Intent response, final String serviceId) {
        if (serviceId == null) {
            createEmptyServiceId(response);
        } else if (!checkServiceId(serviceId)) {
            createNotFoundService(response);
        } else {
            try {
                // SCREEN_BRIGHTNESS is an int in the range 0-255.
                double level = Settings.System.getInt(getContext().getContentResolver(),
                        Settings.System.SCREEN_BRIGHTNESS, 0);
                double maxLevel = MAX_LIGHT_LEVEL;
                setLightLevel(response, level / maxLevel);
                setResult(response, DConnectMessage.RESULT_OK);
            } catch (Exception e) {
                // Fix: previously logged with placeholder tag "ABC" and a blank message.
                Log.e(TAG, "Failed to read screen brightness.", e);
            }
        }
        return true;
    }

    /**
     * Returns the screen-off timeout in milliseconds.
     */
    @Override
    protected boolean onGetDisplaySleep(final Intent request, final Intent response, final String serviceId) {
        if (serviceId == null) {
            createEmptyServiceId(response);
        } else if (!checkServiceId(serviceId)) {
            createNotFoundService(response);
        } else {
            int timeout = Settings.System.getInt(getContext().getContentResolver(),
                    Settings.System.SCREEN_OFF_TIMEOUT, 0);
            setTime(response, timeout);
            setResult(response, DConnectMessage.RESULT_OK);
        }
        return true;
    }

    /**
     * Sets the volume of the requested kind from a 0.0-1.0 level.
     */
    @Override
    protected boolean onPutSoundVolume(final Intent request, final Intent response, final String serviceId,
            final VolumeKind kind, final Double level) {
        if (serviceId == null) {
            createEmptyServiceId(response);
        } else if (!checkServiceId(serviceId)) {
            createNotFoundService(response);
        } else {
            if (level == null || level < 0.0 || level > 1.0) {
                MessageUtils.setInvalidRequestParameterError(response,
                        "level is invalid.");
                return true;
            }
            AudioManager manager = (AudioManager) getContext()
                    .getSystemService(Context.AUDIO_SERVICE);
            Integer streamType = getStreamType(kind);
            if (streamType == null) {
                setVolumeKindError(response, kind);
            } else {
                double maxVolume = manager.getStreamMaxVolume(streamType);
                manager.setStreamVolume(streamType, (int) (maxVolume * level), 1);
                setResult(response, DConnectMessage.RESULT_OK);
            }
        }
        return true;
    }

    /**
     * Sets the screen brightness from a 0.0-1.0 level.
     */
    @Override
    protected boolean onPutDisplayLight(final Intent request, final Intent response,
            final String serviceId, final Double level) {
        if (serviceId == null) {
            createEmptyServiceId(response);
        } else if (!checkServiceId(serviceId)) {
            createNotFoundService(response);
        } else {
            if (level == null || level < 0 || level > 1.0) {
                MessageUtils.setInvalidRequestParameterError(response,
                        "level is invalid.");
                return true;
            }
            Settings.System.putInt(getContext().getContentResolver(),
                    Settings.System.SCREEN_BRIGHTNESS, (int) (MAX_LIGHT_LEVEL * level));
            setResult(response, DConnectMessage.RESULT_OK);
        }
        return true;
    }

    /**
     * Sets the screen-off timeout in milliseconds.
     */
    @Override
    protected boolean onPutDisplaySleep(final Intent request, final Intent response,
            final String serviceId, final Integer time) {
        if (serviceId == null) {
            createEmptyServiceId(response);
        } else if (!checkServiceId(serviceId)) {
            createNotFoundService(response);
        } else {
            if (time == null || time < 0) {
                MessageUtils.setInvalidRequestParameterError(response,
                        "time is invalid.");
                return true;
            }
            Settings.System.putInt(getContext().getContentResolver(), Settings.System.SCREEN_OFF_TIMEOUT, time);
            setResult(response, DConnectMessage.RESULT_OK);
        }
        return true;
    }

    /**
     * Checks whether the service id matches this plug-in's service id pattern.
     *
     * @param serviceId service id from the request
     * @return true if <code>serviceId</code> matches, false otherwise
     */
    private boolean checkServiceId(final String serviceId) {
        Matcher match = SERVICE_ID_PATTERN.matcher(serviceId);
        return match.find();
    }

    /**
     * Sets the "empty service id" error on the response.
     *
     * @param response response Intent
     */
    private void createEmptyServiceId(final Intent response) {
        MessageUtils.setEmptyServiceIdError(response);
    }

    /**
     * Sets the "service not found" error on the response.
     *
     * @param response response Intent
     */
    private void createNotFoundService(final Intent response) {
        MessageUtils.setNotFoundServiceError(response);
    }
}
|
//$HeadURL$
package org.deegree.services.wfs.encoding;
import static java.util.Arrays.asList;
import static java.util.Collections.singletonList;
import static org.deegree.protocol.wfs.WFSRequestType.CreateStoredQuery;
import static org.deegree.protocol.wfs.WFSRequestType.DescribeFeatureType;
import static org.deegree.protocol.wfs.WFSRequestType.GetCapabilities;
import static org.deegree.protocol.wfs.WFSRequestType.GetFeatureWithLock;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.util.HashSet;
import org.junit.Test;
/**
 * @author <a href="mailto:goltz@lat-lon.de">Lyn Goltz</a>
 */
public class LimitedSupportedEncodingsTest {

    private LimitedSupportedEncodings limitedSupportedEncodings = prepareLimitedSupportedEncodings();

    @Test
    public void testIsEncodingSupportedUnsupportedRequestType() {
        // GetFeatureWithLock was never registered, so every encoding must be rejected.
        for ( String encoding : new String[]{ "xml", "kvp", "soap" } ) {
            assertFalse( limitedSupportedEncodings.isEncodingSupported( GetFeatureWithLock, encoding ) );
        }
    }

    @Test
    public void testIsEncodingSupportedSupportedRequestType() {
        // GetCapabilities was registered with all three encodings.
        for ( String encoding : new String[]{ "xml", "kvp", "soap" } ) {
            assertTrue( limitedSupportedEncodings.isEncodingSupported( GetCapabilities, encoding ) );
        }
    }

    @Test
    public void testIsEncodingSupportedPartlySupportedRequestType() {
        // DescribeFeatureType was registered with kvp and soap only.
        assertTrue( limitedSupportedEncodings.isEncodingSupported( DescribeFeatureType, "kvp" ) );
        assertTrue( limitedSupportedEncodings.isEncodingSupported( DescribeFeatureType, "soap" ) );
        assertFalse( limitedSupportedEncodings.isEncodingSupported( DescribeFeatureType, "xml" ) );
    }

    private LimitedSupportedEncodings prepareLimitedSupportedEncodings() {
        LimitedSupportedEncodings encodings = new LimitedSupportedEncodings();
        encodings.addEnabledEncodings( CreateStoredQuery, encodingSet( "kvp" ) );
        encodings.addEnabledEncodings( DescribeFeatureType, encodingSet( "kvp", "soap" ) );
        encodings.addEnabledEncodings( GetCapabilities, encodingSet( "kvp", "soap", "xml" ) );
        return encodings;
    }

    // Small helper so each registration line reads uniformly.
    private HashSet<String> encodingSet( String... encodings ) {
        return new HashSet<String>( asList( encodings ) );
    }
}
|
package org.drools.grid.internal.responsehandlers;
import org.drools.grid.io.Conversation;
import org.drools.grid.io.IoWriter;
import org.drools.grid.io.Message;
import org.drools.grid.io.MessageReceiverHandler;
/**
 * A {@link MessageReceiverHandler} that blocks the calling thread until a
 * {@link Message} has been received, or a timeout elapses.
 */
public class BlockingMessageResponseHandler extends AbstractBlockingResponseHandler
    implements
    MessageReceiverHandler {

    /** Default maximum time to wait for a message, in milliseconds. */
    private static final int WAIT_TIME = 60000;

    // volatile: written by the I/O thread in messageReceived() and read by
    // the thread blocked in getMessage().
    private volatile Message message;

    /**
     * Stores the received message and releases any thread blocked in
     * {@link #getMessage()}.
     */
    public void messageReceived(Conversation conversation,
                                Message message) {
        this.message = message;
        setDone( true );
    }

    /** Waits up to the default timeout ({@value #WAIT_TIME} ms) for the message. */
    public Message getMessage() {
        return getMessage( WAIT_TIME );
    }

    /**
     * Waits up to the given time for the message.
     *
     * @param waitTime maximum time to wait, in milliseconds
     * @return the received message
     * @throws RuntimeException if no message arrived within the timeout
     */
    public Message getMessage(long waitTime) {
        boolean done = waitTillDone( waitTime );
        if ( !done ) {
            // Fix: the message previously said "unable to retrieve Object Id",
            // a copy-paste from another handler - this one waits for a Message.
            throw new RuntimeException( "Timeout : unable to retrieve Message" );
        }
        return this.message;
    }
}
|
package net.echinopsii.ariane.community.core.mapping.ds.blueprintsimpl.graphdb;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.JsonMappingException;
import net.echinopsii.ariane.community.core.mapping.ds.MappingDSGraphPropertyNames;
import net.echinopsii.ariane.community.core.mapping.ds.blueprintsimpl.cache.MappingDSCache;
import net.echinopsii.ariane.community.core.mapping.ds.blueprintsimpl.cache.MappingDSCacheEntity;
import net.echinopsii.ariane.community.core.mapping.ds.blueprintsimpl.cfg.MappingDSCfgLoader;
import net.echinopsii.ariane.community.core.mapping.ds.blueprintsimpl.domain.*;
import net.echinopsii.ariane.community.core.mapping.ds.dsl.MapperExecutor;
import com.tinkerpop.blueprints.*;
import com.tinkerpop.blueprints.impls.neo4j2.Neo4j2Graph;
import org.neo4j.graphdb.GraphDatabaseService;
import org.neo4j.graphdb.factory.GraphDatabaseFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.*;
public class MappingDSGraphDB {
private final static String BLUEPRINTS_IMPL_N4J = "Neo4j";
private final static Logger log = LoggerFactory.getLogger(MappingDSGraphDB.class);
private static String blpImpl = null;
private static MappingDSGraphDBNeo4jBootstrapper neoBootstrapper = null;
private static Graph ccgraph = null;
private static MapperExecutor executor = null;
private static Vertex idmanager = null;
private static HashMap<Long, Boolean> autocommit = new HashMap<Long, Boolean>();
public static boolean isBlueprintsNeo4j() {
return (blpImpl.equals(BLUEPRINTS_IMPL_N4J));
}
public static boolean init(Dictionary<Object, Object> properties) throws JsonParseException, JsonMappingException, IOException {
if (properties != null) {
return MappingDSCfgLoader.load(properties);
} else {
return false;
}
}
    /**
     * Boots the graph back-end from the configuration loaded by init():
     * opens (or bootstraps) the Neo4j database, creates Cypher label indexes
     * and blueprints key indexes, and ensures the "idmanager" vertex (graph
     * vertex ID 0) holding the vertex/edge ID cursors exists.
     *
     * @return true on success; false when the configuration is missing/invalid
     *         or the Neo4j database could not be initialized.
     */
    public static boolean start() {
        // A usable config needs at least one of: blueprints URL, embedded graph path, neo4j config file.
        if (MappingDSCfgLoader.getDefaultCfgEntity() != null && (MappingDSCfgLoader.getDefaultCfgEntity().getBlueprintsURL() != null ||
                MappingDSCfgLoader.getDefaultCfgEntity().getBlueprintsGraphPath()!=null ||
                MappingDSCfgLoader.getDefaultCfgEntity().getBlueprintsNeoConfigFile()!=null)) {
            blpImpl = MappingDSCfgLoader.getDefaultCfgEntity().getBlueprintsImplementation();
            switch (blpImpl) {
                case BLUEPRINTS_IMPL_N4J:
                    GraphDatabaseService graphDb = null;
                    // Embedded graph path takes precedence over the neo4j config-file bootstrapper.
                    if (MappingDSCfgLoader.getDefaultCfgEntity().getBlueprintsGraphPath() != null) {
                        String graphPath = MappingDSCfgLoader.getDefaultCfgEntity().getBlueprintsGraphPath();
                        graphDb = new GraphDatabaseFactory().newEmbeddedDatabase( graphPath );
                    } else if (MappingDSCfgLoader.getDefaultCfgEntity().getBlueprintsNeoConfigFile() != null) {
                        String neo4jConfigFilePath = MappingDSCfgLoader.getDefaultCfgEntity().getBlueprintsNeoConfigFile();
                        // Keep the bootstrapper: stop() shuts the DB down through it in this mode.
                        neoBootstrapper = new MappingDSGraphDBNeo4jBootstrapper().start(neo4jConfigFilePath);
                        graphDb = neoBootstrapper.getDatabase();
                    }
                    if (graphDb!=null) {
                        ccgraph = new Neo4j2Graph(graphDb);
                        executor = new MapperExecutor(graphDb);
                        log.debug("{} is started", new Object[]{ccgraph.toString()});
                        log.debug(ccgraph.getFeatures().toString());
                    } else {
                        log.error("Unable to init Neo4J graph DB !");
                        return false;
                    }
                    break;
                default:
                    log.error("This target MappingDS blueprints implementation {} is not managed by MappingDS Blueprints !", new Object[]{blpImpl});
                    // NOTE(review): pattern has two {} placeholders but only one argument is supplied.
                    log.error("List of valid target MappingDS blueprints implementation : {}, {}", new Object[]{BLUEPRINTS_IMPL_N4J});
                    return false;
            }
            // Neo4j-specific Cypher label indexes on the per-type lookup keys.
            if (blpImpl.equals(BLUEPRINTS_IMPL_N4J)) {
                executor.execute("CYPHER create index on:cluster("+MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_ID+")");
                executor.execute("CYPHER create index on:cluster("+MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_TYPE_KEY+")");
                executor.execute("CYPHER create index on:cluster("+MappingDSGraphPropertyNames.DD_CLUSTER_NAME_KEY+")");
                executor.execute("CYPHER create index on:container("+MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_ID+")");
                executor.execute("CYPHER create index on:container("+MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_TYPE_KEY+")");
                executor.execute("CYPHER create index on:container("+MappingDSGraphPropertyNames.DD_CONTAINER_PAGATE_KEY+")");
                executor.execute("CYPHER create index on:node("+MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_ID+")");
                executor.execute("CYPHER create index on:node("+MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_TYPE_KEY+")");
                executor.execute("CYPHER create index on:node("+MappingDSGraphPropertyNames.DD_NODE_NAME_KEY+")");
                executor.execute("CYPHER create index on:gate("+MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_ID+")");
                executor.execute("CYPHER create index on:gate("+MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_TYPE_KEY+")");
                executor.execute("CYPHER create index on:gate("+MappingDSGraphPropertyNames.DD_GATE_PAEP_KEY+")");
                executor.execute("CYPHER create index on:gate("+MappingDSGraphPropertyNames.DD_NODE_NAME_KEY+")");
                executor.execute("CYPHER create index on:endpoint("+MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_ID+")");
                executor.execute("CYPHER create index on:endpoint("+MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_TYPE_KEY+")");
                executor.execute("CYPHER create index on:endpoint("+MappingDSGraphPropertyNames.DD_ENDPOINT_URL_KEY+")");
                executor.execute("CYPHER create index on:transport("+MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_ID+")");
                executor.execute("CYPHER create index on:transport("+MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_TYPE_KEY+")");
                executor.execute("CYPHER create index on:transport("+MappingDSGraphPropertyNames.DD_TRANSPORT_NAME_KEY+")");
            }
            // Blueprints key indexes for graph-wide key/value vertex and edge lookups.
            if (ccgraph instanceof KeyIndexableGraph) {
                log.debug("Create index for {} ...", MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_ID);
                ((KeyIndexableGraph) ccgraph).createKeyIndex(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_ID, Vertex.class);
                log.debug("Create index for {} ...", MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_TYPE_KEY);
                ((KeyIndexableGraph) ccgraph).createKeyIndex(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_TYPE_KEY, Vertex.class);
                log.debug("Create index for {} ...", MappingDSGraphPropertyNames.DD_CLUSTER_NAME_KEY);
                ((KeyIndexableGraph) ccgraph).createKeyIndex(MappingDSGraphPropertyNames.DD_CLUSTER_NAME_KEY, Vertex.class);
                log.debug("Create index for {} ...", MappingDSGraphPropertyNames.DD_CONTAINER_PAGATE_KEY);
                ((KeyIndexableGraph) ccgraph).createKeyIndex(MappingDSGraphPropertyNames.DD_CONTAINER_PAGATE_KEY, Vertex.class);
                log.debug("Create index for {} ...", MappingDSGraphPropertyNames.DD_NODE_NAME_KEY);
                ((KeyIndexableGraph) ccgraph).createKeyIndex(MappingDSGraphPropertyNames.DD_NODE_NAME_KEY, Vertex.class);
                log.debug("Create index for {} ...", MappingDSGraphPropertyNames.DD_GATE_PAEP_KEY);
                ((KeyIndexableGraph) ccgraph).createKeyIndex(MappingDSGraphPropertyNames.DD_GATE_PAEP_KEY, Vertex.class);
                log.debug("Create index for {} ...", MappingDSGraphPropertyNames.DD_ENDPOINT_URL_KEY);
                ((KeyIndexableGraph) ccgraph).createKeyIndex(MappingDSGraphPropertyNames.DD_ENDPOINT_URL_KEY, Vertex.class);
                log.debug("Create index for {} ...", MappingDSGraphPropertyNames.DD_TRANSPORT_NAME_KEY);
                ((KeyIndexableGraph) ccgraph).createKeyIndex(MappingDSGraphPropertyNames.DD_TRANSPORT_NAME_KEY, Vertex.class);
                log.debug("Create index for {} ...", MappingDSGraphPropertyNames.DD_GRAPH_EDGE_ID);
                ((KeyIndexableGraph) ccgraph).createKeyIndex(MappingDSGraphPropertyNames.DD_GRAPH_EDGE_ID, Edge.class);
            }
            log.debug("Retrieve Mapping ID manager vertex if exists...");
            // The idmanager vertex (DD_GRAPH_VERTEX_ID == 0) stores the vertex/edge ID max cursors.
            idmanager = ccgraph.getVertices(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_ID, (long) 0).iterator().hasNext() ?
                    ccgraph.getVertices(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_ID, (long) 0).iterator().next() : null;
            if (idmanager == null) {
                // First start on an empty DB: create the idmanager vertex with zeroed cursors.
                log.debug("Initialize Mapping Blueprints DB...");
                idmanager = ccgraph.addVertex(null);
                idmanager.setProperty(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_ID, (long) 0);
                idmanager.setProperty(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_MAXCUR_KEY, (long) 0);
                idmanager.setProperty(MappingDSGraphPropertyNames.DD_GRAPH_EDGE_MAXCUR_KEY, (long) 0);
                autocommit();
            }
            log.debug("Mapping blueprints DB is started !");
            return true;
        } else {
            return false;
        }
    }
    /**
     * Stops the graph DB: flushes the entity cache to the DB, then shuts the
     * underlying database down — through the Neo4j bootstrapper when a neo4j
     * config file was used, otherwise via the blueprints graph itself.
     * NOTE(review): assumes start() succeeded — ccgraph/blpImpl are dereferenced unconditionally.
     */
    public static void stop() {
        try {
            MappingDSCache.synchronizeToDB();
        } catch (MappingDSGraphDBException E) {
            // Best-effort flush: log and continue to shutdown regardless.
            String msg = "Exception while synchronizing MappingDSCache...";
            E.printStackTrace();
            log.error(msg);
        } finally {
            // Capture the graph description before shutdown so it can still be logged afterwards.
            String ddgraphinfo = ccgraph.toString();
            if (blpImpl.equals(BLUEPRINTS_IMPL_N4J) && MappingDSCfgLoader.getDefaultCfgEntity().getBlueprintsNeoConfigFile() != null) {
                neoBootstrapper.stop();
            } else {
                ccgraph.shutdown();
            }
            log.debug("{} is stopped", new Object[]{ddgraphinfo});
        }
    }
public static synchronized void setAutocommit(boolean autocommit) {
Long threadID = Thread.currentThread().getId();
log.debug("Autocommit mode is {} for thread {}", new Object[]{(autocommit ? "activated" : "deactivated"), Thread.currentThread().getName()});
MappingDSGraphDB.autocommit.put(threadID, autocommit);
}
public static void autocommit() {
if (ccgraph instanceof TransactionalGraph) {
Long threadID = Thread.currentThread().getId();
boolean isThreadWithAutoCommitMode = true;
if (autocommit.containsKey(threadID)) {
isThreadWithAutoCommitMode = autocommit.get(threadID);
}
log.debug("Auto commit ({}) for thread {}", new Object[]{isThreadWithAutoCommitMode, threadID});
if (isThreadWithAutoCommitMode) {
log.debug("Auto commit operation...");
((TransactionalGraph) ccgraph).commit();
}
}
}
public static void commit() {
if (ccgraph instanceof TransactionalGraph) {
log.debug("Commit operation...");
((TransactionalGraph) ccgraph).commit();
}
}
public static void autorollback() {
if (ccgraph instanceof TransactionalGraph) {
Long threadID = Thread.currentThread().getId();
boolean isThreadWithAutoCommitMode = true;
if (autocommit.containsKey(threadID)) {
isThreadWithAutoCommitMode = autocommit.get(threadID);
}
if (isThreadWithAutoCommitMode) {
log.error("Auto rollback operation...");
((TransactionalGraph) ccgraph).rollback();
}
}
}
public static void rollback() {
if (ccgraph instanceof TransactionalGraph) {
log.error("Rollback operation...");
((TransactionalGraph) ccgraph).rollback();
}
}
public static Map<String, String> executeQuery(String query) {
Map<String,String> ret = null;
switch (blpImpl) {
case BLUEPRINTS_IMPL_N4J:
ret = executor.execute(query);
break;
default:
log.error("Mapper DSL is not implemented yet for this MappingDS blueprints implementation !", new Object[]{blpImpl});
break;
}
return ret;
}
public static Graph getDDgraph() {
return ccgraph;
}
private static synchronized long incrementVertexMaxCursor() throws MappingDSGraphDBException {
try {
long countProp = idmanager.getProperty(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_MAXCUR_KEY);
countProp++;
idmanager.setProperty(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_MAXCUR_KEY, countProp++);
} catch (Exception E) {
String msg = "Exception while incrementing vertex max cursor count...";
log.error(msg);
E.printStackTrace();
log.error("Raise exception for rollback...");
throw new MappingDSGraphDBException(msg);
}
return getVertexMaxCursor();
}
private static synchronized void decrementVertexMaxCursor() throws MappingDSGraphDBException {
try {
long countProp = idmanager.getProperty(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_MAXCUR_KEY);
countProp
idmanager.setProperty(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_MAXCUR_KEY, countProp
} catch (Exception E) {
String msg = "Exception catched while decrementing vertex max cursor count...";
log.error(msg);
E.printStackTrace();
log.error("Raise exception for rollback...");
throw new MappingDSGraphDBException(msg);
}
}
public static synchronized long getVertexMaxCursor() {
return idmanager.getProperty(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_MAXCUR_KEY);
}
private static synchronized long incrementEdgeMaxCursor() throws MappingDSGraphDBException {
try {
long countProp = idmanager.getProperty(MappingDSGraphPropertyNames.DD_GRAPH_EDGE_MAXCUR_KEY);
countProp++;
idmanager.setProperty(MappingDSGraphPropertyNames.DD_GRAPH_EDGE_MAXCUR_KEY, countProp);
} catch (Exception E) {
String msg = "Exception catched while incrementing edge max cursor count...";
log.error(msg);
E.printStackTrace();
log.error("Raise exception for rollback...");
throw new MappingDSGraphDBException(msg);
}
return getEdgeMaxCursor();
}
private static synchronized void decrementEdgeMaxCursor() throws MappingDSGraphDBException {
try {
long countProp = idmanager.getProperty(MappingDSGraphPropertyNames.DD_GRAPH_EDGE_MAXCUR_KEY);
countProp
idmanager.setProperty(MappingDSGraphPropertyNames.DD_GRAPH_EDGE_MAXCUR_KEY, countProp);
} catch (Exception E) {
String msg = "Exception catched while decrementing edge max cursor count...";
log.error(msg);
E.printStackTrace();
log.error("Raise exception for rollback...");
throw new MappingDSGraphDBException(msg);
}
}
public static synchronized long getEdgeMaxCursor() {
return idmanager.getProperty(MappingDSGraphPropertyNames.DD_GRAPH_EDGE_MAXCUR_KEY);
}
private static synchronized void addVertexFreeID(long id) throws MappingDSGraphDBException {
try {
idmanager.setProperty(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_FREE_IDS_KEY + id, id);
} catch (Exception E) {
String msg = "Exception catched while adding vertex free ID " + id + "...";
log.error(msg);
E.printStackTrace();
log.error("Raise exception for rollback...");
throw new MappingDSGraphDBException(msg);
}
}
private static synchronized boolean hasVertexFreeID() {
for (String key : idmanager.getPropertyKeys()) {
if (key.contains(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_FREE_IDS_KEY)) {
return true;
}
}
return false;
}
private static synchronized long consumeVertexFreeID() {
long ret = 0;
for (String key : idmanager.getPropertyKeys()) {
if (key.contains(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_FREE_IDS_KEY)) {
ret = idmanager.getProperty(key);
idmanager.removeProperty(key);
break;
}
}
return ret;
}
private static synchronized void addEdgeFreeID(long id) throws MappingDSGraphDBException {
try {
idmanager.setProperty(MappingDSGraphPropertyNames.DD_GRAPH_EDGE_FREE_IDS_KEY + id, id);
} catch (Exception E) {
String msg = "Exception catched while adding edge free ID " + id + "...";
log.error(msg);
E.printStackTrace();
log.error("Raise exception for rollback...");
throw new MappingDSGraphDBException(msg);
}
}
private static synchronized boolean hasEdgeFreeID() {
for (String key : idmanager.getPropertyKeys()) {
if (key.contains(MappingDSGraphPropertyNames.DD_GRAPH_EDGE_FREE_IDS_KEY)) {
return true;
}
}
return false;
}
private static synchronized long consumeEdgeFreeID() {
long ret = 0;
for (String key : idmanager.getPropertyKeys()) {
if (key.contains(MappingDSGraphPropertyNames.DD_GRAPH_EDGE_FREE_IDS_KEY)) {
ret = idmanager.getProperty(key);
idmanager.removeProperty(key);
break;
}
}
return ret;
}
    /**
     * Persists a new vertex-backed entity: allocates a graph-local ID (recycled
     * free ID when available, otherwise cursor+1), creates the vertex, binds it
     * to the entity, caches the entity and pushes its fields to the DB.
     * On failure the transaction is rolled back per the thread's autocommit mode.
     *
     * @param entity the cache entity to bind to a new vertex
     * @return the same entity instance (its element is set only on success)
     */
    public static MappingDSCacheEntity saveVertexEntity(MappingDSCacheEntity entity) {
        Vertex entityV = null;
        long id = 0;
        try {
            // Prefer recycling a previously freed ID over growing the cursor.
            if (!hasVertexFreeID()) {
                id = incrementVertexMaxCursor();
            } else {
                id = consumeVertexFreeID();
            }
            entityV = ccgraph.addVertex(null);
            entityV.setProperty(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_ID, id);
            entity.setElement(entityV);
            MappingDSCache.putEntityToCache(entity);
            entity.synchronizeToDB();
            autocommit();
            log.debug("Vertex {} ({}:{}) has been saved on graph {}", new Object[]{entityV.toString(), MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_ID, id,
                    ccgraph.toString() + "(" + ccgraph.hashCode() + ")"});
            if (log.isTraceEnabled()) {
                for (String propKey : entityV.getPropertyKeys()) {
                    log.trace("Vertex {} property {}: {}", new Object[]{entityV.toString(),propKey,entityV.getProperty(propKey).toString()});
                }
            }
        } catch (Exception E) {
            // Best-effort save: swallow, log and roll back — the entity is still returned.
            log.error("Exception catched while saving vertex " + id + ".");
            E.printStackTrace();
            autorollback();
        }
        return entity;
    }
    /**
     * Creates a labeled edge between two vertices with a freshly allocated
     * graph-local edge ID (recycled free ID when available, otherwise cursor+1)
     * and commits per the thread's autocommit mode.
     *
     * @param source      the out-vertex
     * @param destination the in-vertex
     * @param label       the blueprints edge label
     * @return the created edge
     * @throws MappingDSGraphDBException on any failure, so callers can roll back
     */
    public static Edge createEdge(Vertex source, Vertex destination, String label) throws MappingDSGraphDBException {
        Edge edge = null;
        long id = 0;
        try {
            // Prefer recycling a previously freed ID over growing the cursor.
            if (!hasEdgeFreeID()) {
                id = incrementEdgeMaxCursor();
            } else {
                id = consumeEdgeFreeID();
            }
            if (log.isTraceEnabled()) {
                for (String propKey : source.getPropertyKeys()) {
                    log.trace("Source vertex {} property {}: {}", new Object[]{source.toString(),propKey,source.getProperty(propKey).toString()});
                }
                for (String propKey : destination.getPropertyKeys()) {
                    log.trace("Destination vertex {} property {}: {}", new Object[]{destination.toString(),propKey,destination.getProperty(propKey).toString()});
                }
            }
            edge = ccgraph.addEdge(null, source, destination, label);
            edge.setProperty(MappingDSGraphPropertyNames.DD_GRAPH_EDGE_ID, id);
            autocommit();
            log.debug("Edge {} ({}:{}) has been saved on graph {}", new Object[]{edge.toString(), MappingDSGraphPropertyNames.DD_GRAPH_EDGE_ID, id,
                    ccgraph.toString() + "(" + ccgraph.hashCode() + ")"});
            if (log.isTraceEnabled()) {
                for (String propKey : edge.getPropertyKeys()) {
                    log.trace("Edge property {}: {}", new Object[]{edge.toString(),propKey,edge.getProperty(propKey).toString()});
                }
                for (String propKey : source.getPropertyKeys()) {
                    log.trace("Source vertex {} property {}: {}", new Object[]{source.toString(),propKey,source.getProperty(propKey).toString()});
                }
                for (String propKey : destination.getPropertyKeys()) {
                    log.trace("Destination vertex {} property {}: {}", new Object[]{destination.toString(),propKey,destination.getProperty(propKey).toString()});
                }
            }
        } catch (Exception E) {
            // Unlike saveVertexEntity, failures here are escalated to the caller for rollback.
            String msg = "Exception catched while saving edge " + id + ".";
            log.error(msg);
            E.printStackTrace();
            log.error("Raise exception for rollback...");
            throw new MappingDSGraphDBException(msg);
        }
        return edge;
    }
public static MappingDSCacheEntity saveEdgeEntity(MappingDSCacheEntity entity, Vertex source, Vertex destination, String label) {
try {
Edge entityE = createEdge(source, destination, label);
entity.setElement(entityE);
MappingDSCache.putEntityToCache(entity);
entity.synchronizeToDB();
autocommit();
} catch (Exception E) {
log.error("Exception catched while saving edge...");
E.printStackTrace();
autorollback();
}
return entity;
}
private static MappingDSCacheEntity getEdgeEntity(Edge edge) {
long id = (long) edge.getProperty(MappingDSGraphPropertyNames.DD_GRAPH_EDGE_ID);
MappingDSCacheEntity ret = MappingDSCache.getCachedEntity("E" + id);
if (ret == null) {
if (edge.getLabel().equals(MappingDSGraphPropertyNames.DD_GRAPH_EDGE_LINK_LABEL_KEY)) {
ret = new LinkImpl();
ret.setElement(edge);
MappingDSCache.putEntityToCache(ret);
ret.synchronizeFromDB();
}
}
return ret;
}
public static MappingDSCacheEntity getEdgeEntity(long id) {
log.debug("Get cache entity {} if exists ...", new Object[]{"E"+id});
MappingDSCacheEntity ret = MappingDSCache.getCachedEntity("E" + id);
if (ret == null) {
Edge edge = (ccgraph.getEdges(MappingDSGraphPropertyNames.DD_GRAPH_EDGE_ID,id).iterator().hasNext() ?
ccgraph.getEdges(MappingDSGraphPropertyNames.DD_GRAPH_EDGE_ID,id).iterator().next() : null);
if (edge!=null && edge.getLabel().equals(MappingDSGraphPropertyNames.DD_GRAPH_EDGE_LINK_LABEL_KEY)) {
ret = new LinkImpl();
ret.setElement(edge);
MappingDSCache.putEntityToCache(ret);
ret.synchronizeFromDB();
}
}
return ret;
}
private static MappingDSCacheEntity getVertexEntity(Vertex vertex) {
long id = (long) vertex.getProperty(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_ID);
String vertexType = vertex.getProperty(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_TYPE_KEY);
MappingDSCacheEntity ret = MappingDSCache.getCachedEntity("V" + id);
if (ret == null) {
if (vertexType != null) {
switch (vertexType) {
case MappingDSGraphPropertyNames.DD_TYPE_CLUSTER_VALUE:
ret = new ClusterImpl();
break;
case MappingDSGraphPropertyNames.DD_TYPE_CONTAINER_VALUE:
ret = new ContainerImpl();
break;
case MappingDSGraphPropertyNames.DD_TYPE_NODE_VALUE:
ret = new NodeImpl();
break;
case MappingDSGraphPropertyNames.DD_TYPE_GATE_VALUE:
ret = new GateImpl();
break;
case MappingDSGraphPropertyNames.DD_TYPE_ENDPOINT_VALUE:
ret = new EndpointImpl();
break;
case MappingDSGraphPropertyNames.DD_TYPE_TRANSPORT_VALUE:
ret = new TransportImpl();
break;
default:
break;
}
}
if (ret != null) {
ret.setElement(vertex);
MappingDSCache.putEntityToCache(ret);
ret.synchronizeFromDB();
}
}
log.debug("{} : {}", new Object[]{vertexType, ((vertexType.equals(MappingDSGraphPropertyNames.DD_TYPE_CLUSTER_VALUE)) ? ((ClusterImpl)ret).getClusterName() :
((vertexType.equals(MappingDSGraphPropertyNames.DD_TYPE_CONTAINER_VALUE)) ? ((ContainerImpl)ret).getContainerPrimaryAdminGateURL() :
((vertexType.equals(MappingDSGraphPropertyNames.DD_TYPE_NODE_VALUE)) ? ((NodeImpl)ret).getNodeName() :
((vertexType.equals(MappingDSGraphPropertyNames.DD_TYPE_GATE_VALUE)) ? ((GateImpl)ret).getNodeName() :
((vertexType.equals(MappingDSGraphPropertyNames.DD_TYPE_ENDPOINT_VALUE)) ? ((EndpointImpl)ret).getEndpointURL() :
((vertexType.equals(MappingDSGraphPropertyNames.DD_TYPE_TRANSPORT_VALUE)) ? ((TransportImpl)ret).getTransportName() :"!!!!"))))))});
return ret;
}
    /**
     * Resolves the cache entity for the vertex with the given graph-local ID:
     * ID 0 (the idmanager vertex) is never exposed; cache hit first; on miss,
     * looks the vertex up by its indexed ID property, instantiates the matching
     * *Impl for its type, caches it and loads it from the DB.
     *
     * @param id graph-local vertex ID (0 returns null)
     * @return the entity, or null when not found or of unknown type
     */
    public static MappingDSCacheEntity getVertexEntity(long id) {
        if (id == 0)
            return null;
        log.debug("Get cache entity {} if exists ...", new Object[]{"V"+id});
        MappingDSCacheEntity ret = MappingDSCache.getCachedEntity("V" + id);
        if (ret == null) {
            log.debug("Get vertex {} from graph {}...", new Object[]{id, ccgraph.toString() + "(" + ccgraph.hashCode() + ")"});
            Vertex vertex = (ccgraph.getVertices(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_ID, id).iterator().hasNext() ?
                    ccgraph.getVertices(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_ID, id).iterator().next() : null);
            if (vertex != null) {
                // Map the stored type discriminator to the concrete entity class.
                String vertexType = vertex.getProperty(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_TYPE_KEY);
                if (vertexType != null) {
                    switch (vertexType) {
                        case MappingDSGraphPropertyNames.DD_TYPE_CLUSTER_VALUE:
                            ret = new ClusterImpl();
                            break;
                        case MappingDSGraphPropertyNames.DD_TYPE_CONTAINER_VALUE:
                            ret = new ContainerImpl();
                            break;
                        case MappingDSGraphPropertyNames.DD_TYPE_NODE_VALUE:
                            ret = new NodeImpl();
                            break;
                        case MappingDSGraphPropertyNames.DD_TYPE_GATE_VALUE:
                            ret = new GateImpl();
                            break;
                        case MappingDSGraphPropertyNames.DD_TYPE_ENDPOINT_VALUE:
                            ret = new EndpointImpl();
                            break;
                        case MappingDSGraphPropertyNames.DD_TYPE_TRANSPORT_VALUE:
                            ret = new TransportImpl();
                            break;
                        default:
                            break;
                    }
                }
                if (ret != null) {
                    ret.setElement(vertex);
                    MappingDSCache.putEntityToCache(ret);
                    ret.synchronizeFromDB();
                }
            }
            // Commit only on the DB-lookup path; a pure cache hit touches no transaction.
            autocommit();
        } else {
            log.debug("Entity returned from cache {}", new Object[]{ret.toString()});
        }
        return ret;
    }
public static Set<ClusterImpl> getClusters(){
Set<ClusterImpl> ret = new HashSet<ClusterImpl>();
log.debug("Get all clusters from graph {}...", new Object[]{ccgraph.toString() + "(" + ccgraph.hashCode() + ")"});
for (Vertex vertex : ccgraph.getVertices(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_TYPE_KEY,
MappingDSGraphPropertyNames.DD_TYPE_CLUSTER_VALUE)) {
long id = vertex.getProperty(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_ID);
ClusterImpl tmp = (ClusterImpl) getVertexEntity(id);
if (tmp == null) {
tmp = new ClusterImpl();
tmp.setElement(vertex);
MappingDSCache.putEntityToCache(tmp);
tmp.synchronizeFromDB();
}
log.debug("Add cluster {} to Set...", new Object[]{id});
ret.add(tmp);
}
autocommit();
return ret;
}
public static Set<ContainerImpl> getContainers() {
Set<ContainerImpl> ret = new HashSet<ContainerImpl>();
log.debug("Get all containers from graph {}...", new Object[]{ccgraph.toString() + "(" + ccgraph.hashCode() + ")"});
for (Vertex vertex : ccgraph.getVertices(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_TYPE_KEY,
MappingDSGraphPropertyNames.DD_TYPE_CONTAINER_VALUE)) {
long id = vertex.getProperty(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_ID);
ContainerImpl tmp = (ContainerImpl) getVertexEntity(id);
if (tmp == null) {
tmp = new ContainerImpl();
tmp.setElement(vertex);
MappingDSCache.putEntityToCache(tmp);
tmp.synchronizeFromDB();
}
log.debug("Add container {} to Set...", new Object[]{id});
ret.add(tmp);
}
autocommit();
return ret;
}
public static Set<NodeImpl> getNodes() {
Set<NodeImpl> ret = new HashSet<NodeImpl>();
log.debug("Get all nodes from graph {}...", new Object[]{ccgraph.toString()});
for (Vertex vertex : ccgraph.getVertices(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_TYPE_KEY,
MappingDSGraphPropertyNames.DD_TYPE_NODE_VALUE)) {
long id = vertex.getProperty(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_ID);
NodeImpl tmp = (NodeImpl) getVertexEntity(id);
if (tmp == null) {
tmp = new NodeImpl();
tmp.setElement(vertex);
MappingDSCache.putEntityToCache(tmp);
tmp.synchronizeFromDB();
}
log.debug("Add node {} to Set...", new Object[]{id});
ret.add(tmp);
}
for (Vertex vertex : ccgraph.getVertices(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_TYPE_KEY,
MappingDSGraphPropertyNames.DD_TYPE_GATE_VALUE)) {
long id = vertex.getProperty(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_ID);
NodeImpl tmp = (NodeImpl) getVertexEntity(id);
if (tmp == null) {
tmp = new NodeImpl();
tmp.setElement(vertex);
MappingDSCache.putEntityToCache(tmp);
tmp.synchronizeFromDB();
}
log.debug("Add node {} to Set...", new Object[]{id});
ret.add(tmp);
}
autocommit();
return ret;
}
public static Set<NodeImpl> getNodes(String key, Object value) {
Set<NodeImpl> ret = new HashSet<NodeImpl>();
log.debug("Get all nodes from graph {}...", new Object[]{ccgraph.toString()});
for (Vertex vertex : ccgraph.getVertices(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_TYPE_KEY,
MappingDSGraphPropertyNames.DD_TYPE_NODE_VALUE)) {
long id = vertex.getProperty(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_ID);
NodeImpl tmp = (NodeImpl) getVertexEntity(id);
Object tmpValue = tmp.getNodeProperties().get(key);
if (tmpValue.equals(value)) {
log.debug("Add node {} to Set...", new Object[]{id});
ret.add(tmp);
}
}
for (Vertex vertex : ccgraph.getVertices(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_TYPE_KEY,
MappingDSGraphPropertyNames.DD_TYPE_GATE_VALUE)) {
long id = vertex.getProperty(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_ID);
NodeImpl tmp = (NodeImpl) getVertexEntity(id);
Object tmpValue = tmp.getNodeProperties().get(key);
if (tmpValue.equals(value)) {
log.debug("Add node {} to Set...", new Object[]{id});
ret.add(tmp);
}
}
autocommit();
return ret;
}
public static Set<GateImpl> getGates() {
Set<GateImpl> ret = new HashSet<GateImpl>();
log.debug("Get all gates from graph {}...", new Object[]{ccgraph.toString()});
for (Vertex vertex : ccgraph.getVertices(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_TYPE_KEY,
MappingDSGraphPropertyNames.DD_TYPE_GATE_VALUE)) {
long id = vertex.getProperty(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_ID);
GateImpl tmp = (GateImpl) getVertexEntity(id);
if (tmp == null) {
tmp = new GateImpl();
tmp.setElement(vertex);
MappingDSCache.putEntityToCache(tmp);
tmp.synchronizeFromDB();
}
log.debug("Add gate {} to Set...", new Object[]{id});
ret.add(tmp);
}
autocommit();
return ret;
}
public static Set<GateImpl> getGates(String key, Object value) {
Set<GateImpl> ret = new HashSet<GateImpl>();
log.debug("Get all gates from graph {}...", new Object[]{ccgraph.toString()});
for (Vertex vertex : ccgraph.getVertices(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_TYPE_KEY,
MappingDSGraphPropertyNames.DD_TYPE_GATE_VALUE)) {
long id = vertex.getProperty(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_ID);
GateImpl tmp = (GateImpl) getVertexEntity(id);
Object tmpValue = tmp.getNodeProperties().get(key);
if (tmpValue.equals(value)) {
log.debug("Add gate {} to Set...", new Object[]{id});
ret.add(tmp);
}
}
autocommit();
return ret;
}
public static Set<EndpointImpl> getEndpoints() {
Set<EndpointImpl> ret = new HashSet<EndpointImpl>();
log.debug("Get all endpoints from graph {}...", new Object[]{ccgraph.toString()});
for (Vertex vertex : ccgraph.getVertices(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_TYPE_KEY,
MappingDSGraphPropertyNames.DD_TYPE_ENDPOINT_VALUE)) {
long id = vertex.getProperty(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_ID);
EndpointImpl tmp = (EndpointImpl) getVertexEntity(id);
if (tmp == null) {
tmp = new EndpointImpl();
tmp.setElement(vertex);
MappingDSCache.putEntityToCache(tmp);
tmp.synchronizeFromDB();
}
log.debug("Add endpoint {} to Set...", new Object[]{id});
ret.add(tmp);
}
autocommit();
return ret;
}
public static Set<EndpointImpl> getEndpoints(String key, Object value) {
Set<EndpointImpl> ret = new HashSet<EndpointImpl>();
for (Vertex vertex : ccgraph.getVertices(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_TYPE_KEY,
MappingDSGraphPropertyNames.DD_TYPE_ENDPOINT_VALUE)) {
long id = vertex.getProperty(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_ID);
log.debug("Test vertex {}...", new Object[]{id});
EndpointImpl tmp = (EndpointImpl) getVertexEntity(id);
Object tmpValue = (tmp.getEndpointProperties() != null) ? tmp.getEndpointProperties().get(key) : null;
if (tmpValue != null && tmpValue.equals(value)) {
log.debug("Add endpoint {} to Set...", new Object[]{id});
ret.add(tmp);
}
}
autocommit();
return ret;
}
public static Set<TransportImpl> getTransports() {
Set<TransportImpl> ret = new HashSet<TransportImpl>();
log.debug("Get all transports from graph {}...", new Object[]{ccgraph.toString()});
for (Vertex vertex : ccgraph.getVertices(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_TYPE_KEY,
MappingDSGraphPropertyNames.DD_TYPE_TRANSPORT_VALUE)) {
long id = vertex.getProperty(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_ID);
TransportImpl tmp = (TransportImpl) getVertexEntity(id);
if (tmp == null) {
tmp = new TransportImpl();
tmp.setElement(vertex);
MappingDSCache.putEntityToCache(tmp);
tmp.synchronizeFromDB();
}
log.debug("Add transport {} to Set...", new Object[]{id});
ret.add(tmp);
}
autocommit();
return ret;
}
public static ClusterImpl getIndexedCluster(String clusterName) {
MappingDSCacheEntity ret = MappingDSCache.getClusterFromCache(clusterName);
if (ret == null) {
Vertex vertex = ccgraph.getVertices(MappingDSGraphPropertyNames.DD_CLUSTER_NAME_KEY, clusterName).iterator().hasNext() ?
ccgraph.getVertices(MappingDSGraphPropertyNames.DD_CLUSTER_NAME_KEY, clusterName).iterator().next() : null;
if (vertex != null) {
String vertexType = vertex.getProperty(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_TYPE_KEY);
switch (vertexType) {
case MappingDSGraphPropertyNames.DD_TYPE_CLUSTER_VALUE:
ret = new ClusterImpl();
break;
default:
break;
}
if (ret != null) {
ret.setElement(vertex);
MappingDSCache.putEntityToCache(ret);
ret.synchronizeFromDB();
}
}
}
autocommit();
return (ClusterImpl) ret;
}
public static Set<NodeImpl> getIndexedNodes(String name) {
Set<NodeImpl> ret = new HashSet<NodeImpl>();
for (Vertex vertex : ccgraph.getVertices(MappingDSGraphPropertyNames.DD_NODE_NAME_KEY, name)) {
String vertexType = vertex.getProperty(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_TYPE_KEY);
NodeImpl tmp = null;
switch (vertexType) {
case MappingDSGraphPropertyNames.DD_TYPE_NODE_VALUE:
tmp = new NodeImpl();
break;
default:
break;
}
if (tmp != null) {
tmp.setElement(vertex);
MappingDSCache.putEntityToCache(tmp);
tmp.synchronizeFromDB();
ret.add(tmp);
}
}
autocommit();
return ret;
}
public static EndpointImpl getIndexedEndpoint(String url) {
MappingDSCacheEntity ret = MappingDSCache.getEndpointFromCache(url);
if (ret == null && ccgraph != null) {
Vertex vertex = ccgraph.getVertices(MappingDSGraphPropertyNames.DD_ENDPOINT_URL_KEY, url).iterator().hasNext() ?
ccgraph.getVertices(MappingDSGraphPropertyNames.DD_ENDPOINT_URL_KEY, url).iterator().next() : null;
if (vertex != null) {
String vertexType = vertex.getProperty(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_TYPE_KEY);
switch (vertexType) {
case MappingDSGraphPropertyNames.DD_TYPE_ENDPOINT_VALUE:
ret = new EndpointImpl();
default:
break;
}
if (ret != null) {
ret.setElement(vertex);
MappingDSCache.putEntityToCache(ret);
ret.synchronizeFromDB();
}
}
autocommit();
}
return (EndpointImpl) ret;
}
public static TransportImpl getIndexedTransport(String transportName) {
MappingDSCacheEntity ret = MappingDSCache.getTransportFromCache(transportName);
if (ret == null && ccgraph != null) {
Vertex vertex = ccgraph.getVertices(MappingDSGraphPropertyNames.DD_TRANSPORT_NAME_KEY, transportName).iterator().hasNext() ?
ccgraph.getVertices(MappingDSGraphPropertyNames.DD_TRANSPORT_NAME_KEY, transportName).iterator().next() : null;
if (vertex != null) {
String vertexType = vertex.getProperty(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_TYPE_KEY);
switch (vertexType) {
case MappingDSGraphPropertyNames.DD_TYPE_TRANSPORT_VALUE:
ret = new TransportImpl();
default:
break;
}
if (ret != null) {
ret.setElement(vertex);
MappingDSCache.putEntityToCache(ret);
ret.synchronizeFromDB();
}
}
autocommit();
}
return (TransportImpl) ret;
}
public static MappingDSCacheEntity getLink(long id) {
if (id == 0)
return null;
MappingDSCacheEntity ret = MappingDSCache.getCachedEntity("E" + id);
if (ret == null && ccgraph != null) {
Edge edge = ccgraph.getEdges(MappingDSGraphPropertyNames.DD_GRAPH_EDGE_ID, id).iterator().hasNext() ?
ccgraph.getEdges(MappingDSGraphPropertyNames.DD_GRAPH_EDGE_ID, id).iterator().next() : null;
if (edge != null && edge.getLabel().equals(MappingDSGraphPropertyNames.DD_GRAPH_EDGE_LINK_LABEL_KEY)) {
ret = new LinkImpl();
ret.setElement(edge);
MappingDSCache.putEntityToCache(ret);
ret.synchronizeFromDB();
}
autocommit();
}
return ret;
}
public static Set<LinkImpl> getLinks() {
Set<LinkImpl> ret = new HashSet<LinkImpl>();
for (Edge edge : ccgraph.getEdges()) {
if (edge.getLabel().equals(MappingDSGraphPropertyNames.DD_GRAPH_EDGE_LINK_LABEL_KEY)) {
long id = edge.getProperty(MappingDSGraphPropertyNames.DD_GRAPH_EDGE_ID);
LinkImpl tmp = (LinkImpl) getLink(id);
if (tmp == null) {
tmp = new LinkImpl();
tmp.setElement(edge);
MappingDSCache.putEntityToCache(tmp);
tmp.synchronizeFromDB();
}
ret.add(tmp);
}
}
autocommit();
return ret;
}
private static void removeVertex(Vertex vertex) throws MappingDSGraphDBException {
long vertexID = (long) vertex.getProperty(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_ID);
ccgraph.removeVertex(vertex);
if (vertexID == getVertexMaxCursor()) {
decrementVertexMaxCursor();
} else {
addVertexFreeID(vertexID);
}
autocommit();
}
private static void removeEdge(Edge edge) throws MappingDSGraphDBException {
long edgeID = (long) edge.getProperty(MappingDSGraphPropertyNames.DD_GRAPH_EDGE_ID);
ccgraph.removeEdge(edge);
if (edgeID == getEdgeMaxCursor()) {
decrementEdgeMaxCursor();
} else {
addEdgeFreeID(edgeID);
}
autocommit();
}
    /**
     * Deletes an entity from both the cache and the graph DB.
     * <p>
     * For a vertex-backed entity the cleanup happens in a fixed order:
     * recursively delete everything this vertex owns, detach it from its owners
     * (refreshing them), remove twin edges (refreshing the twin), delete link
     * edges, then drop the vertex itself. Edge-backed entities are simply
     * uncached and removed. Any failure triggers a rollback.
     *
     * @param entity the cached entity to delete; its element may be a Vertex or an Edge
     */
    public static void deleteEntity(MappingDSCacheEntity entity) {
        Element elem = entity.getElement();
        try {
            if (elem != null) {
                if (elem instanceof Vertex) {
                    Vertex vertex = (Vertex) elem;
                    // 1) Recursively delete every vertex this one owns (outgoing OWNS edges).
                    for (Edge edge : vertex.getEdges(Direction.OUT, MappingDSGraphPropertyNames.DD_GRAPH_EDGE_OWNS_LABEL_KEY)) {
                        Vertex ownedVertex = edge.getVertex(Direction.IN);
                        deleteEntity(getVertexEntity(ownedVertex));
                    }
                    // 2) Detach from owners (incoming OWNS edges) and resync each owner.
                    for (Edge edge : vertex.getEdges(Direction.IN, MappingDSGraphPropertyNames.DD_GRAPH_EDGE_OWNS_LABEL_KEY)) {
                        MappingDSCacheEntity owningEntity = getVertexEntity(edge.getVertex(Direction.OUT));
                        removeEdge(edge);
                        owningEntity.synchronizeFromDB();
                    }
                    // 3) Remove TWIN edges; the twin is whichever endpoint of the
                    //    edge is NOT this vertex (compared by graph vertex ID).
                    for (Edge edge : vertex.getEdges(Direction.BOTH, MappingDSGraphPropertyNames.DD_GRAPH_EDGE_TWIN_LABEL_KEY)) {
                        MappingDSCacheEntity twinEntity ;
                        Vertex v = edge.getVertex(Direction.OUT);
                        if (!v.getProperty(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_ID).equals(vertex.getProperty(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_ID)))
                            twinEntity = getVertexEntity(v);
                        else
                            twinEntity = getVertexEntity(edge.getVertex(Direction.IN));
                        removeEdge(edge);
                        twinEntity.synchronizeFromDB();
                    }
                    // 4) Delete link edges attached to this vertex (recursive: links are entities too).
                    for (Edge edge : vertex.getEdges(Direction.BOTH, MappingDSGraphPropertyNames.DD_GRAPH_EDGE_LINK_LABEL_KEY))
                        deleteEntity(getEdgeEntity(edge));
                    // 5) Finally drop the vertex itself from cache and graph.
                    MappingDSCache.removeEntityFromCache(entity);
                    removeVertex(vertex);
                } else if (elem instanceof Edge) {
                    MappingDSCache.removeEntityFromCache(entity);
                    removeEdge((Edge) elem);
                }
            }
            autocommit();
        } catch (Exception E) {
            log.error("Exception catched while deleting entity " + entity.getElement().getId() + "...");
            E.printStackTrace();
            autorollback();
        }
    }
public static void clear() {
try {
for (Edge edge : ccgraph.getEdges()) {
ccgraph.removeEdge(edge);
}
for (Vertex vertex : ccgraph.getVertices()) {
ccgraph.removeVertex(vertex);
}
autocommit();
} catch (Exception E) {
log.error("Exception catched while clearing DB Graph...");
E.printStackTrace();
autorollback();
}
}
}
|
package org.ovirt.engine.ui.webadmin.section.main.view.tab;
import org.ovirt.engine.core.common.businessentities.QuotaEnforcementTypeEnum;
import org.ovirt.engine.core.common.businessentities.StoragePool;
import org.ovirt.engine.core.common.businessentities.storage.Disk;
import org.ovirt.engine.core.common.businessentities.storage.DiskStorageType;
import org.ovirt.engine.core.searchbackend.DiskConditionFieldAutoCompleter;
import org.ovirt.engine.ui.common.idhandler.ElementIdHandler;
import org.ovirt.engine.ui.common.uicommon.model.MainModelProvider;
import org.ovirt.engine.ui.common.widget.action.CommandLocation;
import org.ovirt.engine.ui.common.widget.table.column.AbstractDiskSizeColumn;
import org.ovirt.engine.ui.common.widget.table.column.AbstractTextColumn;
import org.ovirt.engine.ui.common.widget.table.header.ImageResourceHeader;
import org.ovirt.engine.ui.common.widget.uicommon.disks.DisksViewColumns;
import org.ovirt.engine.ui.common.widget.uicommon.disks.DisksViewRadioGroup;
import org.ovirt.engine.ui.uicommonweb.UICommand;
import org.ovirt.engine.ui.uicommonweb.models.CommonModel;
import org.ovirt.engine.ui.uicommonweb.models.EntityModel;
import org.ovirt.engine.ui.uicommonweb.models.SystemTreeItemModel;
import org.ovirt.engine.ui.uicommonweb.models.SystemTreeItemType;
import org.ovirt.engine.ui.uicommonweb.models.disks.DiskListModel;
import org.ovirt.engine.ui.uicompat.Event;
import org.ovirt.engine.ui.uicompat.EventArgs;
import org.ovirt.engine.ui.uicompat.IEventListener;
import org.ovirt.engine.ui.webadmin.ApplicationConstants;
import org.ovirt.engine.ui.webadmin.gin.AssetProvider;
import org.ovirt.engine.ui.webadmin.section.main.presenter.tab.MainTabDiskPresenter;
import org.ovirt.engine.ui.webadmin.section.main.view.AbstractMainTabWithDetailsTableView;
import org.ovirt.engine.ui.webadmin.widget.action.WebAdminButtonDefinition;
import com.google.gwt.core.client.GWT;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.regexp.shared.RegExp;
import com.google.gwt.uibinder.client.UiField;
import com.google.gwt.user.client.ui.RadioButton;
import com.google.gwt.user.client.ui.SimplePanel;
import com.google.inject.Inject;
import com.google.inject.Provider;
/**
 * Main tab view that lists all {@link Disk}s. A radio group above the table lets
 * the user restrict the listing by disk storage type (all / images / LUNs /
 * Cinder); switching the type rewrites the search string and toggles which
 * columns are visible.
 */
public class MainTabDiskView extends AbstractMainTabWithDetailsTableView<Disk, DiskListModel> implements MainTabDiskPresenter.ViewDef {

    interface ViewIdHandler extends ElementIdHandler<MainTabDiskView> {
        ViewIdHandler idHandler = GWT.create(ViewIdHandler.class);
    }

    @UiField
    SimplePanel tablePanel;

    private DisksViewRadioGroup disksViewRadioGroup;
    // Whether the quota column may be shown; recomputed in handleQuotaColumnVisibility().
    private boolean isQuotaVisible;

    // Columns are created once in initTableColumns() and shown/hidden per disk
    // view type in onDiskViewTypeChanged().
    private static AbstractTextColumn<Disk> aliasColumn;
    private static AbstractTextColumn<Disk> idColumn;
    private static AbstractDiskSizeColumn sizeColumn;
    private static AbstractTextColumn<Disk> allocationColumn;
    private static AbstractTextColumn<Disk> dateCreatedColumn;
    private static AbstractTextColumn<Disk> statusColumn;
    private static AbstractTextColumn<Disk> lunIdColumn;
    private static AbstractTextColumn<Disk> lunSerialColumn;
    private static AbstractTextColumn<Disk> lunVendorIdColumn;
    private static AbstractTextColumn<Disk> lunProductIdColumn;
    private static AbstractTextColumn<Disk> quotaColumn; // renamed from misspelled "qoutaColumn" (private, no external callers)
    private static AbstractTextColumn<Disk> diskStorageTypeColumn;
    private static AbstractTextColumn<Disk> cinderVolumeTypeColumn;
    private static AbstractTextColumn<Disk> descriptionColumn;

    @Inject
    Provider<CommonModel> commonModelProvider;

    private static final ApplicationConstants constants = AssetProvider.getConstants();

    @Inject
    public MainTabDiskView(MainModelProvider<Disk, DiskListModel> modelProvider) {
        super(modelProvider);
        ViewIdHandler.idHandler.generateAndSetIds(this);
        initTableColumns();
        initTableButtons();
        initTableOverhead();
        initWidget(getTable());
        // Cinder disks are not selectable on this tab.
        disksViewRadioGroup.getCinderButton().removeFromParent();
    }

    /** Pushes the selected radio button's storage type into the model. */
    final ClickHandler clickHandler = new ClickHandler() {
        @Override
        public void onClick(ClickEvent event) {
            if (((RadioButton) event.getSource()).getValue()) {
                getMainModel().getDiskViewType().setEntity(disksViewRadioGroup.getDiskStorageType());
            }
        }
    };

    /** Keeps the radio group and the table in sync when the model's disk view type changes. */
    final IEventListener<EventArgs> diskTypeChangedEventListener = new IEventListener<EventArgs>() {
        @Override
        public void eventRaised(Event<? extends EventArgs> ev, Object sender, EventArgs args) {
            EntityModel diskViewType = (EntityModel) sender;
            disksViewRadioGroup.setDiskStorageType((DiskStorageType) diskViewType.getEntity());
            // Only refresh the table when this tab's model is the active one.
            if (commonModelProvider.get().getSelectedItem() instanceof DiskListModel) {
                onDiskViewTypeChanged();
            }
        }
    };

    @Override
    public IEventListener<EventArgs> getDiskTypeChangedEventListener() {
        return diskTypeChangedEventListener;
    }

    @Override
    public void handleQuotaColumnVisibility() {
        // The quota column is only meaningful when a data center with quota
        // enforcement enabled is selected in the system tree.
        isQuotaVisible = false;
        SystemTreeItemModel treeItem =
                commonModelProvider.get().getSystemTree().getSelectedItem();
        if (treeItem != null
                && SystemTreeItemType.DataCenter == treeItem.getType()) {
            StoragePool storagePool = (StoragePool) treeItem.getEntity();
            if (QuotaEnforcementTypeEnum.DISABLED != storagePool.getQuotaEnforcementType()) {
                isQuotaVisible = true;
            }
        }
        onDiskViewTypeChanged();
    }

    /**
     * Re-runs the search for the currently selected disk view type and toggles
     * column visibility to match it.
     */
    void onDiskViewTypeChanged() {
        boolean all = disksViewRadioGroup.getAllButton().getValue();
        boolean images = disksViewRadioGroup.getImagesButton().getValue();
        boolean luns = disksViewRadioGroup.getLunsButton().getValue();
        boolean cinder = disksViewRadioGroup.getCinderButton().getValue();
        searchByDiskViewType(disksViewRadioGroup.getDiskStorageType());
        getTable().ensureColumnPresent(
                aliasColumn, constants.aliasDisk(), all || images || luns || cinder,
                "120px"); //$NON-NLS-1$
        getTable().ensureColumnPresent(
                idColumn, constants.idDisk(), all || images || luns || cinder,
                "120px"); //$NON-NLS-1$
        getTable().ensureColumnPresent(
                DisksViewColumns.bootableDiskColumn,
                new ImageResourceHeader(DisksViewColumns.bootableDiskColumn.getDefaultImage(),
                        constants.bootableDisk()),
                all || images || luns || cinder, "30px"); //$NON-NLS-1$
        getTable().ensureColumnPresent(
                DisksViewColumns.shareableDiskColumn,
                new ImageResourceHeader(DisksViewColumns.shareableDiskColumn.getDefaultImage(),
                        constants.shareable()),
                all || images || luns || cinder, "30px"); //$NON-NLS-1$
        getTable().ensureColumnPresent(
                DisksViewColumns.diskContainersIconColumn, "", all || images || luns || cinder, //$NON-NLS-1$
                "30px"); //$NON-NLS-1$
        getTable().ensureColumnPresent(
                DisksViewColumns.diskContainersColumn, constants.attachedToDisk(), all || images || luns || cinder,
                "125px"); //$NON-NLS-1$
        getTable().ensureColumnPresent(
                DisksViewColumns.storageDomainsColumn, constants.storageDomainsDisk(), images || cinder,
                "180px"); //$NON-NLS-1$
        getTable().ensureColumnPresent(
                sizeColumn, constants.provisionedSizeDisk(), all || images || luns || cinder,
                "110px"); //$NON-NLS-1$
        getTable().ensureColumnPresent(
                allocationColumn, constants.allocationDisk(), images,
                "130px"); //$NON-NLS-1$
        getTable().ensureColumnPresent(
                cinderVolumeTypeColumn, constants.cinderVolumeTypeDisk(), cinder, "80px"); //$NON-NLS-1$
        getTable().ensureColumnPresent(
                dateCreatedColumn, constants.creationDateDisk(), images || cinder,
                "130px"); //$NON-NLS-1$
        getTable().ensureColumnPresent(
                statusColumn, constants.statusDisk(), images || cinder || all,
                "80px"); //$NON-NLS-1$
        getTable().ensureColumnPresent(
                lunIdColumn, constants.lunIdSanStorage(), luns,
                "100px"); //$NON-NLS-1$
        getTable().ensureColumnPresent(
                lunSerialColumn, constants.serialSanStorage(), luns,
                "100px"); //$NON-NLS-1$
        getTable().ensureColumnPresent(
                lunVendorIdColumn, constants.vendorIdSanStorage(), luns,
                "100px"); //$NON-NLS-1$
        getTable().ensureColumnPresent(
                lunProductIdColumn, constants.productIdSanStorage(), luns,
                "100px"); //$NON-NLS-1$
        getTable().ensureColumnPresent(
                quotaColumn, constants.quotaDisk(), images && isQuotaVisible, "120px"); //$NON-NLS-1$
        getTable().ensureColumnPresent(
                diskStorageTypeColumn, constants.typeDisk(), all, "80px"); //$NON-NLS-1$
        getTable().ensureColumnPresent(
                descriptionColumn, constants.descriptionDisk(), all || images || luns || cinder,
                "90px"); //$NON-NLS-1$
    }

    /** Creates all column instances; visibility is decided later per view type. */
    void initTableColumns() {
        getTable().enableColumnResizing();
        aliasColumn = DisksViewColumns.getAliasColumn(DiskConditionFieldAutoCompleter.ALIAS);
        idColumn = DisksViewColumns.getIdColumn(DiskConditionFieldAutoCompleter.ID);
        sizeColumn = DisksViewColumns.getSizeColumn(DiskConditionFieldAutoCompleter.PROVISIONED_SIZE);
        allocationColumn = DisksViewColumns.getAllocationColumn(constants.empty());
        dateCreatedColumn = DisksViewColumns.getDateCreatedColumn(DiskConditionFieldAutoCompleter.CREATION_DATE);
        statusColumn = DisksViewColumns.getStatusColumn(DiskConditionFieldAutoCompleter.STATUS);
        lunIdColumn = DisksViewColumns.getLunIdColumn(constants.empty());
        lunSerialColumn = DisksViewColumns.getLunSerialColumn(constants.empty());
        lunVendorIdColumn = DisksViewColumns.getLunVendorIdColumn(constants.empty());
        lunProductIdColumn = DisksViewColumns.getLunProductIdColumn(constants.empty());
        quotaColumn = DisksViewColumns.getQoutaColumn(DiskConditionFieldAutoCompleter.QUOTA);
        diskStorageTypeColumn = DisksViewColumns.getDiskStorageTypeColumn(DiskConditionFieldAutoCompleter.DISK_TYPE);
        cinderVolumeTypeColumn = DisksViewColumns.getCinderVolumeTypeColumn(null);
        descriptionColumn = DisksViewColumns.getDescriptionColumn(DiskConditionFieldAutoCompleter.DESCRIPTION);
    }

    /** Installs the disk-type radio group above the table. */
    void initTableOverhead() {
        disksViewRadioGroup = new DisksViewRadioGroup();
        disksViewRadioGroup.setClickHandler(clickHandler);
        disksViewRadioGroup.addStyleName("mtdv_radioGroup_pfly_fix"); //$NON-NLS-1$
        getTable().setTableOverhead(disksViewRadioGroup);
        getTable().setTableTopMargin(20);
    }

    /** Wires the action buttons to their UICommands on the main model. */
    void initTableButtons() {
        getTable().addActionButton(new WebAdminButtonDefinition<Disk>(constants.newDisk()) {
            @Override
            protected UICommand resolveCommand() {
                return getMainModel().getNewCommand();
            }
        });
        getTable().addActionButton(new WebAdminButtonDefinition<Disk>(constants.removeDisk()) {
            @Override
            protected UICommand resolveCommand() {
                return getMainModel().getRemoveCommand();
            }
        });
        getTable().addActionButton(new WebAdminButtonDefinition<Disk>(constants.moveDisk()) {
            @Override
            protected UICommand resolveCommand() {
                return getMainModel().getMoveCommand();
            }
        });
        getTable().addActionButton(new WebAdminButtonDefinition<Disk>(constants.copyDisk()) {
            @Override
            protected UICommand resolveCommand() {
                return getMainModel().getCopyCommand();
            }
        });
        getTable().addActionButton(new WebAdminButtonDefinition<Disk>(constants.getDiskAlignment(),
                CommandLocation.OnlyFromContext) {
            @Override
            protected UICommand resolveCommand() {
                return getMainModel().getScanAlignmentCommand();
            }
        });
        getTable().addActionButton(new WebAdminButtonDefinition<Disk>(constants.exportDisk()) {
            @Override
            protected UICommand resolveCommand() {
                return getMainModel().getExportCommand();
            }
        });
        getTable().addActionButton(new WebAdminButtonDefinition<Disk>(constants.assignQuota()) {
            @Override
            protected UICommand resolveCommand() {
                return getMainModel().getChangeQuotaCommand();
            }
        });
    }

    /**
     * Rewrites the current search string so it is scoped to the given disk view
     * type: strips any existing {@code disk_type = ...} clause, re-adds one for
     * the selected type (unless "all"), and re-runs the search.
     *
     * @param diskViewType the selected {@link DiskStorageType}, or null for "all"
     */
    void searchByDiskViewType(Object diskViewType) {
        final String disksSearchPrefix = "Disks:"; //$NON-NLS-1$
        final String diskTypeSearchPrefix = "disk_type = "; //$NON-NLS-1$
        final String searchConjunctionAnd = "and "; //$NON-NLS-1$
        final String searchRegexDisksSearchPrefix = "^\\s*(disk(s)?\\s*(:)+)+\\s*"; //$NON-NLS-1$
        final String searchRegexDiskTypeClause = "\\s*((and|or)\\s+)?disk_type\\s*=\\s*\\S+"; //$NON-NLS-1$
        final String searchRegexStartConjunction = "^\\s*(and|or)\\s*"; //$NON-NLS-1$
        final String searchRegexFlags = "ig"; //$NON-NLS-1$
        final String space = " "; //$NON-NLS-1$
        final String empty = ""; //$NON-NLS-1$
        final String colon = ":"; //$NON-NLS-1$
        RegExp searchPatternDisksSearchPrefix = RegExp.compile(searchRegexDisksSearchPrefix, searchRegexFlags);
        RegExp searchPatternDiskTypeClause = RegExp.compile(searchRegexDiskTypeClause, searchRegexFlags);
        RegExp searchPatternStartConjunction = RegExp.compile(searchRegexStartConjunction, searchRegexFlags);
        // null view type means "all disks" -> no disk_type clause at all.
        String diskTypePostfix = diskViewType != null ?
                ((DiskStorageType) diskViewType).name().toLowerCase() + space : null;
        String diskTypeClause = diskTypePostfix != null ?
                diskTypeSearchPrefix + diskTypePostfix : empty;
        String inputSearchString = commonModelProvider.get().getSearchString().trim();
        String inputSearchStringPrefix = commonModelProvider.get().getSearchStringPrefix().trim();
        // Split a raw "Disks: <terms>" string into prefix and terms if needed.
        if (!inputSearchString.isEmpty() && inputSearchStringPrefix.isEmpty()) {
            int indexOfColon = inputSearchString.indexOf(colon);
            inputSearchStringPrefix = inputSearchString.substring(0, indexOfColon + 1).trim();
            inputSearchString = inputSearchString.substring(indexOfColon + 1).trim();
        }
        if (inputSearchStringPrefix.isEmpty()) {
            inputSearchStringPrefix = disksSearchPrefix;
            inputSearchString = empty;
        }
        // Drop any stale disk_type clause from the prefix, then append the new one.
        String searchStringPrefixRaw = searchPatternDiskTypeClause
                .replace(inputSearchStringPrefix, empty).trim();
        String searchStringPrefix;
        if (diskTypeClause.equals(empty)) {
            searchStringPrefix = searchStringPrefixRaw + space;
        }
        else {
            searchStringPrefix = searchStringPrefixRaw + space
                    + (searchPatternDisksSearchPrefix.test(searchStringPrefixRaw) ? empty : searchConjunctionAnd)
                    + diskTypeClause;
        }
        // Remove stale disk_type clauses and dangling leading conjunctions from the terms.
        inputSearchString = searchPatternDiskTypeClause
                .replace(inputSearchString, empty);
        inputSearchString = searchPatternStartConjunction
                .replace(inputSearchString, empty);
        String searchString;
        if (searchPatternDisksSearchPrefix.test(searchStringPrefix) || inputSearchString.isEmpty()) {
            searchString = inputSearchString;
        }
        else {
            searchString = searchConjunctionAnd + inputSearchString;
        }
        commonModelProvider.get().setSearchStringPrefix(searchStringPrefix);
        commonModelProvider.get().setSearchString(searchString);
        getTable().getSelectionModel().clear();
        getMainModel().setItems(null);
        getMainModel().setSearchString(commonModelProvider.get().getEffectiveSearchString());
        getMainModel().search();
    }
}
|
package com.camnter.hook.loadedapk.classloader.hook.loadedapk;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager;
import android.os.Build;
import android.support.annotation.NonNull;
import android.util.DisplayMetrics;
import com.camnter.hook.loadedapk.classloader.host.AssetsUtils;
import java.io.File;
import java.lang.ref.WeakReference;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;
/**
 * Hooks {@code ActivityThread#mPackages} (an
 * {@code ArrayMap<String, WeakReference<LoadedApk>>}) so that a plugin apk gets
 * its own LoadedApk entry backed by a custom ClassLoader.
 *
 * @author CaMnter
 */
@SuppressWarnings("DanglingJavadoc")
public final class LoadedApkHooker {

    /**
     * Strong references to the hooked LoadedApk instances, keyed by plugin
     * package name. ActivityThread#mPackages only holds weak references, so
     * without this map the plugin LoadedApk could be garbage-collected.
     */
    public static Map<String, Object> LOADEDAPK_MAP = new HashMap<>();

    /**
     * Builds a LoadedApk for the plugin apk via
     * {@code ActivityThread#getPackageInfoNoCheck}, swaps in a plugin-aware
     * ClassLoader, and registers it in {@code ActivityThread#mPackages}.
     *
     * @param apkFile plugin apk file
     * @param context host context
     * @throws Exception on any reflection failure
     */
    @SuppressWarnings("unchecked")
    public static void hookLoadedApkForActivityThread(@NonNull final File apkFile,
                                                      @NonNull final Context context)
        throws Exception {
        /**
         * Grab the current ActivityThread instance.
         */
        final Class<?> activityThreadClass = Class.forName("android.app.ActivityThread");
        final Method currentActivityThreadMethod = activityThreadClass.getDeclaredMethod(
            "currentActivityThread");
        currentActivityThreadMethod.setAccessible(true);
        final Object currentActivityThread = currentActivityThreadMethod.invoke(null);
        /**
         * ActivityThread # ArrayMap<String, WeakReference<LoadedApk>> mPackages
         */
        final Field mPackagesField = activityThreadClass.getDeclaredField("mPackages");
        mPackagesField.setAccessible(true);
        final Map mPackages = (Map) mPackagesField.get(currentActivityThread);
        /**
         * CompatibilityInfo # static CompatibilityInfo DEFAULT_COMPATIBILITY_INFO
         */
        final Class<?> compatibilityInfoClass = Class.forName(
            "android.content.res.CompatibilityInfo");
        final Field defaultCompatibilityInfoField = compatibilityInfoClass.getDeclaredField(
            "DEFAULT_COMPATIBILITY_INFO");
        defaultCompatibilityInfoField.setAccessible(true);
        final Object defaultCompatibilityInfo = defaultCompatibilityInfoField.get(null);
        /**
         * Parse the plugin apk's ApplicationInfo.
         */
        final ApplicationInfo applicationInfo = getApplicationInfo(apkFile, context);
        /**
         * ActivityThread # getPackageInfoNoCheck(ApplicationInfo, CompatibilityInfo)
         * returns a LoadedApk for the plugin package.
         */
        final Method getPackageInfoNoCheckMethod = activityThreadClass.getDeclaredMethod(
            "getPackageInfoNoCheck", ApplicationInfo.class, compatibilityInfoClass);
        final Object loadedApk = getPackageInfoNoCheckMethod.invoke(currentActivityThread,
            applicationInfo, defaultCompatibilityInfo);
        /**
         * Build the plugin ClassLoader (dex + odex + native lib dirs).
         */
        final String odexPath = AssetsUtils.getPluginOptDexDir(context, applicationInfo.packageName)
            .getPath();
        final String libDir = AssetsUtils.getPluginLibDir(context, applicationInfo.packageName)
            .getPath();
        final ClassLoader classLoader = new SmartClassloader(
            apkFile.getPath(),
            odexPath,
            libDir,
            ClassLoader.getSystemClassLoader()
        );
        /**
         * Hook LoadedApk # ClassLoader mClassLoader
         */
        final Field mClassLoaderField = loadedApk.getClass().getDeclaredField("mClassLoader");
        mClassLoaderField.setAccessible(true);
        mClassLoaderField.set(loadedApk, classLoader);
        /**
         * Keep a strong reference so the LoadedApk survives GC.
         */
        LOADEDAPK_MAP.put(applicationInfo.packageName, loadedApk);
        /**
         * Register the (weakly referenced) LoadedApk in ActivityThread#mPackages.
         */
        final WeakReference<Object> weakReference = new WeakReference<>(loadedApk);
        mPackages.put(applicationInfo.packageName, weakReference);
    }

    /**
     * Parses the plugin apk and generates its {@link ApplicationInfo} through the
     * hidden {@code PackageParser#generateApplicationInfo}, whose signature
     * differs per API level.
     *
     * @param apkFile plugin apk file
     * @param context host context
     * @return the parsed ApplicationInfo (null only on an unsupported SDK path)
     * @throws Exception on any reflection failure
     */
    @SuppressLint("PrivateApi")
    public static ApplicationInfo getApplicationInfo(@NonNull final File apkFile,
                                                     @NonNull final Context context)
        throws Exception {
        final ApplicationInfo applicationInfo;
        /**
         * PackageParser # parsePackage(...)
         */
        final Class<?> packageParserClass = Class.forName("android.content.pm.PackageParser");
        final int sdkVersion = Build.VERSION.SDK_INT;
        if (sdkVersion < Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
            throw new RuntimeException(
                "[LoadedApkHooker]   the sdk version must >= 14 (4.0.0)");
        }
        final Object packageParser;
        final Object packageObject;
        final Method parsePackageMethod;
        if (sdkVersion >= Build.VERSION_CODES.LOLLIPOP) {
            // >= 5.0.0
            // parsePackage(File packageFile, int flags)
            /**
             * Default-construct PackageParser, then parse the apk into a Package.
             */
            packageParser = packageParserClass.newInstance();
            parsePackageMethod = packageParserClass.getDeclaredMethod("parsePackage",
                File.class, int.class);
            packageObject = parsePackageMethod.invoke(
                packageParser,
                apkFile,
                PackageManager.GET_SERVICES
            );
        } else {
            // >= 4.0.0
            // parsePackage(File sourceFile, String destCodePath, DisplayMetrics metrics, int flags)
            /**
             * Construct PackageParser(String archiveSourcePath), then parse the
             * apk into a Package with the 4-argument overload.
             */
            final String apkFileAbsolutePath = apkFile.getAbsolutePath();
            packageParser = packageParserClass.getConstructor(String.class)
                .newInstance(apkFileAbsolutePath);
            parsePackageMethod = packageParserClass.getDeclaredMethod("parsePackage",
                File.class, String.class, DisplayMetrics.class, int.class);
            packageObject = parsePackageMethod.invoke(
                packageParser,
                apkFile,
                apkFile.getAbsolutePath(),
                context.getResources().getDisplayMetrics(),
                PackageManager.GET_SERVICES
            );
        }
        final Class<?> packageParser$Package = Class.forName(
            "android.content.pm.PackageParser$Package");
        if (sdkVersion >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
            // >= 4.2.0
            // generateApplicationInfo(Package p, int flags, PackageUserState state)
            /**
             * Default PackageUserState is good enough here.
             */
            final Class<?> packageUserStateClass = Class.forName(
                "android.content.pm.PackageUserState");
            final Object defaultUserState = packageUserStateClass.newInstance();
            // android.content.pm.PackageParser#generateApplicationInfo(Package p, int flags, PackageUserState state)
            final Method generateApplicationInfo = packageParserClass.getDeclaredMethod(
                "generateApplicationInfo",
                packageParser$Package, int.class, packageUserStateClass);
            applicationInfo = (ApplicationInfo) generateApplicationInfo.invoke(
                packageParser,
                packageObject,
                0,
                defaultUserState
            );
        } else if (sdkVersion >= Build.VERSION_CODES.JELLY_BEAN) {
            // >= 4.1.0
            // generateApplicationInfo(Package p, int flags, boolean stopped, int enabledState, int userId)
            // android.content.pm.PackageParser#generateApplicationInfo(Package p, int flags, boolean stopped, int enabledState, int userId)
            final Class<?> userHandler = Class.forName("android.os.UserId");
            final Method getCallingUserIdMethod = userHandler.getDeclaredMethod("getCallingUserId");
            final int userId = (Integer) getCallingUserIdMethod.invoke(null);
            // BUGFIX: the lookup previously listed only 4 parameter types while
            // invoke() passes 5 arguments; the 4.1 signature has two int params
            // (enabledState and userId), so int.class must appear twice.
            Method generateApplicationInfo = packageParserClass.getDeclaredMethod(
                "generateApplicationInfo",
                packageParser$Package, int.class, boolean.class, int.class, int.class);
            applicationInfo = (ApplicationInfo) generateApplicationInfo.invoke(
                packageParser,
                packageObject,
                0,
                false,
                PackageManager.COMPONENT_ENABLED_STATE_DEFAULT,
                userId
            );
        } else if (sdkVersion >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
            // >= 4.0.0
            // generateApplicationInfo(Package p, int flags)
            // android.content.pm.PackageParser#generateApplicationInfo(Package p, int flags)
            Method generateApplicationInfo = packageParserClass.getDeclaredMethod(
                "generateApplicationInfo",
                packageParser$Package, int.class);
            applicationInfo = (ApplicationInfo) generateApplicationInfo.invoke(
                packageParser,
                packageObject,
                0
            );
        } else {
            // Unreachable: the version guard above already rejected < 14.
            applicationInfo = null;
        }
        return applicationInfo;
    }
}
|
package net.sf.mmm.search.indexer.impl;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.io.Reader;
import java.io.StringWriter;
import java.util.Properties;
import javax.annotation.PostConstruct;
import javax.annotation.Resource;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.impl.NoOpLog;
import net.sf.mmm.search.api.SearchEntry;
import net.sf.mmm.search.indexer.api.MutableSearchEntry;
import net.sf.mmm.search.indexer.api.SearchIndexer;
import net.sf.mmm.search.parser.api.ContentParser;
import net.sf.mmm.search.parser.api.ContentParserService;
import net.sf.mmm.search.parser.impl.ContentParserServiceImpl;
import net.sf.mmm.util.filter.FileFilterAdapter;
import net.sf.mmm.util.filter.Filter;
import net.sf.mmm.util.filter.FilterRuleChainPlainParser;
import net.sf.mmm.util.io.FileUtil;
/**
 * This class contains functionality to recursively walk through directories and
 * add contained files to a search index.
 *
 * @author Joerg Hohwiller (hohwille at users.sourceforge.net)
 */
public class DirectorySearchIndexer {

  /** @see #getIndexer() */
  private SearchIndexer indexer;

  /** @see #getFilter() */
  private FileFilter filter;

  /** @see #getLogger() */
  private Log logger;

  /** the parser service */
  private ContentParserService parserService;

  /**
   * The constructor.
   */
  public DirectorySearchIndexer() {

    super();
  }

  /**
   * The constructor.
   *
   * @param indexer is the indexer to use.
   */
  public DirectorySearchIndexer(SearchIndexer indexer) {

    super();
    this.indexer = indexer;
  }

  /**
   * @return the indexer
   */
  public SearchIndexer getIndexer() {

    return this.indexer;
  }

  /**
   * @param indexer
   *        the indexer to set
   */
  @Resource
  public void setIndexer(SearchIndexer indexer) {

    this.indexer = indexer;
  }

  /**
   * @return the filter
   */
  public FileFilter getFilter() {

    return this.filter;
  }

  /**
   * @see #setFilterByConfiguration(Reader)
   *
   * @param filter
   *        the filter to set
   */
  @Resource
  public void setFilter(FileFilter filter) {

    this.filter = filter;
  }

  /**
   * This method sets the filter by a reader pointing to a configuration file.
   * For the format of the configuration see {@link FilterRuleChainPlainParser}.
   *
   * @param reader
   *        is a fresh reader to the configuration. It will be closed at the end
   *        of this method (on success and in an exceptional state).
   * @throws IOException
   *         if the operation failed with an I/O error.
   */
  public void setFilterByConfiguration(Reader reader) throws IOException {

    // Close the reader as promised by the javadoc (was previously leaked).
    try {
      FilterRuleChainPlainParser parser = new FilterRuleChainPlainParser();
      Filter<String> stringFilter = parser.parse(reader);
      this.filter = FileFilterAdapter.convertStringFilter(stringFilter);
    } finally {
      reader.close();
    }
  }

  /**
   * @return the logger
   */
  public Log getLogger() {

    return this.logger;
  }

  /**
   * @param logger
   *        the logger to set
   */
  @Resource
  public void setLogger(Log logger) {

    this.logger = logger;
  }

  /**
   * @return the parserService
   */
  public ContentParserService getParserService() {

    return this.parserService;
  }

  /**
   * @param parserService
   *        the parserService to set
   */
  @Resource
  public void setParserService(ContentParserService parserService) {

    this.parserService = parserService;
  }

  /**
   * This method initializes this object, providing defaults for any
   * dependency that was not injected.
   */
  @PostConstruct
  public void initialize() {

    if (this.parserService == null) {
      this.parserService = new ContentParserServiceImpl();
    }
    if (this.logger == null) {
      this.logger = new NoOpLog();
    }
  }

  /**
   * This method starts the indexing from the given <code>directory</code>.
   *
   * @see #indexDirectory(String, File)
   *
   * @param directory
   *        is the directory to index recursively.
   */
  public void indexDirectory(File directory) {

    indexDirectory(null, directory);
  }

  /**
   * This method starts the indexing from the given <code>directory</code>
   * adding the given <code>source</code> as metadata.
   *
   * @param source
   *        is the {@link SearchEntry#getSource() source} attribute of the
   *        indexed entries.
   * @param directory
   *        is the directory to index recursively.
   */
  public void indexDirectory(String source, File directory) {

    indexDirectory(source, directory, "");
  }

  /**
   * This method recursively indexes the given <code>directory</code>.
   *
   * @param source
   *        is the {@link SearchEntry#getSource() source} attribute of the
   *        indexed entries.
   * @param directory
   *        is the directory to index recursively.
   * @param relativePath
   *        is the path of <code>directory</code> relative to the directory
   *        where the indexing was started.
   */
  public void indexDirectory(String source, File directory, String relativePath) {

    File[] children = directory.listFiles();
    if (children == null) {
      // listFiles() returns null if the path is not a directory or on I/O error
      // (the previous code crashed with a NullPointerException here).
      this.logger.warn("Could not list directory " + directory.getPath());
      return;
    }
    for (File child : children) {
      if ((this.filter == null) || (this.filter.accept(child))) {
        if (child.isDirectory()) {
          indexDirectory(source, child, relativePath + "/" + child.getName());
        } else if (child.isFile()) {
          indexFile(source, child, relativePath);
        }
      } else {
        this.logger.debug("Filtered " + child.getPath());
      }
    }
  }

  /**
   * This method gets the property <code>key</code> from the given
   * <code>properties</code>. It will also {@link String#trim() trim} the
   * properties value.
   *
   * @param properties
   *        is where to get the property from.
   * @param key
   *        is the name of the requested property.
   * @return the trimmed property or <code>null</code> if the property is NOT
   *         set or its trimmed value is the empty string.
   */
  protected String getProperty(Properties properties, String key) {

    String value = properties.getProperty(key);
    if (value != null) {
      value = value.trim();
      if (value.length() == 0) {
        value = null;
      }
    }
    return value;
  }

  /**
   * This method indexes a single file.
   *
   * @param source
   *        is the {@link SearchEntry#getSource() source} attribute of the
   *        indexed entry.
   * @param file
   *        is the file to index.
   * @param relativePath
   *        is the path of the folder where the file is located relative to the
   *        path given when the indexing was started. This is used to build the
   *        {@link SearchEntry#getUri() URI} of the file in the search index.
   */
  public void indexFile(String source, File file, String relativePath) {

    this.logger.debug("Indexing " + file.getPath());
    String filename = file.getName();
    String fullPath = relativePath + "/" + filename;
    long fileSize = file.length();
    String extension = FileUtil.getExtension(filename);
    MutableSearchEntry entry = this.indexer.createEntry();
    entry.setUri(fullPath);
    entry.setSize(fileSize);
    ContentParser parser = null;
    if (extension != null) {
      entry.setType(extension);
      parser = this.parserService.getParser(extension);
    }
    if (parser != null) {
      try {
        InputStream inputStream = new FileInputStream(file);
        try {
          Properties properties = parser.parse(inputStream, fileSize);
          String title = getProperty(properties, ContentParser.PROPERTY_KEY_TITLE);
          if (title != null) {
            entry.setTitle(title);
          }
          String author = getProperty(properties, ContentParser.PROPERTY_KEY_AUTHOR);
          if (author != null) {
            entry.setAuthor(author);
          }
          String text = getProperty(properties, ContentParser.PROPERTY_KEY_TEXT);
          if (text != null) {
            entry.setText(text);
          }
        } catch (Exception e) {
          this.logger.error("Failed to extract data from file: " + file.getPath(), e);
          // TODO: this is just a temporary hack!!!
          StringWriter sw = new StringWriter();
          e.printStackTrace(new PrintWriter(sw));
          entry.setText(sw.toString());
        } finally {
          // Fixed resource leak: the stream was previously never closed.
          try {
            inputStream.close();
          } catch (IOException e) {
            this.logger.warn("Failed to close " + file.getPath(), e);
          }
        }
      } catch (FileNotFoundException e) {
        // Fixed log typo ("Filed" -> "File").
        this.logger.error("File disappeared while indexing " + file.getPath());
      }
    }
    if (source != null) {
      entry.setSource(source);
    }
    this.indexer.add(entry);
  }

}
|
package net.ontrack.extension.svnexplorer;
import net.ontrack.core.model.SearchResult;
import net.ontrack.extension.api.ExtensionManager;
import net.ontrack.extension.issue.IssueService;
import net.ontrack.extension.issue.IssueServiceFactory;
import net.ontrack.extension.issue.IssueServiceSummary;
import net.ontrack.extension.svn.service.RepositoryService;
import net.ontrack.extension.svn.service.SubversionService;
import net.ontrack.extension.svn.service.model.SVNRepository;
import net.ontrack.service.GUIService;
import net.ontrack.service.SearchProvider;
import net.sf.jstring.LocalizableMessage;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
/**
 * Search provider that resolves issue keys against the indexed Subversion
 * repositories of the SVN explorer extension.
 */
@Component
public class IssueSearchProvider implements SearchProvider {

    private final RepositoryService repositoryService;
    private final SubversionService subversionService;
    private final GUIService guiService;
    private final IssueServiceFactory issueServiceFactory;
    private final ExtensionManager extensionManager;

    @Autowired
    public IssueSearchProvider(RepositoryService repositoryService, SubversionService subversionService, GUIService guiService, IssueServiceFactory issueServiceFactory, ExtensionManager extensionManager) {
        this.repositoryService = repositoryService;
        this.subversionService = subversionService;
        this.guiService = guiService;
        this.issueServiceFactory = issueServiceFactory;
        this.extensionManager = extensionManager;
    }

    /**
     * A token is searchable as soon as at least one registered issue service
     * recognises it as an issue key.
     */
    @Override
    public boolean isTokenSearchable(String token) {
        for (IssueServiceSummary summary : issueServiceFactory.getAllServices()) {
            if (issueServiceFactory.getServiceByName(summary.getId()).isIssue(token)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Returns one search result per repository in which the given issue key is
     * indexed; empty when the SVN explorer extension is disabled.
     */
    @Override
    public Collection<SearchResult> search(String key) {
        // Guard clause: nothing to search when the extension is turned off.
        if (!extensionManager.isExtensionEnabled(SVNExplorerExtension.EXTENSION)) {
            return Collections.emptySet();
        }
        Collection<SearchResult> matches = new ArrayList<>();
        for (SVNRepository repository : repositoryService.getAllRepositories()) {
            // Skip repositories in which this issue key was never indexed.
            if (!subversionService.isIndexedIssue(repository, key)) {
                continue;
            }
            matches.add(new SearchResult(
                    key,
                    new LocalizableMessage("svnexplorer.search.key", key, repository.getName()),
                    guiService.toGUI(String.format("extension/svnexplorer/repository/%d/issue/%s", repository.getId(), key)),
                    80
            ));
        }
        return matches;
    }
}
|
package org.metaborg.spoofax.meta.core.pluto.build.main;
import java.io.File;
import java.io.IOException;
import java.util.Collection;
import java.util.List;
import javax.annotation.Nullable;
import org.apache.commons.io.FileUtils;
import org.metaborg.core.config.JSGLRVersion;
import org.metaborg.core.config.Sdf2tableVersion;
import org.metaborg.core.language.LanguageIdentifier;
import org.metaborg.sdf2table.grammar.NormGrammar;
import org.metaborg.sdf2table.parsetable.ParseTable;
import org.metaborg.spoofax.meta.core.config.SdfVersion;
import org.metaborg.spoofax.meta.core.config.StrategoBuildSetting;
import org.metaborg.spoofax.meta.core.config.StrategoFormat;
import org.metaborg.spoofax.meta.core.pluto.SpoofaxBuilder;
import org.metaborg.spoofax.meta.core.pluto.SpoofaxBuilderFactory;
import org.metaborg.spoofax.meta.core.pluto.SpoofaxBuilderFactoryFactory;
import org.metaborg.spoofax.meta.core.pluto.SpoofaxContext;
import org.metaborg.spoofax.meta.core.pluto.SpoofaxInput;
import org.metaborg.spoofax.meta.core.pluto.build.MakePermissive;
import org.metaborg.spoofax.meta.core.pluto.build.PackNormalizedSdf;
import org.metaborg.spoofax.meta.core.pluto.build.PackNormalizedSdfLegacy;
import org.metaborg.spoofax.meta.core.pluto.build.Rtg2Sig;
import org.metaborg.spoofax.meta.core.pluto.build.Sdf2Parenthesize;
import org.metaborg.spoofax.meta.core.pluto.build.Sdf2ParenthesizeLegacy;
import org.metaborg.spoofax.meta.core.pluto.build.Sdf2Rtg;
import org.metaborg.spoofax.meta.core.pluto.build.Sdf2Table;
import org.metaborg.spoofax.meta.core.pluto.build.Sdf2TableLegacy;
import org.metaborg.spoofax.meta.core.pluto.build.StrIncr;
import org.metaborg.spoofax.meta.core.pluto.build.Strj;
import org.metaborg.spoofax.meta.core.pluto.build.Typesmart;
import org.metaborg.spoofax.meta.core.pluto.build.misc.GetStrategoMix;
import org.metaborg.util.cmd.Arguments;
import com.google.common.collect.Lists;
import build.pluto.builder.BuildRequest;
import build.pluto.dependency.Origin;
import build.pluto.output.None;
import build.pluto.output.OutputPersisted;
import build.pluto.stamp.FileExistsStamper;
/**
 * Pluto builder that generates all sources of a Spoofax language project:
 * parse tables (via the legacy sdf2table or the newer Java-based/dynamic
 * generators), Stratego signatures and parenthesizers, meta-SDF parse tables
 * for concrete-syntax extensions, and compiled Stratego code.
 */
public class GenerateSourcesBuilder extends SpoofaxBuilder<GenerateSourcesBuilder.Input, None> {
    /** Immutable configuration for one {@link GenerateSourcesBuilder} run. */
    public static class Input extends SpoofaxInput {
        private static final long serialVersionUID = -2379365089609792204L;

        public final String languageId;
        public final @Nullable Collection<LanguageIdentifier> sourceDeps;
        public final @Nullable String sdfModule;
        public final @Nullable Boolean sdfEnabled;
        public final @Nullable File sdfFile;
        public final SdfVersion sdfVersion;
        public final Sdf2tableVersion sdf2tableVersion;
        public final JSGLRVersion jsglrVersion;
        public final @Nullable File sdfExternalDef;
        public final List<File> packSdfIncludePaths;
        public final Arguments packSdfArgs;
        public final @Nullable String sdfCompletionModule;
        public final @Nullable File sdfCompletionFile;
        public final @Nullable List<String> sdfMetaModules;
        public final @Nullable List<File> sdfMetaFiles;
        public final @Nullable File strFile;
        public final @Nullable String strJavaPackage;
        public final @Nullable String strJavaStratPackage;
        public final @Nullable File strJavaStratFile;
        public final StrategoFormat strFormat;
        public final @Nullable File strExternalJar;
        public final @Nullable String strExternalJarFlags;
        public final List<File> strjIncludeDirs;
        public final List<File> strjIncludeFiles;
        public final Arguments strjArgs;
        public final StrategoBuildSetting strBuildSetting;

        public Input(SpoofaxContext context, String languageId, Collection<LanguageIdentifier> sourceDeps,
            @Nullable Boolean sdfEnabled, @Nullable String sdfModule, @Nullable File sdfFile, JSGLRVersion jsglrVersion,
            SdfVersion sdfVersion, Sdf2tableVersion sdf2tableVersion, @Nullable File sdfExternalDef,
            List<File> packSdfIncludePaths, Arguments packSdfArgs, @Nullable String sdfCompletionModule,
            @Nullable File sdfCompletionFile, @Nullable List<String> sdfMetaModules, @Nullable List<File> sdfMetaFiles,
            @Nullable File strFile, @Nullable String strJavaPackage, @Nullable String strJavaStratPackage,
            @Nullable File strJavaStratFile, StrategoFormat strFormat, @Nullable File strExternalJar,
            @Nullable String strExternalJarFlags, List<File> strjIncludeDirs, List<File> strjIncludeFiles,
            Arguments strjArgs, StrategoBuildSetting strBuildSetting) {
            super(context);
            this.languageId = languageId;
            this.sdfEnabled = sdfEnabled;
            this.sourceDeps = sourceDeps;
            this.sdfModule = sdfModule;
            this.sdfFile = sdfFile;
            this.jsglrVersion = jsglrVersion;
            this.sdfVersion = sdfVersion;
            this.sdf2tableVersion = sdf2tableVersion;
            this.sdfExternalDef = sdfExternalDef;
            this.packSdfIncludePaths = packSdfIncludePaths;
            this.packSdfArgs = packSdfArgs;
            this.sdfCompletionModule = sdfCompletionModule;
            this.sdfCompletionFile = sdfCompletionFile;
            this.sdfMetaModules = sdfMetaModules;
            this.sdfMetaFiles = sdfMetaFiles;
            this.strFile = strFile;
            this.strJavaPackage = strJavaPackage;
            this.strJavaStratPackage = strJavaStratPackage;
            this.strJavaStratFile = strJavaStratFile;
            this.strFormat = strFormat;
            this.strExternalJar = strExternalJar;
            this.strExternalJarFlags = strExternalJarFlags;
            this.strjIncludeDirs = strjIncludeDirs;
            this.strjIncludeFiles = strjIncludeFiles;
            this.strjArgs = strjArgs;
            this.strBuildSetting = strBuildSetting;
        }
    }

    public static SpoofaxBuilderFactory<Input, None, GenerateSourcesBuilder> factory =
        SpoofaxBuilderFactoryFactory.of(GenerateSourcesBuilder.class, Input.class);

    public GenerateSourcesBuilder(Input input) {
        super(input);
    }

    public static
        BuildRequest<Input, None, GenerateSourcesBuilder, SpoofaxBuilderFactory<Input, None, GenerateSourcesBuilder>>
        request(Input input) {
        return new BuildRequest<>(factory, input);
    }

    public static Origin origin(Input input) {
        return Origin.from(request(input));
    }

    @Override protected String description(Input input) {
        return "Generate sources";
    }

    @Override public File persistentPath(Input input) {
        return context.depPath("generate-sources.dep");
    }

    @Override public None build(GenerateSourcesBuilder.Input input) throws IOException {
        // SDF
        Origin.Builder sdfOriginBuilder = Origin.Builder();
        buildSdf(input, sdfOriginBuilder);
        buildSdfMeta(input, sdfOriginBuilder); // SDF meta-module for creating a Stratego concrete syntax extension parse table
        final Origin sdfOrigin = sdfOriginBuilder.get();
        requireBuild(sdfOrigin);
        // Stratego
        buildStratego(input, sdfOrigin);
        return None.val;
    }

    /**
     * Returns whether SDF-based parse table generation is enabled.
     * {@code sdfEnabled} is a {@code @Nullable Boolean}; the null-safe check
     * avoids an unboxing NPE when it was never configured.
     */
    private static boolean isSdfEnabled(GenerateSourcesBuilder.Input input) {
        return Boolean.TRUE.equals(input.sdfEnabled);
    }

    /** Dispatches to the new or legacy parse table generation pipeline. */
    private void buildSdf(GenerateSourcesBuilder.Input input, Origin.Builder sdfOriginBuilder) throws IOException {
        if(input.sdfModule != null && isSdfEnabled(input)) {
            if(input.sdf2tableVersion == Sdf2tableVersion.java || input.sdf2tableVersion == Sdf2tableVersion.dynamic
                || input.sdf2tableVersion == Sdf2tableVersion.incremental) {
                newParseTableGenerationBuild(input, sdfOriginBuilder);
            } else {
                oldParseTableGenerationBuild(input, sdfOriginBuilder);
            }
        }
    }

    /** New (Java-based) pipeline: parse table, parenthesizer, and completions table. */
    private void newParseTableGenerationBuild(GenerateSourcesBuilder.Input input, Origin.Builder sdfOriginBuilder) throws IOException {
        // Standard parser generation
        final File srcNormDir = toFile(paths.syntaxNormDir());
        final File sdfNormFile = FileUtils.getFile(srcNormDir, input.sdfModule + "-norm.aterm");
        final BuildRequest<?, OutputPersisted<ParseTable>, ?, ?> parseTableGeneration = newParseTableGeneration(input, sdfNormFile, "sdf.tbl", "table.bin");
        sdfOriginBuilder.add(parseTableGeneration);
        // Generate parenthesizer
        final File srcGenPpDir = toFile(paths.syntaxSrcGenPpDir());
        final File parenthesizerOutputFile = FileUtils.getFile(srcGenPpDir, input.sdfModule + "-parenthesize.str");
        Sdf2Parenthesize.Input parenthesizeInput = new Sdf2Parenthesize.Input(context, parseTableGeneration, input.sdfModule, parenthesizerOutputFile);
        final BuildRequest<?, ?, ?, ?> parenthesize = Sdf2Parenthesize.request(parenthesizeInput);
        sdfOriginBuilder.add(parenthesize);
        // Parser generation for completions
        if(input.sdfCompletionFile != null && isSdfEnabled(input)) {
            final BuildRequest<?, OutputPersisted<ParseTable>, ?, ?> parseTableGenerationCompletions = newParseTableGeneration(input, input.sdfCompletionFile, "sdf-completions.tbl", "table-completions.bin");
            sdfOriginBuilder.add(parseTableGenerationCompletions);
        }
    }

    /**
     * Builds the request that packs the normalized grammar and generates a
     * parse table ({@code tableFilename}) plus its persisted binary form.
     */
    private BuildRequest<?, OutputPersisted<ParseTable>, ?, ?> newParseTableGeneration(GenerateSourcesBuilder.Input input, File sdfNormFile, String tableFilename, String persistedTableFilename) throws IOException {
        final File targetMetaborgDir = toFile(paths.targetMetaborgDir());
        final File tableFile = FileUtils.getFile(targetMetaborgDir, tableFilename);
        final File persistedTableFile = FileUtils.getFile(targetMetaborgDir, persistedTableFilename);
        final boolean dynamicGeneration = (input.sdf2tableVersion == Sdf2tableVersion.dynamic
            || input.sdf2tableVersion == Sdf2tableVersion.incremental);
        final boolean dataDependent = (input.jsglrVersion == JSGLRVersion.dataDependent);
        final boolean layoutSensitive = (input.jsglrVersion == JSGLRVersion.layoutSensitive);
        BuildRequest<?, OutputPersisted<NormGrammar>, ?, ?> packNormGrammar = PackNormalizedSdf.request(new PackNormalizedSdf.Input(context, sdfNormFile, input.sourceDeps));
        Sdf2Table.Input sdf2TableInput = new Sdf2Table.Input(context, packNormGrammar, tableFile, persistedTableFile, dynamicGeneration, dataDependent, layoutSensitive);
        return Sdf2Table.request(sdf2TableInput);
    }

    /** Legacy pipeline: pack SDF, signatures, parenthesizer, make-permissive, sdf2table. */
    private void oldParseTableGenerationBuild(GenerateSourcesBuilder.Input input, Origin.Builder sdfOriginBuilder) throws IOException {
        File srcGenSyntaxDir = toFile(paths.syntaxSrcGenDir());
        // Packing normalized .sdf files in a single .def file
        PackSdfBuild packSdfBuild = oldParseTableGenerationPack(input, srcGenSyntaxDir, input.sdfModule, input.sdfFile, input.sdfExternalDef);
        if(packSdfBuild.file != null) {
            // Get Stratego signatures file when using an external .def, or when using sdf2, from the SDF .def file
            if(input.sdfExternalDef != null || input.sdfVersion == SdfVersion.sdf2) {
                final Origin sigOrigin = oldParseTableGenerationSignatures(input, sdfOriginBuilder, packSdfBuild, srcGenSyntaxDir, input.sdfModule, input.sdfExternalDef);
                sdfOriginBuilder.add(sigOrigin);
            }
            // Get Stratego parenthesizer file, from the SDF .def file
            Origin parenthesizeOrigin = oldParseTableGenerationParenthesize(input, sdfOriginBuilder, packSdfBuild, input.sdfModule);
            sdfOriginBuilder.add(parenthesizeOrigin);
            // Standard parser generation
            MakePermissiveBuild makePermissiveBuild = oldParseTableGenerationMakePermissive(packSdfBuild, srcGenSyntaxDir, input.sdfModule);
            // NOTE(review): unlike the completion table below, this origin is
            // required directly instead of being added to sdfOriginBuilder —
            // confirm this asymmetry is intentional.
            final Origin sdfOrigin = oldParseTableGeneration(makePermissiveBuild, input.sdfModule, "sdf.tbl", "");
            requireBuild(sdfOrigin);
        }
        // Again packing, make permissive, and generation for completions parse table
        if(input.sdfCompletionFile != null && isSdfEnabled(input)) {
            File srcGenSyntaxCompletionsDir = toFile(paths.syntaxCompletionSrcGenDir());
            PackSdfBuild packSdfCompletionsBuild = oldParseTableGenerationPack(input, srcGenSyntaxCompletionsDir, input.sdfCompletionModule, input.sdfCompletionFile, null);
            MakePermissiveBuild makePermissiveCompletionsBuild = oldParseTableGenerationMakePermissive(packSdfCompletionsBuild, srcGenSyntaxCompletionsDir, input.sdfCompletionModule);
            final Origin sdfCompletionOrigin = oldParseTableGeneration(makePermissiveCompletionsBuild, input.sdfCompletionModule, "sdf-completions.tbl", "completion/");
            sdfOriginBuilder.add(sdfCompletionOrigin);
        }
    }

    /**
     * Resolves the SDF .def file, either from an existing external .def or by
     * running pack-SDF on the grammar specification; both fields of the result
     * may be null when no SDF input is configured.
     */
    private PackSdfBuild oldParseTableGenerationPack(GenerateSourcesBuilder.Input input, File srcGenSyntaxDir, String sdfModule, File sdfFile, File sdfExternalDef) throws IOException {
        final @Nullable File packSdfFile;
        final @Nullable Origin packSdfOrigin;
        if(sdfExternalDef != null) {
            packSdfFile = sdfExternalDef;
            packSdfOrigin = null;
        } else if(sdfFile != null) {
            require(sdfFile, FileExistsStamper.instance);
            if(!sdfFile.exists()) {
                throw new IOException("Main SDF file at " + sdfFile + " does not exist");
            }
            packSdfFile = FileUtils.getFile(srcGenSyntaxDir, sdfModule + ".def");
            packSdfOrigin = PackNormalizedSdfLegacy.origin(new PackNormalizedSdfLegacy.Input(context, sdfModule, sdfFile, packSdfFile,
                input.packSdfIncludePaths, input.packSdfArgs, null));
        } else {
            packSdfFile = null;
            packSdfOrigin = null;
        }
        return new PackSdfBuild(packSdfFile, packSdfOrigin);
    }

    /** Result pair of the pack-SDF step: the .def file and its build origin. */
    private static class PackSdfBuild {
        final @Nullable File file;
        final @Nullable Origin origin;

        PackSdfBuild(File packSdfFile, Origin packSdfOrigin) {
            this.file = packSdfFile;
            this.origin = packSdfOrigin;
        }
    }

    /** Generates the Stratego signatures (.rtg then .str) from the SDF .def file. */
    private Origin oldParseTableGenerationSignatures(GenerateSourcesBuilder.Input input, Origin.Builder sdfOriginBuilder, PackSdfBuild packSdfBuild, File srcGenSyntaxDir, String sdfModule, File sdfExternalDef) {
        final File srcGenSigDir = toFile(paths.syntaxSrcGenSignatureDir());
        final File rtgFile = FileUtils.getFile(srcGenSigDir, sdfModule + ".rtg");
        final Origin rtgOrigin = Sdf2Rtg.origin(new Sdf2Rtg.Input(context, packSdfBuild.file, rtgFile, sdfModule, packSdfBuild.origin));
        final File sigFile = FileUtils.getFile(srcGenSigDir, sdfModule + ".str");
        final String sigModule = "signatures/" + sdfModule;
        final Origin sigOrigin = Rtg2Sig.origin(new Rtg2Sig.Input(context, rtgFile, sigFile, sigModule, rtgOrigin));
        return sigOrigin;
    }

    /** Generates the Stratego parenthesizer from the SDF .def file. */
    private Origin oldParseTableGenerationParenthesize(GenerateSourcesBuilder.Input input, Origin.Builder sdfOriginBuilder, PackSdfBuild packSdfBuild, String sdfModule) {
        final File srcGenPpDir = toFile(paths.syntaxSrcGenPpDir());
        final File parenthesizeFile = FileUtils.getFile(srcGenPpDir, sdfModule + "-parenthesize.str");
        final String parenthesizeModule = "pp/" + sdfModule + "-parenthesize";
        final Origin parenthesizeOrigin = Sdf2ParenthesizeLegacy.origin(new Sdf2ParenthesizeLegacy.Input(context,
            packSdfBuild.file, parenthesizeFile, sdfModule, parenthesizeModule, packSdfBuild.origin));
        return parenthesizeOrigin;
    }

    /** Runs make-permissive on the packed .def file for error recovery support. */
    private MakePermissiveBuild oldParseTableGenerationMakePermissive(PackSdfBuild packSdfBuild, File srcGenSyntaxDir, String sdfModule) throws IOException {
        final File permissiveDefFile = FileUtils.getFile(srcGenSyntaxDir, sdfModule + "-permissive.def");
        final Origin permissiveDefOrigin = MakePermissive.origin(new MakePermissive.Input(context, packSdfBuild.file, permissiveDefFile, sdfModule, packSdfBuild.origin));
        return new MakePermissiveBuild(permissiveDefFile, permissiveDefOrigin);
    }

    /** Result pair of the make-permissive step: the permissive .def file and its origin. */
    private static class MakePermissiveBuild {
        final @Nullable File file;
        final @Nullable Origin origin;

        MakePermissiveBuild(File permissiveDefFile, Origin permissiveDefOrigin) {
            this.file = permissiveDefFile;
            this.origin = permissiveDefOrigin;
        }
    }

    /** Runs the legacy sdf2table on the permissive .def file. */
    private Origin oldParseTableGeneration(MakePermissiveBuild makePermissiveBuild, String sdfModule, String parseTableFilename, String modulePrefix) throws IOException {
        final File targetMetaborgDir = toFile(paths.targetMetaborgDir());
        final File tableFile = FileUtils.getFile(targetMetaborgDir, parseTableFilename);
        return Sdf2TableLegacy.origin(new Sdf2TableLegacy.Input(context,
            makePermissiveBuild.file, tableFile, modulePrefix + sdfModule, makePermissiveBuild.origin));
    }

    /**
     * Builds parse tables for the meta-SDF modules (Stratego concrete-syntax
     * extensions). No-op when no meta modules are configured.
     */
    private void buildSdfMeta(GenerateSourcesBuilder.Input input, Origin.Builder sdfOriginBuilder) throws IOException {
        // Both lists are @Nullable; guard to avoid an NPE when absent.
        if(input.sdfMetaFiles == null || input.sdfMetaModules == null) {
            return;
        }
        final File srcGenSyntaxDir = toFile(paths.syntaxSrcGenDir());
        for(int i = 0; i < input.sdfMetaFiles.size(); i++) {
            final File sdfMetaFile = input.sdfMetaFiles.get(i);
            if(sdfMetaFile != null) {
                require(sdfMetaFile, FileExistsStamper.instance);
                if(!sdfMetaFile.exists()) {
                    throw new IOException("Main meta-SDF file at " + sdfMetaFile + " does not exist");
                }
                final String sdfMetaModule = input.sdfMetaModules.get(i);
                final BuildRequest<GetStrategoMix.Input, OutputPersisted<File>, GetStrategoMix, SpoofaxBuilderFactory<GetStrategoMix.Input, OutputPersisted<File>, GetStrategoMix>> getStrategoMixRequest =
                    GetStrategoMix.request(new GetStrategoMix.Input(context));
                final File strategoMixFile = requireBuild(getStrategoMixRequest).val();
                final Origin strategoMixOrigin = Origin.from(getStrategoMixRequest);
                final Arguments packSdfMetaArgs = new Arguments(input.packSdfArgs);
                packSdfMetaArgs.addFile("-Idef", strategoMixFile);
                final File packSdfFile = FileUtils.getFile(srcGenSyntaxDir, sdfMetaModule + ".def");
                final Origin packSdfOrigin = PackNormalizedSdfLegacy.origin(new PackNormalizedSdfLegacy.Input(context, sdfMetaModule, sdfMetaFile,
                    packSdfFile, input.packSdfIncludePaths, packSdfMetaArgs, strategoMixOrigin));
                final File permissiveDefFile = FileUtils.getFile(srcGenSyntaxDir, sdfMetaModule + "-permissive.def");
                final Origin permissiveDefOrigin = MakePermissive.origin(
                    new MakePermissive.Input(context, packSdfFile, permissiveDefFile, sdfMetaModule, packSdfOrigin));
                final File transDir = toFile(paths.transDir());
                final File tableFile = FileUtils.getFile(transDir, sdfMetaModule + ".tbl");
                Origin sdfMetaOrigin = Sdf2TableLegacy.origin(new Sdf2TableLegacy.Input(context, permissiveDefFile,
                    tableFile, sdfMetaModule, permissiveDefOrigin));
                sdfOriginBuilder.add(sdfMetaOrigin);
                requireBuild(sdfMetaOrigin);
            }
        }
    }

    /**
     * Compiles the main Stratego program (ctree or Java backend, incremental or
     * batch) and exports typesmart information. No-op when no Stratego main
     * file is configured.
     */
    private void buildStratego(GenerateSourcesBuilder.Input input, Origin sdfOrigin) throws IOException {
        final File targetMetaborgDir = toFile(paths.targetMetaborgDir());
        final File strFile = input.strFile;
        if(strFile != null) {
            require(strFile, FileExistsStamper.instance);
            if(!strFile.exists()) {
                throw new IOException("Main Stratego file at " + strFile + " does not exist");
            }
            boolean buildStrJavaStrat = input.strJavaStratPackage != null && input.strJavaStratFile != null;
            if(buildStrJavaStrat) {
                require(input.strJavaStratFile, FileExistsStamper.instance);
                if(!input.strJavaStratFile.exists()) {
                    throw new IOException(
                        "Main Stratego Java strategies file at " + input.strJavaStratFile + " does not exist");
                }
            }
            final Arguments extraArgs = new Arguments();
            extraArgs.addAll(input.strjArgs);
            final File outputFile;
            final File depPath;
            if(input.strFormat == StrategoFormat.ctree) {
                outputFile = FileUtils.getFile(targetMetaborgDir, "stratego.ctree");
                depPath = outputFile;
                extraArgs.add("-F"); // ctree backend: emit core format, no Java generation
            } else {
                depPath = toFile(paths.strSrcGenJavaTransDir(input.languageId));
                outputFile = FileUtils.getFile(depPath, "Main.java");
                extraArgs.add("-la", "java-front");
                if(buildStrJavaStrat) {
                    extraArgs.add("-la", input.strJavaStratPackage);
                }
            }
            if(input.strExternalJarFlags != null) {
                extraArgs.addLine(input.strExternalJarFlags);
            }
            final File cacheDir = toFile(paths.strCacheDir());
            if(input.strBuildSetting == StrategoBuildSetting.incremental) {
                final StrIncr.Input strIncrInput = new StrIncr.Input(context, strFile, input.strJavaPackage,
                    input.strjIncludeDirs, input.strjIncludeFiles, cacheDir, extraArgs, depPath, sdfOrigin);
                requireBuild(StrIncr.request(strIncrInput));
            } else {
                final Strj.Input strjInput =
                    new Strj.Input(context, strFile, outputFile, depPath, input.strJavaPackage, true, true,
                        input.strjIncludeDirs, input.strjIncludeFiles, Lists.newArrayList(), cacheDir, extraArgs,
                        sdfOrigin);
                final Origin strjOrigin = Strj.origin(strjInput);
                requireBuild(strjOrigin);
            }
            // Typesmart
            final File typesmartExportedFile = toFile(paths.strTypesmartExportedFile());
            final Typesmart.Input typesmartInput =
                new Typesmart.Input(context, input.strFile, input.strjIncludeDirs, typesmartExportedFile, sdfOrigin);
            final Origin typesmartOrigin = Typesmart.origin(typesmartInput);
            requireBuild(typesmartOrigin);
        }
    }
}
|
package org.openwms.common.domain.types;
/**
 * A Target is either a physical or a logical endpoint of any kind of order in a
 * warehouse. For example, a <code>TransportOrder</code> has a Target set, to
 * which the <code>TransportUnit</code> shall be moved.
 *
 * @GlossaryTerm
 * @author <a href="mailto:scherrer@openwms.org">Heiko Scherrer</a>
 * @version $Revision$
 * @since 0.1
 */
public interface Target {
}
|
package de.evorepair.analysis.viewer.viewer;
import java.util.ArrayList;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IFolder;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.ecore.util.EContentAdapter;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.SashForm;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.SelectionListener;
import org.eclipse.swt.layout.FillLayout;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.List;
import org.eclipse.ui.IEditorInput;
import org.eclipse.ui.IFileEditorInput;
import de.christophseidl.util.ecore.EcoreIOUtil;
import de.darwinspl.feature.graphical.configurator.viewer.DwFeatureModelConfiguratorViewer;
import eu.hyvar.feature.configuration.HyConfiguration;
import eu.hyvar.feature.configuration.util.HyConfigurationUtil;
/**
 * Configurator viewer extended with a side panel that lists repair suggestions
 * (alternative configurations found in the project's {@value #SUGGESTIONS_FOLDER}
 * folder) and lets the user apply one of them to the original configuration.
 */
public class EvoConfigurationRepairSuggestionViewer extends DwFeatureModelConfiguratorViewer{
    /** Project-relative folder that is scanned for suggestion configurations. */
    public static final String SUGGESTIONS_FOLDER = ".solutions";

    /**
     * Instance of the actual resource that will be overriden
     */
    private HyConfiguration configuration;

    /**
     * Contains all possible solutions for an anomaly that can be found in the solution folder
     */
    java.util.List<HyConfiguration> suggestions = new ArrayList<>();

    /**
     * the widget that shows all possible suggestions
     */
    List suggestionList;

    /**
     * Button to apply the selected suggestion
     */
    Button applyButton;

    public HyConfiguration getConfiguration() {
        return configuration;
    }

    public void setConfiguration(HyConfiguration configuration) {
        this.configuration = configuration;
    }

    /**
     * Creates an editor that displays additional widgets on the right side of the editor
     */
    @Override
    public void createPartControl(Composite parent) {
        Composite sash = new Composite(parent, SWT.NONE);
        sash.setLayout(new FillLayout());
        sash.setLayoutData(new GridData(GridData.FILL_BOTH));

        final SashForm sashForm = new SashForm(sash, SWT.HORIZONTAL);
        // Left sash
        super.createEditor(sashForm);
        // Right sash
        createConfigurationPanel(sashForm);
        // Give the editor four times the width of the suggestion panel.
        sashForm.setWeights(new int[] { 4, 1});

        parent.setLayout(new GridLayout(1, false));
        super.createSliderControl(parent);
        registerControlListeners();
    }

    /**
     * Creates the right panel that displays a list with all possible solutions and a button
     * to apply a particular suggestion
     *
     * @param parent the parent composite to create the panel in
     * @return the created panel
     */
    private Composite createConfigurationPanel(Composite parent) {
        Composite configurationPanel = new Composite(parent, SWT.NONE);
        configurationPanel.setLayout(new GridLayout(1, false));

        suggestionList = new List(configurationPanel, SWT.BORDER | SWT.V_SCROLL);
        suggestionList.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true));
        int index = 0;
        for(HyConfiguration suggestion : suggestions) {
            suggestionList.add("Repair Suggestion "+index);
            // Pre-select the entry matching the currently selected configuration;
            // selectedConfiguration may not be set yet at this point.
            if(selectedConfiguration != null && suggestion.getId().equals(selectedConfiguration.getId()))
                suggestionList.select(index);
            index++;
        }

        applyButton = new Button(configurationPanel, SWT.PUSH);
        applyButton.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, false));
        applyButton.setText("Apply Suggestion");

        refreshView();
        return configurationPanel;
    }

    /**
     * Returns the members of the suggestions folder of the current project, or
     * an empty array when the folder cannot be read.
     */
    private IResource[] getFilesFromSolutionFolder() {
        IResource resource = ((IFileEditorInput)getEditorInput()).getFile();
        IFolder folder = resource.getProject().getFolder(EvoConfigurationRepairSuggestionViewer.SUGGESTIONS_FOLDER);
        try {
            return folder.members();
        } catch (CoreException e) {
            e.printStackTrace();
            return new IResource[0];
        }
    }

    /**
     * Sets the input of the editor. Also adds all possible solutions to the suggestions list
     */
    @Override
    protected void setInput(IEditorInput input) {
        super.setInput(input);

        final String configurationExtension = HyConfigurationUtil.getConfigurationModelFileExtensionForXmi();
        for(IResource file : getFilesFromSolutionFolder()) {
            if(file instanceof IFile) {
                // IResource.getFileExtension() returns null for files without an
                // extension; compare with the known extension first to avoid an NPE.
                if(configurationExtension.equals(file.getFileExtension())) {
                    HyConfiguration suggestion = EcoreIOUtil.loadModel((IFile)file);
                    suggestions.add(suggestion);
                }
            }
        }
    }

    /**
     * Register different listeners to react on user input
     */
    @Override
    public void registerControlListeners() {
        super.registerControlListeners();

        suggestionList.addSelectionListener(new SelectionListener() {
            public void widgetSelected(SelectionEvent event) {
                int[] selections = suggestionList.getSelectionIndices();
                // Guard: the list may have no selection yet.
                if(selections.length == 0)
                    return;
                selectedConfiguration = suggestions.get(selections[0]);
                refreshView();
            }

            public void widgetDefaultSelected(SelectionEvent event) {
                widgetSelected(event);
            }
        });

        applyButton.addSelectionListener(new SelectionListener() {
            public void widgetSelected(SelectionEvent event) {
                int[] selections = suggestionList.getSelectionIndices();
                // Guard: clicking apply without a selection must not crash.
                if(selections.length == 0)
                    return;
                applySuggestion(suggestions.get(selections[0]));
            }

            public void widgetDefaultSelected(SelectionEvent event) {
                widgetSelected(event);
            }
        });

        if(selectedConfiguration != null) {
            selectedConfiguration.eAdapters().add(new EContentAdapter() {
                @Override
                public void notifyChanged(Notification notification) {
                    super.notifyChanged(notification);
                }
            });
        }
    }

    /**
     * Sets the editor tab name and prevent overwriting it
     */
    @Override
    protected void setEditorTabText(String text){
        this.setPartName("Suggestions Viewer");
    }

    /**
     * applies a solution and replace the original configuration file
     *
     * @param configuration the suggestion whose elements replace the current ones
     */
    private void applySuggestion(HyConfiguration configuration) {
        this.configuration.getElements().clear();
        this.configuration.getElements().addAll(configuration.getElements());

        EcoreIOUtil.saveModel(this.configuration);
    }
}
|
package com.matthewtamlin.spyglass.processor.mirror_helpers.type_mirror_helper;
import com.google.testing.compile.CompilationRule;
import com.google.testing.compile.JavaFileObjects;
import com.matthewtamlin.avatar.element_supplier.IdBasedElementSupplier;
import com.matthewtamlin.spyglass.processor.mirror_helpers.TypeMirrorHelper;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import java.io.File;
import java.net.MalformedURLException;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.VariableElement;
import javax.lang.model.type.TypeMirror;
import javax.lang.model.util.Elements;
import javax.lang.model.util.Types;
import javax.tools.JavaFileObject;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
import static org.mockito.Mockito.mock;
public class TestTypeMirrorHelper {
// Provides real javax.lang.model Elements/Types utilities at test runtime.
@Rule
public final CompilationRule compilationRule = new CompilationRule();

// Source file containing the annotated elements these tests pull mirrors from.
private static final File DATA_FILE = new File("processor/src/test/java/com/matthewtamlin/spyglass/processor" +
        "/mirror_helpers/type_mirror_helper/Data.java");

// Supplies elements from DATA_FILE by their id annotation.
private IdBasedElementSupplier elementSupplier;

// Object under test.
private TypeMirrorHelper helper;
// Fail fast with a clear message if the test data file is missing.
@BeforeClass
public static void setupClass() {
    assertThat("Data file does not exist.", DATA_FILE.exists(), is(true));
}
// Creates a fresh element supplier and helper before each test.
@Before
public void setup() throws MalformedURLException {
    final JavaFileObject dataFileObject = JavaFileObjects.forResource(DATA_FILE.toURI().toURL());
    elementSupplier = new IdBasedElementSupplier(dataFileObject);
    helper = new TypeMirrorHelper(compilationRule.getElements(), compilationRule.getTypes());
}
// The constructor must reject a null Elements utility.
@Test(expected = IllegalArgumentException.class)
public void testConstructor_nullElementUtil() {
    new TypeMirrorHelper(null, mock(Types.class));
}

// The constructor must reject a null Types utility.
@Test(expected = IllegalArgumentException.class)
public void testConstructor_nullTypeUtil() {
    new TypeMirrorHelper(mock(Elements.class), null);
}
// --- isPrimitive: expected true for the eight primitive types, ---
// --- false for their boxed counterparts and for plain objects. ---

@Test
public void testIsPrimitive_typeMirrorForPrimitiveBoolean() {
    doIsPrimitiveTestFor("with primitive boolean", true);
}

@Test
public void testIsPrimitive_typeMirrorForPrimitiveByte() {
    doIsPrimitiveTestFor("with primitive byte", true);
}

@Test
public void testIsPrimitive_typeMirrorForPrimitiveChar() {
    doIsPrimitiveTestFor("with primitive char", true);
}

@Test
public void testIsPrimitive_typeMirrorForPrimitiveDouble() {
    doIsPrimitiveTestFor("with primitive double", true);
}

@Test
public void testIsPrimitive_typeMirrorForPrimitiveFloat() {
    doIsPrimitiveTestFor("with primitive float", true);
}

@Test
public void testIsPrimitive_typeMirrorForPrimitiveInt() {
    doIsPrimitiveTestFor("with primitive int", true);
}

@Test
public void testIsPrimitive_typeMirrorForPrimitiveLong() {
    doIsPrimitiveTestFor("with primitive long", true);
}

@Test
public void testIsPrimitive_typeMirrorForPrimitiveShort() {
    doIsPrimitiveTestFor("with primitive short", true);
}

// Boxed types are reference types, so isPrimitive must report false.

@Test
public void testIsPrimitive_typeMirrorForBoxedBoolean() {
    doIsPrimitiveTestFor("with boxed boolean", false);
}

@Test
public void testIsPrimitive_typeMirrorForBoxedByte() {
    doIsPrimitiveTestFor("with boxed byte", false);
}

@Test
public void testIsPrimitive_typeMirrorForBoxedCharacter() {
    doIsPrimitiveTestFor("with boxed char", false);
}

@Test
public void testIsPrimitive_typeMirrorForBoxedDouble() {
    doIsPrimitiveTestFor("with boxed double", false);
}

@Test
public void testIsPrimitive_typeMirrorForBoxedFloat() {
    doIsPrimitiveTestFor("with boxed float", false);
}

@Test
public void testIsPrimitive_typeMirrorForBoxedInteger() {
    doIsPrimitiveTestFor("with boxed int", false);
}

@Test
public void testIsPrimitive_typeMirrorForBoxedLong() {
    doIsPrimitiveTestFor("with boxed long", false);
}

@Test
public void testIsPrimitive_typeMirrorForBoxedShort() {
    doIsPrimitiveTestFor("with boxed short", false);
}

@Test
public void testIsPrimitive_typeMirrorForObject() {
    doIsPrimitiveTestFor("with object", false);
}
// --- isNumber: expected true for numeric primitives, their boxed forms, ---
// --- Number itself and any Number subclass; false for booleans, chars   ---
// --- and plain objects.                                                 ---

@Test
public void testIsNumber_typeMirrorForPrimitiveBoolean() {
    doIsNumberTestFor("with primitive boolean", false);
}

@Test
public void testIsNumber_typeMirrorForPrimitiveByte() {
    doIsNumberTestFor("with primitive byte", true);
}

@Test
public void testIsNumber_typeMirrorForPrimitiveChar() {
    doIsNumberTestFor("with primitive char", false);
}

@Test
public void testIsNumber_typeMirrorForPrimitiveDouble() {
    doIsNumberTestFor("with primitive double", true);
}

@Test
public void testIsNumber_typeMirrorForPrimitiveFloat() {
    doIsNumberTestFor("with primitive float", true);
}

@Test
public void testIsNumber_typeMirrorForPrimitiveInt() {
    doIsNumberTestFor("with primitive int", true);
}

@Test
public void testIsNumber_typeMirrorForPrimitiveLong() {
    doIsNumberTestFor("with primitive long", true);
}

@Test
public void testIsNumber_typeMirrorForPrimitiveShort() {
    doIsNumberTestFor("with primitive short", true);
}

@Test
public void testIsNumber_typeMirrorForBoxedBoolean() {
    doIsNumberTestFor("with boxed boolean", false);
}

@Test
public void testIsNumber_typeMirrorForBoxedByte() {
    doIsNumberTestFor("with boxed byte", true);
}

@Test
public void testIsNumber_typeMirrorForBoxedChar() {
    doIsNumberTestFor("with boxed char", false);
}

@Test
public void testIsNumber_typeMirrorForBoxedDouble() {
    doIsNumberTestFor("with boxed double", true);
}

@Test
public void testIsNumber_typeMirrorForBoxedFloat() {
    doIsNumberTestFor("with boxed float", true);
}

@Test
public void testIsNumber_typeMirrorForBoxedInt() {
    doIsNumberTestFor("with boxed int", true);
}

@Test
public void testIsNumber_typeMirrorForBoxedLong() {
    doIsNumberTestFor("with boxed long", true);
}

@Test
public void testIsNumber_typeMirrorForBoxedShort() {
    doIsNumberTestFor("with boxed short", true);
}

@Test
public void testIsNumber_typeMirrorForObject() {
    doIsNumberTestFor("with object", false);
}

@Test
public void testIsNumber_typeMirrorForNumber() {
    doIsNumberTestFor("with number", true);
}

// Custom Number implementations (and their subclasses) must also count.

@Test
public void testIsNumber_typeMirrorForOtherNumberImplementation() {
    doIsNumberTestFor("with custom number implementation", true);
}

@Test
public void testIsNumber_typeMirrorForOtherNumberImplementationSubclass() {
    doIsNumberTestFor("with custom number implementation subclass", true);
}
// --- isCharacter: true only for primitive char and java.lang.Character;
// false for every other primitive/wrapper and for Object.
@Test
public void testIsCharacter_typeMirrorForPrimitiveBoolean() {
doIsCharacterTestFor("with primitive boolean", false);
}
@Test
public void testIsCharacter_typeMirrorForPrimitiveByte() {
doIsCharacterTestFor("with primitive byte", false);
}
@Test
public void testIsCharacter_typeMirrorForPrimitiveChar() {
doIsCharacterTestFor("with primitive char", true);
}
@Test
public void testIsCharacter_typeMirrorForPrimitiveDouble() {
doIsCharacterTestFor("with primitive double", false);
}
@Test
public void testIsCharacter_typeMirrorForPrimitiveFloat() {
doIsCharacterTestFor("with primitive float", false);
}
@Test
public void testIsCharacter_typeMirrorForPrimitiveInt() {
doIsCharacterTestFor("with primitive int", false);
}
@Test
public void testIsCharacter_typeMirrorForPrimitiveLong() {
doIsCharacterTestFor("with primitive long", false);
}
@Test
public void testIsCharacter_typeMirrorForPrimitiveShort() {
doIsCharacterTestFor("with primitive short", false);
}
@Test
public void testIsCharacter_typeMirrorForBoxedBoolean() {
doIsCharacterTestFor("with boxed boolean", false);
}
@Test
public void testIsCharacter_typeMirrorForBoxedByte() {
doIsCharacterTestFor("with boxed byte", false);
}
@Test
public void testIsCharacter_typeMirrorForBoxedChar() {
doIsCharacterTestFor("with boxed char", true);
}
@Test
public void testIsCharacter_typeMirrorForBoxedDouble() {
doIsCharacterTestFor("with boxed double", false);
}
@Test
public void testIsCharacter_typeMirrorForBoxedFloat() {
doIsCharacterTestFor("with boxed float", false);
}
@Test
public void testIsCharacter_typeMirrorForBoxedInt() {
doIsCharacterTestFor("with boxed int", false);
}
@Test
public void testIsCharacter_typeMirrorForBoxedLong() {
doIsCharacterTestFor("with boxed long", false);
}
@Test
public void testIsCharacter_typeMirrorForBoxedShort() {
doIsCharacterTestFor("with boxed short", false);
}
@Test
public void testIsCharacter_typeMirrorForObject() {
doIsCharacterTestFor("with object", false);
}
// --- isBoolean: true only for primitive boolean and java.lang.Boolean;
// false for every other primitive/wrapper and for Object.
@Test
public void testIsBoolean_typeMirrorForPrimitiveBoolean() {
doIsBooleanTestFor("with primitive boolean", true);
}
@Test
public void testIsBoolean_typeMirrorForPrimitiveByte() {
doIsBooleanTestFor("with primitive byte", false);
}
@Test
public void testIsBoolean_typeMirrorForPrimitiveChar() {
doIsBooleanTestFor("with primitive char", false);
}
@Test
public void testIsBoolean_typeMirrorForPrimitiveDouble() {
doIsBooleanTestFor("with primitive double", false);
}
@Test
public void testIsBoolean_typeMirrorForPrimitiveFloat() {
doIsBooleanTestFor("with primitive float", false);
}
@Test
public void testIsBoolean_typeMirrorForPrimitiveInt() {
doIsBooleanTestFor("with primitive int", false);
}
@Test
public void testIsBoolean_typeMirrorForPrimitiveLong() {
doIsBooleanTestFor("with primitive long", false);
}
@Test
public void testIsBoolean_typeMirrorForPrimitiveShort() {
doIsBooleanTestFor("with primitive short", false);
}
@Test
public void testIsBoolean_typeMirrorForBoxedBoolean() {
doIsBooleanTestFor("with boxed boolean", true);
}
@Test
public void testIsBoolean_typeMirrorForBoxedByte() {
doIsBooleanTestFor("with boxed byte", false);
}
@Test
public void testIsBoolean_typeMirrorForBoxedChar() {
doIsBooleanTestFor("with boxed char", false);
}
@Test
public void testIsBoolean_typeMirrorForBoxedDouble() {
doIsBooleanTestFor("with boxed double", false);
}
@Test
public void testIsBoolean_typeMirrorForBoxedFloat() {
doIsBooleanTestFor("with boxed float", false);
}
@Test
public void testIsBoolean_typeMirrorForBoxedInt() {
doIsBooleanTestFor("with boxed int", false);
}
@Test
public void testIsBoolean_typeMirrorForBoxedLong() {
doIsBooleanTestFor("with boxed long", false);
}
@Test
public void testIsBoolean_typeMirrorForBoxedShort() {
doIsBooleanTestFor("with boxed short", false);
}
@Test
public void testIsBoolean_typeMirrorForObject() {
doIsBooleanTestFor("with object", false);
}
// --- isEnum: false for all primitives, wrappers and Object; true for enum types
// whether referenced raw, through a wildcard, or as a concrete enum class.
@Test
public void testIsEnum_typeMirrorForPrimitiveBoolean() {
doIsEnumTestFor("with primitive boolean", false);
}
@Test
public void testIsEnum_typeMirrorForPrimitiveByte() {
doIsEnumTestFor("with primitive byte", false);
}
@Test
public void testIsEnum_typeMirrorForPrimitiveChar() {
doIsEnumTestFor("with primitive char", false);
}
@Test
public void testIsEnum_typeMirrorForPrimitiveDouble() {
doIsEnumTestFor("with primitive double", false);
}
@Test
public void testIsEnum_typeMirrorForPrimitiveFloat() {
doIsEnumTestFor("with primitive float", false);
}
@Test
public void testIsEnum_typeMirrorForPrimitiveInt() {
doIsEnumTestFor("with primitive int", false);
}
@Test
public void testIsEnum_typeMirrorForPrimitiveLong() {
doIsEnumTestFor("with primitive long", false);
}
@Test
public void testIsEnum_typeMirrorForPrimitiveShort() {
doIsEnumTestFor("with primitive short", false);
}
@Test
public void testIsEnum_typeMirrorForBoxedBoolean() {
doIsEnumTestFor("with boxed boolean", false);
}
@Test
public void testIsEnum_typeMirrorForBoxedByte() {
doIsEnumTestFor("with boxed byte", false);
}
@Test
public void testIsEnum_typeMirrorForBoxedChar() {
doIsEnumTestFor("with boxed char", false);
}
@Test
public void testIsEnum_typeMirrorForBoxedDouble() {
doIsEnumTestFor("with boxed double", false);
}
@Test
public void testIsEnum_typeMirrorForBoxedFloat() {
doIsEnumTestFor("with boxed float", false);
}
@Test
public void testIsEnum_typeMirrorForBoxedInt() {
doIsEnumTestFor("with boxed int", false);
}
@Test
public void testIsEnum_typeMirrorForBoxedLong() {
doIsEnumTestFor("with boxed long", false);
}
@Test
public void testIsEnum_typeMirrorForBoxedShort() {
doIsEnumTestFor("with boxed short", false);
}
@Test
public void testIsEnum_typeMirrorForObject() {
doIsEnumTestFor("with object", false);
}
@Test
public void testIsEnum_typeMirrorForRawEnum() {
doIsEnumTestFor("with raw enum", true);
}
@Test
public void testIsEnum_typeMirrorForWildcardEnum() {
doIsEnumTestFor("with wildcard enum", true);
}
@Test
public void testIsEnum_typeMirrorForRegularEnum() {
doIsEnumTestFor("with regular enum", true);
}
// --- boxPrimitive (success cases): each primitive type boxes to the canonical
// name of its java.lang wrapper class.
@Test
public void testBoxPrimitive_typeMirrorForPrimitiveBoolean() {
doBoxPrimitiveTestWithPassExpectedFor("with primitive boolean", Boolean.class.getCanonicalName());
}
@Test
public void testBoxPrimitive_typeMirrorForPrimitiveByte() {
doBoxPrimitiveTestWithPassExpectedFor("with primitive byte", Byte.class.getCanonicalName());
}
@Test
public void testBoxPrimitive_typeMirrorForPrimitiveChar() {
doBoxPrimitiveTestWithPassExpectedFor("with primitive char", Character.class.getCanonicalName());
}
@Test
public void testBoxPrimitive_typeMirrorForPrimitiveDouble() {
doBoxPrimitiveTestWithPassExpectedFor("with primitive double", Double.class.getCanonicalName());
}
@Test
public void testBoxPrimitive_typeMirrorForPrimitiveFloat() {
doBoxPrimitiveTestWithPassExpectedFor("with primitive float", Float.class.getCanonicalName());
}
@Test
public void testBoxPrimitive_typeMirrorForPrimitiveInt() {
doBoxPrimitiveTestWithPassExpectedFor("with primitive int", Integer.class.getCanonicalName());
}
@Test
public void testBoxPrimitive_typeMirrorForPrimitiveLong() {
doBoxPrimitiveTestWithPassExpectedFor("with primitive long", Long.class.getCanonicalName());
}
@Test
public void testBoxPrimitive_typeMirrorForPrimitiveShort() {
doBoxPrimitiveTestWithPassExpectedFor("with primitive short", Short.class.getCanonicalName());
}
// --- boxPrimitive (failure cases): boxing anything that is not a primitive type
// must throw IllegalArgumentException.
@Test(expected = IllegalArgumentException.class)
public void testBoxPrimitive_typeMirrorForBoxedBoolean() {
doBoxPrimitiveTestWithFailExpectedFor("with boxed boolean");
}
@Test(expected = IllegalArgumentException.class)
public void testBoxPrimitive_typeMirrorForBoxedByte() {
doBoxPrimitiveTestWithFailExpectedFor("with boxed byte");
}
@Test(expected = IllegalArgumentException.class)
public void testBoxPrimitive_typeMirrorForBoxedChar() {
doBoxPrimitiveTestWithFailExpectedFor("with boxed char");
}
@Test(expected = IllegalArgumentException.class)
public void testBoxPrimitive_typeMirrorForBoxedDouble() {
doBoxPrimitiveTestWithFailExpectedFor("with boxed double");
}
@Test(expected = IllegalArgumentException.class)
public void testBoxPrimitive_typeMirrorForBoxedFloat() {
doBoxPrimitiveTestWithFailExpectedFor("with boxed float");
}
@Test(expected = IllegalArgumentException.class)
public void testBoxPrimitive_typeMirrorForBoxedInt() {
doBoxPrimitiveTestWithFailExpectedFor("with boxed int");
}
@Test(expected = IllegalArgumentException.class)
public void testBoxPrimitive_typeMirrorForBoxedLong() {
doBoxPrimitiveTestWithFailExpectedFor("with boxed long");
}
@Test(expected = IllegalArgumentException.class)
public void testBoxPrimitive_typeMirrorForBoxedShort() {
doBoxPrimitiveTestWithFailExpectedFor("with boxed short");
}
@Test(expected = IllegalArgumentException.class)
public void testBoxPrimitive_typeMirrorForObject() {
// FIX: this test declares an expected IllegalArgumentException but previously
// invoked the pass-expected helper with an expected result, so it only passed
// by accident of the helper throwing. Use the fail-expected helper like every
// other non-primitive case.
doBoxPrimitiveTestWithFailExpectedFor("with object");
}
// --- isAssignable(supplied, recipient): null arguments are rejected; a supplied
// type is assignable to a recipient when it is the same class, any-level subclass,
// or any-level implementation of the recipient. The fixture hierarchy is:
// class1 <- class2 <- class3, class2 implements "interface", class4 unrelated.
@Test(expected = IllegalArgumentException.class)
public void testIsAssignable_nullSubclassSupplied() {
helper.isAssignable(null, mock(TypeMirror.class));
}
@Test(expected = IllegalArgumentException.class)
public void testIsAssignable_nullSuperclassSupplied() {
helper.isAssignable(mock(TypeMirror.class), null);
}
@Test
public void testIsAssignable_recipientIsRegularClass_suppliedIsFirstLevelSubclassOfRecipient() {
doIsAssignableTestFor("class2", "class1", true);
}
@Test
public void testIsAssignable_recipientIsRegularClass_suppliedIsSecondLevelSubclassOfRecipient() {
doIsAssignableTestFor("class3", "class1", true);
}
@Test
public void testIsAssignable_recipientIsRegularClass_recipientIsFirstLevelSubclassOfSupplied() {
doIsAssignableTestFor("class1", "class2", false);
}
@Test
public void testIsAssignable_recipientIsRegularClass_suppliedIsNotSubclassOfRecipient() {
doIsAssignableTestFor("class1", "class4", false);
}
@Test
public void testIsAssignable_recipientIsRegularClass_suppliedIsSameClassAsRecipient() {
doIsAssignableTestFor("class1", "class1", true);
}
@Test
public void testIsAssignable_recipientIsInterface_suppliedIsFirstLevelImplementationOfRecipient() {
doIsAssignableTestFor("class2", "interface", true);
}
@Test
public void testIsAssignable_recipientIsInterface_suppliedIsSecondLevelImplementationOfRecipient() {
doIsAssignableTestFor("class3", "interface", true);
}
@Test
public void testIsAssignable_recipientIsInterface_recipientIsFirstLevelImplementationOfSupplied() {
doIsAssignableTestFor("interface", "class2", false);
}
@Test
public void testIsAssignable_recipientIsInterface_suppliedIsNotImplementationOfRecipient() {
doIsAssignableTestFor("class4", "interface", false);
}
/**
 * Resolves the fixture element with the given id and returns the declared type
 * of its first parameter. All fixture elements used here are methods whose
 * single parameter carries the type under test.
 */
private TypeMirror firstParameterType(final String elementId) {
    final ExecutableElement fixtureMethod =
            (ExecutableElement) elementSupplier.getUniqueElementWithId(elementId);
    final VariableElement firstParameter = fixtureMethod.getParameters().get(0);
    return firstParameter.asType();
}
/** Asserts that {@code helper.isPrimitive} yields {@code expectedResult} for the fixture type. */
private void doIsPrimitiveTestFor(final String elementId, final boolean expectedResult) {
    assertThat(helper.isPrimitive(firstParameterType(elementId)), is(expectedResult));
}
/** Asserts that {@code helper.isNumber} yields {@code expectedResult} for the fixture type. */
private void doIsNumberTestFor(final String elementId, final boolean expectedResult) {
    assertThat(helper.isNumber(firstParameterType(elementId)), is(expectedResult));
}
/** Asserts that {@code helper.isCharacter} yields {@code expectedResult} for the fixture type. */
private void doIsCharacterTestFor(final String elementId, final boolean expectedResult) {
    assertThat(helper.isCharacter(firstParameterType(elementId)), is(expectedResult));
}
/** Asserts that {@code helper.isBoolean} yields {@code expectedResult} for the fixture type. */
private void doIsBooleanTestFor(final String elementId, final boolean expectedResult) {
    assertThat(helper.isBoolean(firstParameterType(elementId)), is(expectedResult));
}
/** Asserts that {@code helper.isEnum} yields {@code expectedResult} for the fixture type. */
private void doIsEnumTestFor(final String elementId, final boolean expectedResult) {
    assertThat(helper.isEnum(firstParameterType(elementId)), is(expectedResult));
}
/** Asserts that boxing the fixture type produces the given canonical class name. */
private void doBoxPrimitiveTestWithPassExpectedFor(final String elementId, final String expectedResultClassName) {
    final TypeMirror boxed = helper.boxPrimitive(firstParameterType(elementId));
    assertThat(boxed.toString(), is(expectedResultClassName));
}
/** Invokes boxPrimitive on the fixture type; the caller expects this to throw. */
private void doBoxPrimitiveTestWithFailExpectedFor(final String elementId) {
    helper.boxPrimitive(firstParameterType(elementId));
}
/** Asserts the result of {@code helper.isAssignable} for the two fixture elements. */
private void doIsAssignableTestFor(
        final String typeElementId,
        final String recipientElementId,
        final boolean expectedResult) {
    final TypeMirror candidate = elementSupplier.getUniqueElementWithId(typeElementId).asType();
    final TypeMirror recipient = elementSupplier.getUniqueElementWithId(recipientElementId).asType();
    assertThat(helper.isAssignable(candidate, recipient), is(expectedResult));
}
}
|
package org.csstudio.archive.reader.kblog;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* This thread transfers error messages from kblogrd to the standard logger.
*
* @author Takashi Nakamoto
*/
public class KBLogErrorHandleThread extends Thread {
    /** Character set used to decode kblogrd's standard-error stream. */
    private static final String charset = "US-ASCII";

    /** Reader wrapped around kblogrd's standard error. */
    private final BufferedReader stderrReader;
    /** Path to the "kblogrd" command; used only in log messages. */
    private final String kblogrdPath;
    /** Command ID of this kblogrd invocation; used only in log messages. */
    private final int commandId;
    /** Whether the standard-error stream has been closed. Guarded by "this". */
    private boolean closed;

    /**
     * Constructor of KBLogErrorHandleThread.
     *
     * @param kblogrdStdErr InputStream of standard error for kblogrd.
     * @param kblogrdPath Path to "kblogrd" command.
     * @param commandId Command ID of kblogrd.
     */
    KBLogErrorHandleThread(InputStream kblogrdStdErr, String kblogrdPath, int commandId) {
        this.kblogrdPath = kblogrdPath;
        this.commandId = commandId;
        this.closed = false;

        Logger.getLogger(Activator.ID).log(Level.FINEST,
                "Start to read the standard error of " + kblogrdPath + " (" + commandId + ").");

        BufferedReader reader;
        try {
            reader = new BufferedReader(new InputStreamReader(kblogrdStdErr, charset));
        } catch (UnsupportedEncodingException ex) {
            // US-ASCII is a required charset on every JVM, so this branch should be
            // unreachable; fall back to the platform default just in case.
            Logger.getLogger(Activator.ID).log(Level.WARNING,
                    "Character set " + charset + " is not supported in this platform. System default charset will be used as a fallback.");
            reader = new BufferedReader(new InputStreamReader(kblogrdStdErr));
        }
        stderrReader = reader;
    }

    /**
     * Reads kblogrd's standard error line by line, forwards each line to the
     * CSS logging system, and finally closes the stream.
     */
    @Override // FIX: was missing @Override on this Thread.run() override
    public void run() {
        try {
            String line;
            // Transfer the messages in the standard error to the CSS logging system.
            while ((line = stderrReader.readLine()) != null) {
                Logger.getLogger(Activator.ID).log(Level.FINE,
                        "Error message from " + kblogrdPath + " (" + commandId + "): " + line);
            }
        } catch (IOException ex) {
            Logger.getLogger(Activator.ID).log(Level.WARNING,
                    "IOException while reading standard error of " + kblogrdPath + " (" + commandId + ")", ex);
        }
        // Close the standard error.
        close();
    }

    /**
     * Closes the standard-error reader. Safe to call repeatedly: once the stream
     * has been closed successfully, further calls are no-ops.
     */
    public synchronized void close() {
        if (closed)
            return;
        try {
            stderrReader.close();
            closed = true;
            Logger.getLogger(Activator.ID).log(Level.FINEST,
                    "End of reading the standard error of " + kblogrdPath + " (" + commandId + ").");
        } catch (IOException ex) {
            // FIX: message typo "off" -> "of".
            Logger.getLogger(Activator.ID).log(Level.SEVERE,
                    "Failed to close the standard error of " + kblogrdPath + " (" + commandId + ")", ex);
        }
    }

    /** @return whether the standard-error stream has already been closed. */
    public synchronized boolean isClosed() {
        return closed;
    }
}
|
package com.opengamma.financial.analytics.covariance;
import java.util.Collections;
import java.util.Set;
import com.opengamma.core.position.Position;
import com.opengamma.engine.ComputationTarget;
import com.opengamma.engine.ComputationTargetSpecification;
import com.opengamma.engine.function.FunctionCompilationContext;
import com.opengamma.engine.function.FunctionExecutionContext;
import com.opengamma.engine.function.FunctionInputs;
import com.opengamma.engine.value.ComputedValue;
import com.opengamma.engine.value.ValueProperties;
import com.opengamma.engine.value.ValueRequirement;
import com.opengamma.engine.value.ValueRequirementNames;
import com.opengamma.engine.value.ValueSpecification;
import com.opengamma.engine.view.ViewCalculationConfiguration;
import com.opengamma.financial.view.HistoricalViewEvaluationFunction;
import com.opengamma.financial.view.HistoricalViewEvaluationMarketData;
import com.opengamma.timeseries.date.localdate.LocalDateDoubleTimeSeries;
/**
* Uses the base market data values only to create the covariance matrix.
*/
public class MarketDataCovarianceMatrixFunction extends SampledCovarianceMatrixFunction {

  // SampledCovarianceMatrix

  @Override
  protected String getDataType() {
    return "MarketData";
  }

  @Override
  protected void addValueRequirements(final FunctionCompilationContext context, final Position target, final ViewCalculationConfiguration calcConfig) {
    // Request the base VALUE output for this position; no constraints needed.
    final ComputationTargetSpecification targetSpec = ComputationTargetSpecification.of(target);
    calcConfig.addSpecificRequirement(new ValueRequirement(ValueRequirementNames.VALUE, targetSpec, ValueProperties.none()));
  }

  @Override
  protected Set<ValueRequirement> createRequirements(final ComputationTargetSpecification tempTargetSpec) {
    // Ask the historical view evaluation for the raw market data series.
    final ValueProperties marketDataConstraint = ValueProperties.with(HistoricalViewEvaluationFunction.MARKET_DATA_PROPERTY_NAME,
        HistoricalViewEvaluationFunction.MARKET_DATA_PROPERTY_VALUE).get();
    return Collections.singleton(new ValueRequirement(ValueRequirementNames.HISTORICAL_TIME_SERIES, tempTargetSpec, marketDataConstraint));
  }

  // FunctionInvoker

  @Override
  public Set<ComputedValue> execute(final FunctionExecutionContext executionContext, final FunctionInputs inputs, final ComputationTarget target, final Set<ValueRequirement> desiredValues) {
    final HistoricalViewEvaluationMarketData historicalData = (HistoricalViewEvaluationMarketData) inputs.getValue(ValueRequirementNames.HISTORICAL_TIME_SERIES);
    // Snapshot the value specifications and fetch the matching time series for each.
    final ValueSpecification[] specs = historicalData.getValueSpecifications().toArray(new ValueSpecification[historicalData.getValueSpecifications().size()]);
    final LocalDateDoubleTimeSeries[] series = new LocalDateDoubleTimeSeries[specs.length];
    int index = 0;
    for (final ValueSpecification spec : specs) {
      series[index++] = historicalData.getDoubleTimeSeries(spec);
    }
    // Label the covariance matrix result with the constraints of the requested value.
    final ValueRequirement requested = desiredValues.iterator().next();
    final ValueSpecification resultSpec = new ValueSpecification(ValueRequirementNames.COVARIANCE_MATRIX, target.toSpecification(), requested.getConstraints());
    return Collections.singleton(new ComputedValue(resultSpec, createCovarianceMatrix(series, specs)));
  }
}
|
package org.opensingular.requirement.connector.sei30.util;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
public class EncodeUtil {

    /** Utility class: not instantiable. */
    private EncodeUtil() {
    }

    /**
     * Reads the whole file and returns its contents encoded as Base64.
     *
     * @param file the file to encode.
     * @return Base64 representation of the file contents.
     * @throws IOException if the file cannot be opened or read.
     */
    public static String encodeToBase64(File file) throws IOException {
        // FIX: try-with-resources guarantees the stream is closed even when reading
        // throws (previously the FileInputStream leaked on failure).
        try (FileInputStream fis = new FileInputStream(file)) {
            return encodeToBase64(fis, (int) file.length());
        }
    }

    /**
     * Reads the stream to its end and returns the contents encoded as Base64.
     * The stream is always closed by this method, even when reading fails.
     *
     * @param fis  the stream to read; closed by this method.
     * @param size unused; kept only for backward compatibility with existing callers.
     * @return Base64 representation of the stream contents.
     * @throws IOException if reading fails.
     */
    public static String encodeToBase64(InputStream fis, int size) throws IOException {
        try {
            byte[] bytes = IOUtils.toByteArray(fis);
            // FIX: encodeBase64String always produces US-ASCII output, whereas
            // new String(byte[]) depended on the platform default charset.
            return Base64.encodeBase64String(bytes);
        } finally {
            fis.close();
        }
    }

    /**
     * Encodes the UTF-8 bytes of a string as Base64.
     *
     * @param value the string to encode; may be null or empty.
     * @return Base64 text, or null when the input is null or empty.
     */
    public static String encodeToBase64(String value) {
        return StringUtils.isNotEmpty(value) ? Base64.encodeBase64String(value.getBytes(StandardCharsets.UTF_8)) : null;
    }

    /**
     * Decodes Base64 text into a UTF-8 string.
     *
     * @param value Base64 text; may be null or empty.
     * @return the decoded string, or "" when the input is null or empty.
     */
    public static String decodeFromBase64(String value) {
        if (StringUtils.isNotEmpty(value)) {
            byte[] decodedBytes = Base64.decodeBase64(value);
            return new String(decodedBytes, StandardCharsets.UTF_8);
        } else {
            return "";
        }
    }

    /**
     * Decodes Base64 text into an ISO-8859-1 string.
     *
     * @param value Base64 text; may be null or empty.
     * @return the decoded string, or "" when the input is null or empty.
     */
    public static String decodeFromBase64Iso(String value) {
        if (StringUtils.isNotEmpty(value)) {
            byte[] decodedBytes = Base64.decodeBase64(value);
            return new String(decodedBytes, StandardCharsets.ISO_8859_1);
        } else {
            return "";
        }
    }
}
|
// IT Innovation Centre of Gamma House, Enterprise Road,
// Chilworth Science Park, Southampton, SO16 7NS, UK.
// or reproduced in whole or in part in any manner or form or in or
// on any media by any person other than in accordance with the terms
// of the Licence Agreement supplied with the software, or otherwise
// PURPOSE, except where stated in the Licence Agreement supplied with
// the software.
// Created Date : 15-Aug-2012
// Created for Project : EXPERIMEDIA
package uk.ac.soton.itinnovation.experimedia.arch.ecc.samples.basicECCClient;
import uk.ac.soton.itinnovation.experimedia.arch.ecc.samples.shared.*;
import uk.ac.soton.itinnovation.experimedia.arch.ecc.amqpAPI.impl.amqp.*;
import uk.ac.soton.itinnovation.experimedia.arch.ecc.common.dataModel.metrics.*;
import uk.ac.soton.itinnovation.experimedia.arch.ecc.common.dataModel.monitor.*;
import uk.ac.soton.itinnovation.experimedia.arch.ecc.common.dataModel.experiment.Experiment;
import org.apache.log4j.Logger;
import java.util.*;
import javax.swing.JOptionPane;
public class ECCClientController implements EMIAdapterListener,
ECCClientViewListener
{
private final Logger clientLogger = Logger.getLogger( ECCClientController.class );
// AMQP channel used to communicate with the Experiment Monitor (EM); created in start()
private AMQPBasicChannel amqpChannel;
// Adapter through which all EM interaction happens (register, push, disconnect)
private EMInterfaceAdapter emiAdapter;
// Simple Swing view for status/log display; created in start()
private ECCClientView clientView;
// Human-readable client name; set from the current date in start()
private String clientName;
// The single entity and attribute this demo client observes (built in onPopulateMetricGeneratorInfo)
private Entity entityBeingObserved;
private Attribute entityAttribute;
// All metric generators this client declared, keyed by generator UUID (only one in this demo)
private HashMap<UUID,MetricGenerator> metricGenerators;
// First and most recent RAM-usage samples; NOTE(review): presumably set by
// snapshotMeasurement() (definition not fully visible here) -- confirm.
private Measurement firstMeasurement;
private Measurement currentMeasurement;
// Reports sent/offered to the ECC and awaiting confirmation, keyed by report UUID
private HashMap<UUID, Report> pendingPushReports;
private HashMap<UUID, Report> pendingPullReports;
/** Creates the controller with empty generator and pending-report registries. */
public ECCClientController() {
    pendingPushReports = new HashMap<UUID, Report>();
    pendingPullReports = new HashMap<UUID, Report>();
    metricGenerators = new HashMap<UUID, MetricGenerator>();
}
/**
 * Connects to the RabbitMQ server, creates the client view and attempts to
 * register this client with the Experiment Monitor (EM).
 *
 * @param rabbitServerIP IP address of the RabbitMQ server.
 * @param expMonitorID   UUID of the Experiment Monitor to register with.
 * @param clientID       UUID identifying this client.
 * @throws Exception if connecting to the AMQP host or attempting EM
 *                   registration fails (the failure is logged first).
 *
 * NOTE(review): if any argument is null this method silently does nothing.
 */
public void start( String rabbitServerIP,
UUID expMonitorID,
UUID clientID ) throws Exception
{
if ( rabbitServerIP != null &&
expMonitorID != null &&
clientID != null )
{
clientLogger.info( "Trying to connect to Rabbit server on " + rabbitServerIP );
AMQPConnectionFactory amqpFactory = new AMQPConnectionFactory();
amqpFactory.setAMQPHostIPAddress( rabbitServerIP );
try
{
amqpFactory.connectToAMQPHost();
amqpChannel = amqpFactory.createNewChannel();
}
catch (Exception e )
{
clientLogger.error( "Could not connect to Rabbit server" );
throw e;
}
// The client name is simply the current timestamp in this demo
Date date = new Date();
clientName = date.toString();
clientView = new ECCClientView( clientName, this );
clientView.setVisible( true );
// Create EM interface adapter, listen to it...
emiAdapter = new EMInterfaceAdapter( this );
// ... and try registering with the EM.
try { emiAdapter.registerWithEM( clientName,
amqpChannel,
expMonitorID, clientID ); }
catch ( Exception e )
{
clientLogger.error( "Could not attempt registration with EM" );
throw e;
}
}
}
// EM callback: reports whether registration was accepted; on success shows the
// linked experiment name in the view.
@Override
public void onEMConnectionResult( boolean connected, Experiment expInfo )
{
if ( connected )
{
clientView.setStatus( "Connected to EM" );
clientView.addLogMessage( "Linked to experiment: " + expInfo.getName() );
}
else
clientView.setStatus( "Refused connection to EM" );
}
// EM callback: the EM has deregistered this client. Disconnects (best effort),
// informs the user, disposes the view and terminates the JVM.
@Override
public void onEMDeregistration( String reason )
{
clientView.addLogMessage( "Got disconnected from EM: " + reason );
try
{ emiAdapter.disconnectFromEM(); }
catch ( Exception e )
{ clientLogger.error( "Had problems disconnecting from EM: " + e.getMessage() ); }
// Apologise to the user
JOptionPane.showMessageDialog( null, "ECC disconnected this client: " + reason );
clientView.dispose();
System.exit(0);
}
// EM callback: declare which monitoring phases this client supports by adding
// them to the OUT parameter (discovery is implied and need not be added).
@Override
public void onDescribeSupportedPhases( EnumSet<EMPhase> phasesOUT )
{
// We're going to support all phases (although we won't do much in some of them)
// ... we MUST support the discovery phase by default, but don't need to include
phasesOUT.add( EMPhase.eEMSetUpMetricGenerators );
phasesOUT.add( EMPhase.eEMLiveMonitoring );
phasesOUT.add( EMPhase.eEMPostMonitoringReport );
phasesOUT.add( EMPhase.eEMTearDown );
}
// EM callback: declare push/pull support via the OUT array
// (index 0 = push, index 1 = pull).
@Override
public void onDescribePushPullBehaviours( Boolean[] pushPullOUT )
{
// We're going to support both push and pull
pushPullOUT[0] = true;
pushPullOUT[1] = true;
}
/**
 * EM discovery-phase callback: builds this demo's metric model (one entity with
 * one RAM-usage attribute, one generator containing one group with one
 * measurement set measuring bytes on a RATIO scale) and hands it to the EM via
 * the adapter.
 */
@Override
public void onPopulateMetricGeneratorInfo()
{
    clientView.setStatus( "Sending metric gen info to EM" );

    // Create a new entity to be observed (this Java VM)
    entityBeingObserved = new Entity();
    entityBeingObserved.setName( "EM Client host" );

    // Create an attribute to observe
    entityAttribute = new Attribute();
    entityAttribute.setName( "Client RAM usage" );
    entityAttribute.setDescription( "Very simple measurement of total bytes used" );
    entityAttribute.setEntityUUID( entityBeingObserved.getUUID() );
    entityBeingObserved.addAttribute( entityAttribute );

    // Create a single metric generator that will represent this metric data generation
    MetricGenerator metricGen = new MetricGenerator();
    metricGen.setName( "MGEN " + clientName );
    metricGen.setDescription( "Metric generator demonstration" );
    metricGen.addEntity( entityBeingObserved );

    // Add our generator to a collection - obviously we only actually have one generator here though
    metricGenerators.put( metricGen.getUUID(), metricGen );

    // Create a single group which will contain...
    MetricGroup mg = new MetricGroup();
    mg.setName( "Demo group" );
    mg.setDescription( "A single group to contain metrics" );
    mg.setMetricGeneratorUUID( metricGen.getUUID() );
    metricGen.addMetricGroup( mg );

    // ... a single MeasurementSet (representing the measures for the attibute)
    MeasurementSet ms = new MeasurementSet();
    ms.setMetricGroupUUID( mg.getUUID() );
    ms.setAttributeUUID( entityAttribute.getUUID() ); // Link the measurement set to the attribute here
    mg.addMeasurementSets( ms );

    // Define the metric for this MeasurementSet
    Metric memMetric = new Metric();
    memMetric.setMetricType( MetricType.RATIO );
    memMetric.setUnit( new Unit("Bytes") );
    ms.setMetric( memMetric );

    clientView.addLogMessage( "Discovered generator: " + metricGen.getName() );

    // Ready our metric generator for the EM.
    // FIX: declare the set with its type parameter instead of the raw HashSet used before.
    HashSet<MetricGenerator> mgSet = new HashSet<MetricGenerator>();
    mgSet.addAll( metricGenerators.values() );
    emiAdapter.setMetricGenerators( mgSet );
}
// EM callback: the discovery phase timed out before this client responded.
@Override
public void onDiscoveryTimeOut()
{ clientView.addLogMessage( "Got discovery time-out message" ); }
// EM callback: set up the generator identified by genID; signal readiness via
// the OUT parameter (this demo has nothing to actually set up).
@Override
public void onSetupMetricGenerator( UUID genID, Boolean[] resultOUT )
{
clientView.setStatus( "Setting up generators" );
// Just signal that the metric generator is ready
resultOUT[0] = true;
clientView.addLogMessage( "Completed generator set-up" );
}
// EM callback: generator set-up timed out.
@Override
public void onSetupTimeOut( UUID metricGeneratorID )
{ clientView.addLogMessage( "Got set-up time-out message" ); }
// EM callback: the live monitoring phase has begun.
@Override
public void onLiveMonitoringStarted()
{
clientView.addLogMessage( "ECC has started Live Monitoring process" );
}
// EM callback: pushing is now permitted; enable the view's push control.
@Override
public void onStartPushingMetricData()
{
// Allow the human user to manually push some data
clientView.addLogMessage( "Enabling metric push" );
clientView.enablePush( true );
}
// EM callback: the EM confirmed receipt of a pushed report; drop it from the
// pending book and re-enable the push control.
@Override
public void onPushReportReceived( UUID reportID )
{
// We'll use this report with the 'MiniEDM' later, but for now...
pendingPushReports.remove( reportID );
// Got the last push, so allow another manual push
clientView.enablePush( true );
}
// EM callback: the EM confirmed receipt of a pulled report; drop it from the pending book.
@Override
public void onPullReportReceived( UUID reportID )
{
// We'll use this report with the 'MiniEDM' later, but for now...
pendingPullReports.remove( reportID );
}
// EM callback: a pull request timed out.
@Override
public void onPullMetricTimeOut( UUID measurementSetID )
{ clientView.addLogMessage( "Got live pull time-out message" ); }
// EM callback: pushing is no longer permitted; disable the view's push control.
@Override
public void onStopPushingMetricData()
{
// Stop manual pushing of data by the human user
clientView.addLogMessage( "Disabling metric push" );
clientView.enablePush( false );
}
// EM callback: the EM is pulling a measurement. Takes a fresh snapshot, wraps it
// in a single-measurement set and fills the OUT report; the report is recorded
// as pending until onPullReportReceived confirms it.
@Override
/*
* Note that 'reportOut' is an OUT parameter provided by the adapter
*/
public void onPullMetric( UUID measurementSetID, Report reportOut )
{
// Create an empty instance of our measurement set
MeasurementSet sampleSet = createMeasurementSetEmptySample();
// Add a snapshot measurement to it
snapshotMeasurement();
sampleSet.addMeasurement( currentMeasurement );
// Set up report (has only a single measure)
Date date = new Date();
reportOut.setReportDate( date );
reportOut.setFromDate( date );
reportOut.setToDate( date );
reportOut.setMeasurementSet( sampleSet );
reportOut.setNumberOfMeasurements( 1 );
// Store for confirmation of pull later
pendingPullReports.put( reportOut.getUUID(), reportOut );
}
// EM callback: the EM has finished pulling.
@Override
public void onPullingStopped()
{
clientView.addLogMessage( "ECC has stopped pulling" );
}
@Override
/*
* Note that the summaryOUT parameter is an OUT parameter supplied by the
* adapter
*/
public void onPopulateSummaryReport( EMPostReportSummary summaryOUT )
{
// We've only got one MeasurementSet so we'll create a demo summary report
// and just two measurements.. so we'll use these to create a demo summary
// If we don't have any measurements, make some up!
if ( firstMeasurement == null ) snapshotMeasurement();
if ( currentMeasurement == null ) snapshotMeasurement();
// Create a new report for this summary
Report report = new Report();
report.setReportDate( new Date() );
report.setFromDate( firstMeasurement.getTimeStamp() );
report.setToDate( currentMeasurement.getTimeStamp() );
report.setNumberOfMeasurements( 2 );
// We've only got one of each...
MetricGenerator mGen = metricGenerators.values().iterator().next();
MetricGroup mGroup = mGen.getMetricGroups().iterator().next();
MeasurementSet mSet = mGroup.getMeasurementSets().iterator().next();
// Add our one MeasurementSet data to the report and add that to the summary report
report.setMeasurementSet( mSet );
summaryOUT.addReport( report );
}
@Override
public void onPopulateDataBatch( EMDataBatch batchOUT )
{
// We've only stored the first and the last measurements of a single
// MeasurementSet, so just send that
MeasurementSet ms = createMeasurementSetEmptySample();
ms.addMeasurement( firstMeasurement );
ms.addMeasurement( currentMeasurement );
Report batchRep = new Report();
batchRep.setFromDate( firstMeasurement.getTimeStamp() );
batchRep.setToDate( currentMeasurement.getTimeStamp() );
batchRep.setMeasurementSet( ms );
batchRep.setNumberOfMeasurements( 2 );
batchOUT.setBatchReport( batchRep );
}
@Override
// The EM timed out waiting for the batch identified by batchID; log it
public void onReportBatchTimeOut( UUID batchID )
{ clientView.addLogMessage( "Got post-report time-out message" ); }
@Override
/*
 * resultOUT is an OUT parameter supplied by the adapter: element 0 carries
 * our tear-down success/failure back to the EM.
 */
public void onGetTearDownResult( Boolean[] resultOUT )
{
    clientView.setStatus( "Tearing down" );
    clientView.addLogMessage( "Tearing down metric generators" );
    // Signal we've successfully torn-down
    resultOUT[0] = true;
}
@Override
/*
 * User clicked "push": take one fresh measurement, wrap it in a single-
 * measurement report, remember it for push confirmation, and push it to the
 * EM through the adapter.
 */
public void onPushDataClicked()
{
    // Create an empty instance of our measurement set
    MeasurementSet sampleSet = createMeasurementSetEmptySample();
    // Take a current measurement
    snapshotMeasurement();
    sampleSet.addMeasurement( currentMeasurement );
    // Set up report (has only a single measure)
    Date date = new Date();
    Report sampleReport = new Report();
    // FIX: setMeasurementSet was previously called twice with the same
    // argument; once is enough
    sampleReport.setMeasurementSet( sampleSet );
    sampleReport.setReportDate( date );
    sampleReport.setFromDate( date );
    sampleReport.setToDate( date );
    sampleReport.setNumberOfMeasurements( 1 );
    // ... and store for confirmation of push, then report!
    pendingPushReports.put( sampleReport.getUUID(), sampleReport );
    emiAdapter.pushMetric( sampleReport );
}
@Override
public void onClientViewClosed()
{
    // Need to notify that we're leaving so the EM does not keep waiting on
    // this client; a failed disconnect is only logged, never rethrown
    try { emiAdapter.disconnectFromEM(); }
    catch ( Exception e )
    { clientLogger.error( "Could not cleanly disconnect from EM:\n" + e.getMessage() ); }
}
@Override
// The EM timed out waiting for our tear-down acknowledgement; log it
public void onTearDownTimeOut()
{ clientView.addLogMessage( "Got tear-down time-out message" ); }
/*
 * Returns a copy (without measurement data) of our one and only
 * MeasurementSet, reached via the single metric generator and group.
 */
private MeasurementSet createMeasurementSetEmptySample()
{
    // Get our only metric generator
    MetricGenerator metGen = metricGenerators.values().iterator().next();
    // Get our only metric group
    // (FIX: removed a duplicate getMetricGroups().iterator().next() call
    //  whose result was discarded)
    MetricGroup mg = metGen.getMetricGroups().iterator().next();
    MeasurementSet currentMS = mg.getMeasurementSets().iterator().next();
    // 'false' = copy without the measurements themselves
    return new MeasurementSet( currentMS, false );
}
/*
 * Takes a rough "measurement" (current JVM heap usage in bytes), stores it
 * as currentMeasurement (and as firstMeasurement if none exists yet) and
 * logs the value to the client view.
 */
private void snapshotMeasurement()
{
    // Just take a very rough measurement
    Runtime rt = Runtime.getRuntime();
    String memVal = Long.toString( rt.totalMemory() - rt.freeMemory() );
    // Get the (single) measurement set for this snapshot (just to get the ID actually)
    MeasurementSet snapshotMS = createMeasurementSetEmptySample();
    // Update the latest measurement
    currentMeasurement = new Measurement();
    currentMeasurement.setMeasurementSetUUID( snapshotMS.getUUID() );
    currentMeasurement.setTimeStamp( new Date() );
    currentMeasurement.setValue( memVal );
    // Store this if it is the first ever measurement
    if ( firstMeasurement == null ) firstMeasurement = currentMeasurement;
    clientView.addLogMessage( "Memory measurement (bytes): " + memVal );
}
}
|
/* Open Source Software - may be modified and shared by FRC teams. The code */
/* must be accompanied by the FIRST BSD license file in the root directory of */
/* the project. */
package com.milkenknights;
import com.milkenknights.InsightLT.DecimalData;
import com.milkenknights.InsightLT.InsightLT;
import com.milkenknights.InsightLT.StringData;
import edu.wpi.first.wpilibj.Compressor;
import edu.wpi.first.wpilibj.Counter;
import edu.wpi.first.wpilibj.DigitalInput;
import edu.wpi.first.wpilibj.DriverStation;
import edu.wpi.first.wpilibj.DriverStationLCD;
import edu.wpi.first.wpilibj.Encoder;
import edu.wpi.first.wpilibj.Gyro;
import edu.wpi.first.wpilibj.IterativeRobot;
import edu.wpi.first.wpilibj.Relay;
import edu.wpi.first.wpilibj.RobotDrive;
import edu.wpi.first.wpilibj.SafePWM;
import edu.wpi.first.wpilibj.SpeedController;
import edu.wpi.first.wpilibj.Talon;
import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.wpilibj.CounterBase.EncodingType;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;
/**
* The VM is configured to automatically run this class, and to call the
* functions corresponding to each mode, as described in the IterativeRobot
* documentation. If you change the name of this class or the package after
* creating this project, you must also update the manifest file in the resource
* directory.
*/
public class Knight extends IterativeRobot {
// Deadband half-width applied to the joystick axes
private static final double JITTER_RANGE = 0.008;
// PWM channels (Talons / drive motors)
private static final int LEFT_MOTOR = 4;
private static final int RIGHT_MOTOR = 9;
private static final int SHOOTER_TALON = 6;
private static final int ACTUATOR_TALON = 1;
private static final int KICKER_TALON = 5;
// Compressor wiring
private static final int COMPRESSOR_PRESSURE_SWITCH = 7;
private static final int COMPRESSOR_RELAY_CHANNEL = 1;
// Solenoid channels
private static final int DRIVE_SOLENOID_A = 1;
private static final int DRIVE_SOLENOID_B = 2;
private static final int HOOK_SOLENOID_A = 3;
private static final int HOOK_SOLENOID_B = 4;
private static final int CASTER_A = 7;
private static final int CASTER_B = 8;
// Digital channels for the counters/encoders
private static final int KICKER_ENC = 8;
private static final int SHOOTER_ENC = 9;
private static final int LEFT_ENC_A = 11;
private static final int LEFT_ENC_B = 12;
private static final int RIGHT_ENC_A = 13;
private static final int RIGHT_ENC_B = 14;
// Channel for the gyro
private static final int GYRO = 1;
// Digital input meant to select the autonomous routine
// (its DigitalInput construction is currently commented out)
private static final int AUTON_CHECK_DI = 14;
// For slow mode
private static final double SLOW_MOD = 0.6;
// For bang bang mode
private static final double SHOOTER_RPM_HIGH = 3700;
private static final double SHOOTER_RPM_LOW = 3400;
// For voltage mode
private static final double SHOOTER_POWER_HIGH = 0.7;
private static final double SHOOTER_POWER_LOW = 0.6;
JStick xbox; // XBox controller
JStick atk; // Logitech ATK3 controller
// true = cheesy drive, false = tank drive (toggled with Xbox A)
private boolean usingCheesy;
private DriverStationLCD lcd;
private Compressor compressor;
// Pair state "true" means high gear,
// Pair state "false" means low gear
private SolenoidPair driveGear;
// used to remember the gear that is being used
// while in and out of slow mode
private boolean normalGear;
private SolenoidPair hookClimb;
private SolenoidPair caster;
private Drive drive;
private SpeedController shooter;
private SpeedController actuator;
private SpeedController kicker;
// Shooter control modes (selected via shooterMode)
private static final int SHOOTER_MODE_VOLTAGE = 0;
private static final int SHOOTER_BANG_BANG = 1;
private static final int SHOOTER_PID = 2;
private static final int SHOOTER_COMBINED = 3;
private int shooterMode;
private Counter shooterEnc;
private Counter kickerEnc;
private Encoder leftEnc;
private Encoder rightEnc;
private Gyro gyro;
private Relay light;
// Used to determine which autonomous procedure to use
private DigitalInput autonCheck;
// stuff for the InsightLT display
private InsightLT display;
private DecimalData disp_batteryVoltage;
private StringData disp_message;
// Runs the shooter/kicker open-loop at the default 65% fraction when on.
private void defaultVoltageShooter(boolean on) {
    voltageShooter(on, 0.65);
}
/**
 * Drives shooter and kicker open-loop at the given voltage fraction,
 * or stops both when 'on' is false.
 */
private void voltageShooter(boolean on, double frac) {
    double cmd;
    if (on) {
        cmd = Utils.voltageSpeed(frac);
    } else {
        cmd = 0;
    }
    shooter.set(cmd);
    kicker.set(cmd);
}
/**
 * Bang-bang speed control toward targetRPM using the shooter encoder;
 * both shooter and kicker receive the same command (0 when off).
 */
private void bangBangShooter(boolean on, double targetRPM) {
    double cmd = on ? Utils.getBangBang(targetRPM, 0.6, shooterEnc) : 0;
    shooter.set(cmd);
    kicker.set(cmd);
}
/**
 * Combined mode: the shooter wheel runs under bang-bang control toward
 * targetRPM while the kicker runs open-loop at the 'frac' voltage fraction.
 * Both motors stop when 'on' is false.
 */
private void combinedShooter(boolean on, double frac, double targetRPM) {
    if (!on) {
        shooter.set(0);
        kicker.set(0);
        return;
    }
    // shooter gets bang bang control
    shooter.set(Utils.getBangBang(targetRPM, 0.6, shooterEnc));
    // kicker gets straight voltage at 'frac'
    kicker.set(Utils.voltageSpeed(frac));
}
// Stops both the shooter and kicker motors.
private void shooterOff() {
    shooter.set(0);
    kicker.set(0);
}
/** Spins the frisbee actuator at 40% while on; stops it otherwise. */
private void defaultActuator(boolean on) {
    if (on) {
        actuator.set(0.4);
    } else {
        actuator.set(0);
    }
}
/**
 * Constructs all robot hardware objects (motors, pneumatics, sensors,
 * controllers) and the InsightLT display, using the channel constants above.
 */
public Knight() {
    // get robot preferences, stored on the cRIO
    drive = new Drive(LEFT_MOTOR, RIGHT_MOTOR);
    shooter = new Talon(SHOOTER_TALON);
    actuator = new Talon(ACTUATOR_TALON);
    kicker = new Talon(KICKER_TALON);
    //shooterMode = SHOOTER_MODE_VOLTAGE;
    shooterMode = SHOOTER_BANG_BANG;
    xbox = new JStick(1);
    xbox.setSlow(0.3);
    atk = new JStick(2);
    lcd = DriverStationLCD.getInstance();
    usingCheesy = true;
    integral_err = 0;
    prev_err = 0;
    compressor = new Compressor(COMPRESSOR_PRESSURE_SWITCH,COMPRESSOR_RELAY_CHANNEL);
    driveGear = new SolenoidXORPair(DRIVE_SOLENOID_A,DRIVE_SOLENOID_B);
    normalGear = driveGear.get();
    hookClimb = new SolenoidXANDPair(HOOK_SOLENOID_A,HOOK_SOLENOID_B);
    caster = new SolenoidXANDPair(CASTER_A,CASTER_B);
    kickerEnc = new Counter(KICKER_ENC);
    shooterEnc = new Counter(SHOOTER_ENC);
    leftEnc = new Encoder(LEFT_ENC_A, LEFT_ENC_B, true, EncodingType.k4X);
    rightEnc = new Encoder(RIGHT_ENC_A, RIGHT_ENC_B, false, EncodingType.k4X);
    // distance-per-pulse in inches
    leftEnc.setDistancePerPulse(0.102);
    rightEnc.setDistancePerPulse(0.102);
    gyro = new Gyro(GYRO);
    light = new Relay(2);
    //autonCheck = new DigitalInput(AUTON_CHECK_DI);
    // configure the display to have two lines of text
    display = new InsightLT(InsightLT.TWO_ONE_LINE_ZONES);
    display.startDisplay();
    // add battery display
    disp_batteryVoltage = new DecimalData("Bat:");
    display.registerData(disp_batteryVoltage,1);
    // this shows what mode the robot is in
    // i.e. teleop, autonomous, disabled
    disp_message = new StringData();
    display.registerData(disp_message,2);
}
/**
 * This function is run when the robot is first started up and should be
 * used for any initialization code: start the compressor, select high gear,
 * set motor inversions and start all counters/encoders.
 */
public void robotInit() {
    compressor.start();
    driveGear.set(true);
    drive.setInvertedMotor(RobotDrive.MotorType.kRearRight, true);
    drive.setInvertedMotor(RobotDrive.MotorType.kRearLeft,true);
    kickerEnc.start();
    shooterEnc.start();
    leftEnc.start();
    rightEnc.start();
}
//This function is called at the start of autonomous
// Stopwatch used by test mode (started in testInit)
Timer timer;
// FPGA timestamp captured when autonomous begins
double autonStart;
// Number of frisbees launched so far during autonomous
int frisbeesThrown;
// Called once at the start of autonomous: record the start time,
// select low gear and zero the drive encoders.
public void autonomousInit() {
    // stop updating the InsightLT display during autonomous
    display.stopDisplay();
    //shooter.set(0.9);
    //kicker.set(0.9);
    autonStart = Timer.getFPGATimestamp();
    frisbeesThrown = 0;
    driveGear.set(false);
    // reset encoders
    rightEnc.reset();
    leftEnc.reset();
}
/**
 * This function is called periodically during autonomous
 */
// PID-style bookkeeping; only zeroed in the constructor - appears otherwise
// unused in this file, TODO confirm before removing
double integral_err;
double prev_err;
double last_timer;
boolean frisbeeDone;
// Autonomous timeline (seconds) and drive tuning
final double WAIT_AFTER_ACTUATOR = 1;
final double WAIT_AFTER_SHOOTING = WAIT_AFTER_ACTUATOR+3.5;
final double DELAY_BETWEEN_FRISBEES = 2.25;
final double FRISBEE_SHOOT_TIME = 0.25;
// inches (the drive encoders are scaled to inches)
final double DRIVE_DISTANCE = 102;
final double SLOWDOWN_TIME = 0.25;
final double DRIVE_FORWARD_TIME = 2;
// currentTime at which the auton drive finished; -1 until then
double finishedMovingForward = -1;
/**
 * Autonomous timeline (times relative to autonStart):
 *   0 .. WAIT_AFTER_ACTUATOR   : spin the shooter up (bang-bang, high RPM)
 *   .. WAIT_AFTER_SHOOTING     : run the actuator to feed frisbees
 *   after WAIT_AFTER_SHOOTING  : stop shooting and drive until both encoders
 *                               pass DRIVE_DISTANCE, then pulse in reverse
 *                               for SLOWDOWN_TIME and shift to high gear.
 */
public void autonomousPeriodic() {
    double currentTime = Timer.getFPGATimestamp() - autonStart;
    // (Several superseded commented-out experiments - timed shooting,
    //  per-frisbee cycle counting, and timed forward driving - were removed
    //  here for readability; recover them from version control if needed.)
    //voltageShooter(true,0.6);
    //bangBangShooter(true,autonCheck.get() ? SHOOTER_RPM_HIGH : SHOOTER_RPM_LOW);
    if (currentTime > WAIT_AFTER_SHOOTING) {
        // Shooting phase done: stop shooter/feeder and drive
        bangBangShooter(false,0);
        defaultActuator(false);
        double left = 0;
        double right = 0;
        // unused; left over from an earlier per-side drive version
        boolean leftDone = false;
        // keep going backwards until encoders read 8 feet
        if (Math.abs(leftEnc.getDistance()) < DRIVE_DISTANCE ||
            Math.abs(rightEnc.getDistance()) < DRIVE_DISTANCE) {
            left = 1;
            right = 1;
        } else {
            if (finishedMovingForward == -1) {
                finishedMovingForward = currentTime;
            }
            driveGear.set(true);
            // brief reverse pulse to stop coasting
            if (currentTime-finishedMovingForward < SLOWDOWN_TIME) {
                left = -0.5;
                right = -0.5;
            }
        }
        drive.tankDrive(left,right);
    } else if (currentTime > WAIT_AFTER_ACTUATOR) {
        // Feeding phase: push frisbees into the spinning shooter
        defaultActuator(true);
    } else {
        // Spin-up phase
        bangBangShooter(true, SHOOTER_RPM_HIGH);
    }
}
// Called once when operator control starts: turn on the signal light.
public void teleopInit() {
    light.set(Relay.Value.kForward);
}
/**
 * This function is called periodically during operator control.
 * Reads both controllers, runs the drivetrain (cheesy or tank) and the
 * shooter in the selected mode, and publishes telemetry to the LCD and
 * SmartDashboard.
 */
public void teleopPeriodic() {
    xbox.update();
    atk.update();
    // Press A to toggle cheesy drive
    if (xbox.isReleased(JStick.XBOX_A)) {
        usingCheesy = !usingCheesy;
    }
    // use LB to toggle high and low gear
    if (xbox.isReleased(JStick.XBOX_LB)) {
        driveGear.toggle();
        normalGear = !normalGear;
    }
    // show the solenoids status
    lcd.println(DriverStationLCD.Line.kUser3,1,driveGear.get()?"High Gear":"Low Gear ");
    // show if the compressor is running
    if (compressor.getPressureSwitchValue()) {
        lcd.println(DriverStationLCD.Line.kUser6,1,"Compressor is off ");
    } else {
        lcd.println(DriverStationLCD.Line.kUser6,1,"Compressor is running");
    }
    // joystick button 1 spins the actuator
    defaultActuator(atk.isPressed(1));
    /*
    // change shooter modes
    if (atk.isPressed(11)) {
        shooterMode = SHOOTER_MODE_VOLTAGE;
    } else if (atk.isPressed(10)) {
        shooterMode = SHOOTER_BANG_BANG;
    } else if (atk.isPressed(9)) {
        shooterMode = SHOOTER_COMBINED;
    }
    */
    // ATK button 2 = high speed, buttons 4/5 = low speed, otherwise off
    if (shooterMode == SHOOTER_MODE_VOLTAGE) {
        if (atk.isPressed(2)) {
            defaultVoltageShooter(true);
        } else if (atk.isPressed(4) || atk.isPressed(5)) {
            voltageShooter(true, 0.6);
        } else {
            shooterOff();
        }
        //defaultVoltageShooter(atk.isPressed(2));
    } else if (shooterMode == SHOOTER_BANG_BANG) {
        if (atk.isPressed(2)) {
            bangBangShooter(true, SHOOTER_RPM_HIGH);
        } else if (atk.isPressed(4) || atk.isPressed(5)) {
            bangBangShooter(true, SHOOTER_RPM_LOW);
        } else {
            shooterOff();
        }
    } else if (shooterMode == SHOOTER_PID) {
        // TODO: shooter PID
    } else if (shooterMode == SHOOTER_COMBINED) {
        if (atk.isPressed(2)) {
            combinedShooter(true,SHOOTER_POWER_HIGH,SHOOTER_RPM_HIGH);
        } else if (atk.isPressed(4) || atk.isPressed(5)) {
            combinedShooter(true, SHOOTER_POWER_LOW,SHOOTER_RPM_LOW);
        } else {
            shooterOff();
        }
    } else {
        shooterOff();
    }
    // toggle the hook climb
    if (atk.isReleased(11)) {
        hookClimb.toggle();
    }
    // toggle the caster
    if (xbox.isReleased(JStick.XBOX_RB)) {
        caster.toggle();
    }
    //double leftStickX = JStick.removeJitter(xbox.getAxis(JStick.XBOX_LSX), JITTER_RANGE);
    double leftStickY = JStick.removeJitter(xbox.getAxis(JStick.XBOX_LSY), JITTER_RANGE);
    double rightStickX = JStick.removeJitter(xbox.getAxis(JStick.XBOX_RSX), JITTER_RANGE);
    double rightStickY = JStick.removeJitter(xbox.getAxis(JStick.XBOX_RSY), JITTER_RANGE);
    boolean slowMode = xbox.getAxis(JStick.XBOX_TRIG) < -0.5;
    if (slowMode) {
        //driveGear.set(false);
    } else {
        //driveGear.set(normalGear);
    }
    if (usingCheesy) {
        drive.cheesyDrive(xbox.getSlowedAxis(JStick.XBOX_LSY)*(slowMode?SLOW_MOD:1), rightStickX,
            //xbox.isPressed(JStick.XBOX_LJ)
            // If either trigger is pressed, enable quickturn
            Math.abs(xbox.getAxis(JStick.XBOX_TRIG)) > 0.5
        );
        lcd.println(DriverStationLCD.Line.kUser4,1,"cheesy drive");
    } else {
        drive.tankDrive(leftStickY*(slowMode?SLOW_MOD:1), rightStickY*(slowMode?SLOW_MOD:1));
        lcd.println(DriverStationLCD.Line.kUser4,1,"tank drive ");
    }
    if (shooterMode == SHOOTER_MODE_VOLTAGE) {
        lcd.println(DriverStationLCD.Line.kUser1,1,"Shooter mode:voltage ");
    } else if (shooterMode == SHOOTER_BANG_BANG) {
        lcd.println(DriverStationLCD.Line.kUser1,1,"Shooter mode:bangbang");
    } else if (shooterMode == SHOOTER_COMBINED) {
        lcd.println(DriverStationLCD.Line.kUser1,1,"Shooter mode:combined");
    } else {
        lcd.println(DriverStationLCD.Line.kUser1,1,"Shooter mode:????????");
    }
    // print encoder values to see if they're working
    lcd.println(DriverStationLCD.Line.kUser2,1,""+shooterEnc.getPeriod());
    SmartDashboard.putNumber("Shooter speed", shooterEnc.getPeriod());
    SmartDashboard.putNumber("Shooter RPM", 60/shooterEnc.getPeriod());
    SmartDashboard.putNumber("Shooter count", shooterEnc.get());
    SmartDashboard.putNumber("Kicker speed", kickerEnc.getPeriod());
    SmartDashboard.putNumber("Kicker RPM", 60/kickerEnc.getPeriod());
    // BUG FIX: this entry previously published kickerEnc.getPeriod() under
    // the "count" label; use the tick count, matching the shooter entry
    SmartDashboard.putNumber("Kicker count", kickerEnc.get());
    SmartDashboard.putNumber("Left Rate", leftEnc.getRate());
    SmartDashboard.putNumber("Left Distance", leftEnc.getDistance());
    SmartDashboard.putNumber("Left Raw", leftEnc.getRaw());
    SmartDashboard.putNumber("Right Rate", rightEnc.getRate());
    SmartDashboard.putNumber("Right Distance", rightEnc.getDistance());
    SmartDashboard.putNumber("Right Raw", rightEnc.getRaw());
    SmartDashboard.putNumber("Gyro", gyro.getAngle());
    //SmartDashboard.putBoolean("Auton check", autonCheck.get());
    SmartDashboard.putNumber("Right Wheels", drive.getRight());
    SmartDashboard.putNumber("Left Wheels", drive.getLeft());
    /*
    lcd.updateLCD();
    // update the display
    disp_batteryVoltage.setData(DriverStation.getInstance().getBatteryVoltage());
    disp_message.setData("teleop");
    */
}
// Called periodically while disabled: keep telemetry fresh and zero the
// drive encoders so autonomous/teleop start from a clean count.
public void disabledPeriodic() {
    disp_batteryVoltage.setData(DriverStation.getInstance().getBatteryVoltage());
    disp_message.setData("disabled");
    display.startDisplay();
    lcd.updateLCD();
    //$wag
    leftEnc.reset();
    rightEnc.reset();
    /*
    if (DriverStation.getInstance().isFMSAttached()) {
        display.stopDisplay();
    } else {
        display.startDisplay();
    }
    */
}
// Test-mode state (used by testInit/testPeriodic)
private boolean shootTester;
private boolean pwmtest;
private SafePWM[] pwms;
/**
 * Called once when test mode starts: starts the test timer and constructs
 * a SafePWM on each of the ten PWM channels for manual testing.
 */
public void testInit() {
    // BUG FIX: 'timer' was declared but never constructed, so calling
    // start() on it threw a NullPointerException on entering test mode.
    timer = new Timer();
    timer.start();
    pwmtest = false;
    pwms = new SafePWM[10];
    for (int i = 0; i < 10; ++i) {
        // PWM channels are 1-based
        pwms[i] = new SafePWM(i+1);
    }
}
/**
 * This function is called periodically during test mode.
 *
 * NOTE(review): the entire test routine is currently commented out, so this
 * method intentionally does nothing. The disabled code contained two
 * sub-modes selected with the Xbox bumpers: a raw PWM test (ATK buttons
 * 1-10 pulse the matching PWM channel) and a staged shooter test (shooter,
 * kicker and CAM brought up on a timer). It was removed here for
 * readability - recover it from version control if needed.
 */
public void testPeriodic() {
}
}
|
/* Open Source Software - may be modified and shared by FRC teams. The code */
/* must be accompanied by the FIRST BSD license file in the root directory of */
/* the project. */
package com.milkenknights;
import com.milkenknights.InsightLT.DecimalData;
import com.milkenknights.InsightLT.InsightLT;
import com.milkenknights.InsightLT.StringData;
import edu.wpi.first.wpilibj.Compressor;
import edu.wpi.first.wpilibj.Counter;
import edu.wpi.first.wpilibj.DigitalInput;
import edu.wpi.first.wpilibj.DriverStation;
import edu.wpi.first.wpilibj.DriverStationLCD;
import edu.wpi.first.wpilibj.Encoder;
import edu.wpi.first.wpilibj.Gyro;
import edu.wpi.first.wpilibj.IterativeRobot;
import edu.wpi.first.wpilibj.Relay;
import edu.wpi.first.wpilibj.RobotDrive;
import edu.wpi.first.wpilibj.SafePWM;
import edu.wpi.first.wpilibj.SpeedController;
import edu.wpi.first.wpilibj.Talon;
import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.wpilibj.CounterBase.EncodingType;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;
/**
* The VM is configured to automatically run this class, and to call the
* functions corresponding to each mode, as described in the IterativeRobot
* documentation. If you change the name of this class or the package after
* creating this project, you must also update the manifest file in the resource
* directory.
*/
public class Knight extends IterativeRobot {
// Deadband half-width applied to the joystick axes
private static final double JITTER_RANGE = 0.008;
// PWM channels (Talons / drive motors)
private static final int LEFT_MOTOR = 4;
private static final int RIGHT_MOTOR = 9;
private static final int SHOOTER_TALON = 6;
private static final int ACTUATOR_TALON = 1;
private static final int KICKER_TALON = 5;
// Compressor wiring
private static final int COMPRESSOR_PRESSURE_SWITCH = 7;
private static final int COMPRESSOR_RELAY_CHANNEL = 1;
// Solenoid channels
private static final int DRIVE_SOLENOID_A = 1;
private static final int DRIVE_SOLENOID_B = 2;
private static final int HOOK_SOLENOID_A = 3;
private static final int HOOK_SOLENOID_B = 4;
private static final int CASTER_A = 7;
private static final int CASTER_B = 8;
// Digital channels for the counters/encoders
private static final int KICKER_ENC = 8;
private static final int SHOOTER_ENC = 9;
private static final int LEFT_ENC_A = 11;
private static final int LEFT_ENC_B = 12;
private static final int RIGHT_ENC_A = 13;
private static final int RIGHT_ENC_B = 14;
// Channel for the gyro
private static final int GYRO = 1;
// Digital input meant to select the autonomous routine
// (its DigitalInput construction is currently commented out)
private static final int AUTON_CHECK_DI = 14;
// For slow mode
private static final double SLOW_MOD = 0.6;
// For bang bang mode
private static final double SHOOTER_RPM_HIGH = 3700;
private static final double SHOOTER_RPM_LOW = 3400;
// For voltage mode
private static final double SHOOTER_POWER_HIGH = 0.7;
private static final double SHOOTER_POWER_LOW = 0.6;
JStick xbox; // XBox controller
JStick atk; // Logitech ATK3 controller
// true = cheesy drive, false = tank drive (toggled with Xbox A)
private boolean usingCheesy;
private DriverStationLCD lcd;
private Compressor compressor;
// Pair state "true" means high gear,
// Pair state "false" means low gear
private SolenoidPair driveGear;
// used to remember the gear that is being used
// while in and out of slow mode
private boolean normalGear;
private SolenoidPair hookClimb;
private SolenoidPair caster;
private Drive drive;
private SpeedController shooter;
private SpeedController actuator;
private SpeedController kicker;
// Shooter control modes (selected via shooterMode)
private static final int SHOOTER_MODE_VOLTAGE = 0;
private static final int SHOOTER_BANG_BANG = 1;
private static final int SHOOTER_PID = 2;
private static final int SHOOTER_COMBINED = 3;
private int shooterMode;
private Counter shooterEnc;
private Counter kickerEnc;
private Encoder leftEnc;
private Encoder rightEnc;
private Gyro gyro;
private Relay light;
// Used to determine which autonomous procedure to use
private DigitalInput autonCheck;
// stuff for the InsightLT display
private InsightLT display;
private DecimalData disp_batteryVoltage;
private StringData disp_message;
// Runs the shooter/kicker open-loop at the default 65% fraction when on.
private void defaultVoltageShooter(boolean on) {
    voltageShooter(on, 0.65);
}
/**
 * Drives shooter and kicker open-loop at the given voltage fraction,
 * or stops both when 'on' is false.
 */
private void voltageShooter(boolean on, double frac) {
    double cmd;
    if (on) {
        cmd = Utils.voltageSpeed(frac);
    } else {
        cmd = 0;
    }
    shooter.set(cmd);
    kicker.set(cmd);
}
/**
 * Bang-bang speed control toward targetRPM using the shooter encoder;
 * both shooter and kicker receive the same command (0 when off).
 */
private void bangBangShooter(boolean on, double targetRPM) {
    double cmd = on ? Utils.getBangBang(targetRPM, 0.6, shooterEnc) : 0;
    shooter.set(cmd);
    kicker.set(cmd);
}
/**
 * Combined mode: the shooter wheel runs under bang-bang control toward
 * targetRPM while the kicker runs open-loop at the 'frac' voltage fraction.
 * Both motors stop when 'on' is false.
 */
private void combinedShooter(boolean on, double frac, double targetRPM) {
    if (!on) {
        shooter.set(0);
        kicker.set(0);
        return;
    }
    // shooter gets bang bang control
    shooter.set(Utils.getBangBang(targetRPM, 0.6, shooterEnc));
    // kicker gets straight voltage at 'frac'
    kicker.set(Utils.voltageSpeed(frac));
}
// Stops both the shooter and kicker motors.
private void shooterOff() {
    shooter.set(0);
    kicker.set(0);
}
/** Spins the frisbee actuator at 40% while on; stops it otherwise. */
private void defaultActuator(boolean on) {
    if (on) {
        actuator.set(0.4);
    } else {
        actuator.set(0);
    }
}
/**
 * Constructs all robot hardware objects (motors, pneumatics, sensors,
 * controllers) and the InsightLT display, using the channel constants above.
 */
public Knight() {
    // get robot preferences, stored on the cRIO
    drive = new Drive(LEFT_MOTOR, RIGHT_MOTOR);
    shooter = new Talon(SHOOTER_TALON);
    actuator = new Talon(ACTUATOR_TALON);
    kicker = new Talon(KICKER_TALON);
    //shooterMode = SHOOTER_MODE_VOLTAGE;
    shooterMode = SHOOTER_BANG_BANG;
    xbox = new JStick(1);
    xbox.setSlow(0.3);
    atk = new JStick(2);
    lcd = DriverStationLCD.getInstance();
    usingCheesy = true;
    integral_err = 0;
    prev_err = 0;
    compressor = new Compressor(COMPRESSOR_PRESSURE_SWITCH,COMPRESSOR_RELAY_CHANNEL);
    driveGear = new SolenoidXORPair(DRIVE_SOLENOID_A,DRIVE_SOLENOID_B);
    normalGear = driveGear.get();
    hookClimb = new SolenoidXANDPair(HOOK_SOLENOID_A,HOOK_SOLENOID_B);
    caster = new SolenoidXANDPair(CASTER_A,CASTER_B);
    kickerEnc = new Counter(KICKER_ENC);
    shooterEnc = new Counter(SHOOTER_ENC);
    leftEnc = new Encoder(LEFT_ENC_A, LEFT_ENC_B, true, EncodingType.k4X);
    rightEnc = new Encoder(RIGHT_ENC_A, RIGHT_ENC_B, false, EncodingType.k4X);
    // distance-per-pulse in inches
    leftEnc.setDistancePerPulse(0.102);
    rightEnc.setDistancePerPulse(0.102);
    gyro = new Gyro(GYRO);
    light = new Relay(2);
    //autonCheck = new DigitalInput(AUTON_CHECK_DI);
    // configure the display to have two lines of text
    display = new InsightLT(InsightLT.TWO_ONE_LINE_ZONES);
    display.startDisplay();
    // add battery display
    disp_batteryVoltage = new DecimalData("Bat:");
    display.registerData(disp_batteryVoltage,1);
    // this shows what mode the robot is in
    // i.e. teleop, autonomous, disabled
    disp_message = new StringData();
    display.registerData(disp_message,2);
}
/**
 * This function is run when the robot is first started up and should be
 * used for any initialization code: start the compressor, select high gear,
 * set motor inversions and start all counters/encoders.
 */
public void robotInit() {
    compressor.start();
    driveGear.set(true);
    drive.setInvertedMotor(RobotDrive.MotorType.kRearRight, true);
    drive.setInvertedMotor(RobotDrive.MotorType.kRearLeft,true);
    kickerEnc.start();
    shooterEnc.start();
    leftEnc.start();
    rightEnc.start();
}
//This function is called at the start of autonomous
// Stopwatch used by test mode (started in testInit)
Timer timer;
// FPGA timestamp captured when autonomous begins
double autonStart;
// Number of frisbees launched so far during autonomous
int frisbeesThrown;
// Called once at the start of autonomous: record the start time,
// select low gear and zero the drive encoders.
public void autonomousInit() {
    //shooter.set(0.9);
    //kicker.set(0.9);
    autonStart = Timer.getFPGATimestamp();
    frisbeesThrown = 0;
    driveGear.set(false);
    // reset encoders
    rightEnc.reset();
    leftEnc.reset();
}
/**
 * This function is called periodically during autonomous
 */
// PID-style bookkeeping; only zeroed in the constructor - appears otherwise
// unused in this file, TODO confirm before removing
double integral_err;
double prev_err;
double last_timer;
boolean frisbeeDone;
// Autonomous timeline (seconds) and drive tuning
final double WAIT_AFTER_ACTUATOR = 1;
final double WAIT_AFTER_SHOOTING = WAIT_AFTER_ACTUATOR+3.5;
final double DELAY_BETWEEN_FRISBEES = 2.25;
final double FRISBEE_SHOOT_TIME = 0.25;
// inches (the drive encoders are scaled to inches)
final double DRIVE_DISTANCE = 102;
final double SLOWDOWN_TIME = 0.25;
final double DRIVE_FORWARD_TIME = 2;
// currentTime at which the auton drive finished; -1 until then
double finishedMovingForward = -1;
/**
 * Autonomous timeline (times relative to autonStart):
 *   0 .. WAIT_AFTER_ACTUATOR   : spin the shooter up (bang-bang, high RPM)
 *   .. WAIT_AFTER_SHOOTING     : run the actuator to feed frisbees
 *   after WAIT_AFTER_SHOOTING  : stop shooting and drive until both encoders
 *                               pass DRIVE_DISTANCE, then pulse in reverse
 *                               for SLOWDOWN_TIME and shift to high gear.
 */
public void autonomousPeriodic() {
    double currentTime = Timer.getFPGATimestamp() - autonStart;
    // (Several superseded commented-out experiments - timed shooting,
    //  per-frisbee cycle counting, and timed forward driving - were removed
    //  here for readability; recover them from version control if needed.)
    //voltageShooter(true,0.6);
    //bangBangShooter(true,autonCheck.get() ? SHOOTER_RPM_HIGH : SHOOTER_RPM_LOW);
    if (currentTime > WAIT_AFTER_SHOOTING) {
        // Shooting phase done: stop shooter/feeder and drive
        bangBangShooter(false,0);
        defaultActuator(false);
        double left = 0;
        double right = 0;
        // unused; left over from an earlier per-side drive version
        boolean leftDone = false;
        // keep going backwards until encoders read 8 feet
        if (Math.abs(leftEnc.getDistance()) < DRIVE_DISTANCE ||
            Math.abs(rightEnc.getDistance()) < DRIVE_DISTANCE) {
            left = 1;
            right = 1;
        } else {
            if (finishedMovingForward == -1) {
                finishedMovingForward = currentTime;
            }
            driveGear.set(true);
            // brief reverse pulse to stop coasting
            if (currentTime-finishedMovingForward < SLOWDOWN_TIME) {
                left = -0.5;
                right = -0.5;
            }
        }
        drive.tankDrive(left,right);
    } else if (currentTime > WAIT_AFTER_ACTUATOR) {
        // Feeding phase: push frisbees into the spinning shooter
        defaultActuator(true);
    } else {
        // Spin-up phase
        bangBangShooter(true, SHOOTER_RPM_HIGH);
    }
}
// Called once when operator control starts: turn on the signal light.
public void teleopInit() {
    light.set(Relay.Value.kForward);
}
/**
 * This function is called periodically during operator control.
 * Reads both controllers, runs the drivetrain (cheesy or tank) and the
 * shooter in the selected mode, and publishes telemetry to the LCD,
 * SmartDashboard and InsightLT display.
 */
public void teleopPeriodic() {
    xbox.update();
    atk.update();
    // Press A to toggle cheesy drive
    if (xbox.isReleased(JStick.XBOX_A)) {
        usingCheesy = !usingCheesy;
    }
    // use LB to toggle high and low gear
    if (xbox.isReleased(JStick.XBOX_LB)) {
        driveGear.toggle();
        normalGear = !normalGear;
    }
    // show the solenoids status
    lcd.println(DriverStationLCD.Line.kUser3,1,driveGear.get()?"High Gear":"Low Gear ");
    // show if the compressor is running
    if (compressor.getPressureSwitchValue()) {
        lcd.println(DriverStationLCD.Line.kUser6,1,"Compressor is off ");
    } else {
        lcd.println(DriverStationLCD.Line.kUser6,1,"Compressor is running");
    }
    // joystick button 1 spins the actuator
    defaultActuator(atk.isPressed(1));
    /*
    // change shooter modes
    if (atk.isPressed(11)) {
        shooterMode = SHOOTER_MODE_VOLTAGE;
    } else if (atk.isPressed(10)) {
        shooterMode = SHOOTER_BANG_BANG;
    } else if (atk.isPressed(9)) {
        shooterMode = SHOOTER_COMBINED;
    }
    */
    // ATK button 2 = high speed, buttons 4/5 = low speed, otherwise off
    if (shooterMode == SHOOTER_MODE_VOLTAGE) {
        if (atk.isPressed(2)) {
            defaultVoltageShooter(true);
        } else if (atk.isPressed(4) || atk.isPressed(5)) {
            voltageShooter(true, 0.6);
        } else {
            shooterOff();
        }
        //defaultVoltageShooter(atk.isPressed(2));
    } else if (shooterMode == SHOOTER_BANG_BANG) {
        if (atk.isPressed(2)) {
            bangBangShooter(true, SHOOTER_RPM_HIGH);
        } else if (atk.isPressed(4) || atk.isPressed(5)) {
            bangBangShooter(true, SHOOTER_RPM_LOW);
        } else {
            shooterOff();
        }
    } else if (shooterMode == SHOOTER_PID) {
        // TODO: shooter PID
    } else if (shooterMode == SHOOTER_COMBINED) {
        if (atk.isPressed(2)) {
            combinedShooter(true,SHOOTER_POWER_HIGH,SHOOTER_RPM_HIGH);
        } else if (atk.isPressed(4) || atk.isPressed(5)) {
            combinedShooter(true, SHOOTER_POWER_LOW,SHOOTER_RPM_LOW);
        } else {
            shooterOff();
        }
    } else {
        shooterOff();
    }
    // toggle the hook climb
    if (atk.isReleased(11)) {
        hookClimb.toggle();
    }
    // toggle the caster
    if (xbox.isReleased(JStick.XBOX_RB)) {
        caster.toggle();
    }
    //double leftStickX = JStick.removeJitter(xbox.getAxis(JStick.XBOX_LSX), JITTER_RANGE);
    double leftStickY = JStick.removeJitter(xbox.getAxis(JStick.XBOX_LSY), JITTER_RANGE);
    double rightStickX = JStick.removeJitter(xbox.getAxis(JStick.XBOX_RSX), JITTER_RANGE);
    double rightStickY = JStick.removeJitter(xbox.getAxis(JStick.XBOX_RSY), JITTER_RANGE);
    boolean slowMode = xbox.getAxis(JStick.XBOX_TRIG) < -0.5;
    if (slowMode) {
        //driveGear.set(false);
    } else {
        //driveGear.set(normalGear);
    }
    if (usingCheesy) {
        drive.cheesyDrive(xbox.getSlowedAxis(JStick.XBOX_LSY)*(slowMode?SLOW_MOD:1), rightStickX,
            //xbox.isPressed(JStick.XBOX_LJ)
            // If either trigger is pressed, enable quickturn
            Math.abs(xbox.getAxis(JStick.XBOX_TRIG)) > 0.5
        );
        lcd.println(DriverStationLCD.Line.kUser4,1,"cheesy drive");
    } else {
        drive.tankDrive(leftStickY*(slowMode?SLOW_MOD:1), rightStickY*(slowMode?SLOW_MOD:1));
        lcd.println(DriverStationLCD.Line.kUser4,1,"tank drive ");
    }
    if (shooterMode == SHOOTER_MODE_VOLTAGE) {
        lcd.println(DriverStationLCD.Line.kUser1,1,"Shooter mode:voltage ");
    } else if (shooterMode == SHOOTER_BANG_BANG) {
        lcd.println(DriverStationLCD.Line.kUser1,1,"Shooter mode:bangbang");
    } else if (shooterMode == SHOOTER_COMBINED) {
        lcd.println(DriverStationLCD.Line.kUser1,1,"Shooter mode:combined");
    } else {
        lcd.println(DriverStationLCD.Line.kUser1,1,"Shooter mode:????????");
    }
    // print encoder values to see if they're working
    lcd.println(DriverStationLCD.Line.kUser2,1,""+shooterEnc.getPeriod());
    SmartDashboard.putNumber("Shooter speed", shooterEnc.getPeriod());
    SmartDashboard.putNumber("Shooter RPM", 60/shooterEnc.getPeriod());
    SmartDashboard.putNumber("Shooter count", shooterEnc.get());
    SmartDashboard.putNumber("Kicker speed", kickerEnc.getPeriod());
    SmartDashboard.putNumber("Kicker RPM", 60/kickerEnc.getPeriod());
    // BUG FIX: this entry previously published kickerEnc.getPeriod() under
    // the "count" label; use the tick count, matching the shooter entry
    SmartDashboard.putNumber("Kicker count", kickerEnc.get());
    SmartDashboard.putNumber("Left Rate", leftEnc.getRate());
    SmartDashboard.putNumber("Left Distance", leftEnc.getDistance());
    SmartDashboard.putNumber("Left Raw", leftEnc.getRaw());
    SmartDashboard.putNumber("Right Rate", rightEnc.getRate());
    SmartDashboard.putNumber("Right Distance", rightEnc.getDistance());
    SmartDashboard.putNumber("Right Raw", rightEnc.getRaw());
    SmartDashboard.putNumber("Gyro", gyro.getAngle());
    //SmartDashboard.putBoolean("Auton check", autonCheck.get());
    SmartDashboard.putNumber("Right Wheels", drive.getRight());
    SmartDashboard.putNumber("Left Wheels", drive.getLeft());
    lcd.updateLCD();
    // update the display
    disp_batteryVoltage.setData(DriverStation.getInstance().getBatteryVoltage());
    disp_message.setData("teleop");
}
/**
 * Called periodically while the robot is disabled. Pushes the battery
 * voltage and a status string to the dashboard display and keeps the drive
 * encoders zeroed so the next mode starts from zero counts.
 */
public void disabledPeriodic() {
disp_batteryVoltage.setData(DriverStation.getInstance().getBatteryVoltage());
disp_message.setData("disabled");
//$wag
// Continuously reset drive encoders while disabled.
leftEnc.reset();
rightEnc.reset();
/*
if (DriverStation.getInstance().isFMSAttached()) {
display.stopDisplay();
} else {
display.startDisplay();
}
*/
}
// True while the slow-start shooter test sequence is selected in test mode.
private boolean shootTester;
// Selects between the raw PWM sweep test (true) and the shooter test (false).
private boolean pwmtest;
// One SafePWM per PWM channel 1-10, used by the raw PWM test.
private SafePWM[] pwms;
/**
 * Initializes test mode: starts the shared timer, selects the shooter test
 * by default, and allocates one SafePWM wrapper per PWM channel (1-10).
 */
public void testInit() {
   timer.start();
   pwmtest = false;
   pwms = new SafePWM[10];
   // PWM channels are 1-based on the controller, hence channel + 1.
   for (int channel = 0; channel < pwms.length; ++channel) {
      pwms[channel] = new SafePWM(channel + 1);
   }
}
/**
* This function is called periodically during test mode
*/
public void testPeriodic() {
// NOTE(review): the entire test-mode body below is commented out, so this
// method is currently a no-op. Uncomment the block to restore the PWM sweep
// and shooter slow-start tests.
/*
xbox.update();
atk.update();
// toggle between PWM test and austin's thing
if (xbox.isPressed(JStick.XBOX_LB)) {
pwmtest = false;
}
if (xbox.isPressed(JStick.XBOX_RB)) {
pwmtest = true;
}
if (pwmtest) {
for (int i = 0; i < 10; ++i) {
if (atk.isPressed(i+1)) {
pwms[i].setRaw(143);
}
}
} else {
if (xbox.isReleased(JStick.XBOX_A)) {
shootTester = !shootTester;
lcd.println(DriverStationLCD.Line.kUser1, 1, "Shooter Tester ");
} else {
lcd.println(DriverStationLCD.Line.kUser1, 1, "Normal Tester ");
}
//Only spins shooter
shooter.set((atk.isPressed(7)) ? -1 : 0);
//Only spins the kicker
kicker.set((atk.isPressed(6)) ? 1 : 0);
//Slow start for shooting 1
if(shootTester && atk.isPressed(1)) {
if(timer.get() > 2) {
shooter.set(1);
lcd.println(DriverStationLCD.Line.kUser2, 1, "Shooter: On ");
}
if(timer.get() > 4) {
kicker.set(1);
lcd.println(DriverStationLCD.Line.kUser3, 1, "Kicker: On ");
}
if(timer.get() > 7) {
//actuator.set(1);
lcd.println(DriverStationLCD.Line.kUser4, 1, "CAM: On ");
} else {
timer.reset();
shooter.set(0);
kicker.set(0);
actuator.set(0);
lcd.println(DriverStationLCD.Line.kUser2, 1, "Shooter: Off ");
lcd.println(DriverStationLCD.Line.kUser3, 1, "Kicker: Off ");
lcd.println(DriverStationLCD.Line.kUser4, 1, "CAM: Off ");
}
}
lcd.println(DriverStationLCD.Line.kUser1, 1, "" + timer.get());
lcd.updateLCD();
}
*/
}
}
|
package batfish.main;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.FileVisitResult;
import java.nio.file.FileVisitor;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import org.antlr.runtime.ANTLRStringStream;
import org.antlr.runtime.CommonTokenStream;
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.tree.ParseTreeWalker;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.exception.ExceptionUtils;
import com.logicblox.bloxweb.client.ServiceClientException;
import com.logicblox.connect.Workspace.Relation;
import com.thoughtworks.xstream.XStream;
import com.thoughtworks.xstream.io.xml.DomDriver;
import batfish.collections.EdgeSet;
import batfish.collections.FibMap;
import batfish.collections.FibRow;
import batfish.collections.FibSet;
import batfish.collections.FunctionSet;
import batfish.collections.NodeSet;
import batfish.collections.PolicyRouteFibIpMap;
import batfish.collections.PolicyRouteFibNodeMap;
import batfish.collections.PredicateSemantics;
import batfish.collections.PredicateValueTypeMap;
import batfish.collections.QualifiedNameMap;
import batfish.grammar.BatfishCombinedParser;
import batfish.grammar.ConfigurationLexer;
import batfish.grammar.ConfigurationParser;
import batfish.grammar.ParseTreePrettyPrinter;
import batfish.grammar.cisco.CiscoCombinedParser;
import batfish.grammar.cisco.controlplane.CiscoControlPlaneExtractor;
import batfish.grammar.juniper.FlatJuniperGrammarLexer;
import batfish.grammar.juniper.FlatJuniperGrammarParser;
import batfish.grammar.juniper.JuniperGrammarLexer;
import batfish.grammar.juniper.JuniperGrammarParser;
import batfish.grammar.logicblox.LogQLPredicateInfoExtractor;
import batfish.grammar.logicblox.LogiQLCombinedParser;
import batfish.grammar.logicblox.LogiQLPredicateInfoResolver;
import batfish.grammar.topology.BatfishTopologyCombinedParser;
import batfish.grammar.topology.BatfishTopologyExtractor;
import batfish.grammar.topology.GNS3TopologyCombinedParser;
import batfish.grammar.topology.GNS3TopologyExtractor;
import batfish.grammar.topology.TopologyExtractor;
import batfish.grammar.z3.ConcretizerQueryResultCombinedParser;
import batfish.grammar.z3.ConcretizerQueryResultExtractor;
import batfish.grammar.z3.DatalogQueryResultCombinedParser;
import batfish.grammar.z3.DatalogQueryResultExtractor;
import batfish.logic.LogicResourceLocator;
import batfish.logicblox.ConfigurationFactExtractor;
import batfish.logicblox.Facts;
import batfish.logicblox.LBInitializationException;
import batfish.logicblox.LBValueType;
import batfish.logicblox.LogicBloxFrontend;
import batfish.logicblox.PredicateInfo;
import batfish.logicblox.ProjectFile;
import batfish.logicblox.QueryException;
import batfish.logicblox.TopologyFactExtractor;
import batfish.representation.Configuration;
import batfish.representation.Edge;
import batfish.representation.Ip;
import batfish.representation.Topology;
import batfish.representation.VendorConfiguration;
import batfish.representation.VendorConversionException;
import batfish.representation.cisco.CiscoVendorConfiguration;
import batfish.representation.cisco.Interface;
import batfish.util.UrlZipExplorer;
import batfish.util.StringFilter;
import batfish.util.Util;
import batfish.z3.ConcretizerQuery;
import batfish.z3.MultipathInconsistencyQuerySynthesizer;
import batfish.z3.QuerySynthesizer;
import batfish.z3.Synthesizer;
public class Batfish implements AutoCloseable {
// Named LogiQL block containing the base fact declarations.
private static final String BASIC_FACTS_BLOCKNAME = "BaseFacts";
// Filename of the serialized topology-edge set in the data-plane directory.
private static final String EDGES_FILENAME = "edges";
// LogicBlox predicate holding policy-route next-hop FIB entries.
private static final String FIB_POLICY_ROUTE_NEXT_HOP_PREDICATE_NAME = "FibForwardPolicyRouteNextHopIp";
// LogicBlox predicate holding destination-network FIB entries.
private static final String FIB_PREDICATE_NAME = "FibNetworkForward";
// Filenames of the serialized FIB maps in the data-plane directory.
private static final String FIBS_FILENAME = "fibs";
private static final String FIBS_POLICY_ROUTE_NEXT_HOP_FILENAME = "fibs-policy-route";
// Name of the serialized PredicateInfo object produced at build time.
private static final String PREDICATE_INFO_FILENAME = "predicateInfo.object";
// Platform-specific file separator.
private static final String SEPARATOR = System.getProperty("file.separator");
// Prefix for statically-defined LogiQL blocks shipped with batfish.
private static final String STATIC_FACT_BLOCK_PREFIX = "libbatfish:";
// Default topology file name inside a test rig.
private static final String TOPOLOGY_FILENAME = "topology.net";
// LogicBlox predicate holding computed LAN adjacencies (topology edges).
private static final String TOPOLOGY_PREDICATE_NAME = "LanAdjacent";
/**
 * Seeds the given fact bins with one column-header row per control-plane
 * fact predicate.
 */
private static void initControlPlaneFactBins(
Map<String, StringBuilder> factBins) {
initFactBins(Facts.CONTROL_PLANE_FACT_COLUMN_HEADERS, factBins);
}
/**
 * Creates one StringBuilder per predicate in {@code columnHeaderMap}, each
 * seeded with that predicate's column-header line plus a newline, and stores
 * it in {@code factBins} keyed by predicate name.
 *
 * @param columnHeaderMap map from predicate name to its header line
 * @param factBins destination map to populate
 */
private static void initFactBins(Map<String, String> columnHeaderMap,
      Map<String, StringBuilder> factBins) {
   // Iterate entries directly to avoid a second lookup per predicate.
   for (Map.Entry<String, String> entry : columnHeaderMap.entrySet()) {
      StringBuilder bin = new StringBuilder(entry.getValue() + "\n");
      factBins.put(entry.getKey(), bin);
   }
}
/**
 * Seeds the given fact bins with one column-header row per traffic fact
 * predicate.
 */
private static void initTrafficFactBins(Map<String, StringBuilder> factBins) {
initFactBins(Facts.TRAFFIC_FACT_COLUMN_HEADERS, factBins);
}
// Open ConnectBlox sessions; all are closed by close().
private List<LogicBloxFrontend> _lbFrontends;
// Predicate name/type/semantics info loaded from the logic directory.
private PredicateInfo _predicateInfo;
// Parsed command-line/configuration settings.
private Settings _settings;
// Millisecond timestamp; presumably set by resetTimer() and read by
// printElapsedTime() — both defined outside this chunk, TODO confirm.
private long _timerCount;
// Temporary directory holding unpacked logic files; null when none exists.
private File _tmpLogicDir;
/**
 * Creates a Batfish driver bound to the given settings. LogicBlox frontends
 * are opened lazily and tracked so close() can shut them all down.
 *
 * @param settings parsed command-line/configuration settings
 */
public Batfish(Settings settings) {
   _settings = settings;
   _tmpLogicDir = null;
   _lbFrontends = new ArrayList<LogicBloxFrontend>();
}
/**
 * Adds the batfish LogiQL project to the connected workspace. The logic
 * directory comes from settings when specified; otherwise the bundled logic
 * files are unpacked into a temporary directory, which is removed again
 * after the project has been added.
 *
 * @throws BatfishException if LogicBlox reports an error adding the project
 */
private void addProject(LogicBloxFrontend lbFrontend) {
print(0, "\n*** ADDING PROJECT ***\n");
resetTimer();
String settingsLogicDir = _settings.getLogicDir();
File logicDir;
if (settingsLogicDir != null) {
logicDir = new ProjectFile(settingsLogicDir);
}
else {
// No explicit logic dir configured: unpack from classpath/jar.
logicDir = retrieveLogicDir().getAbsoluteFile();
}
String result = lbFrontend.addProject(logicDir, "");
// Delete any temporary unpacked logic directory before checking the result.
cleanupLogicDir();
// addProject returns an error message on failure, null on success.
if (result != null) {
throw new BatfishException(result + "\n");
}
print(1, "SUCCESS\n");
printElapsedTime();
}
/**
 * Executes each named static LogiQL block (prefixed with "libbatfish:") in
 * the connected workspace, failing fast on the first block that reports an
 * error.
 *
 * @param lbFrontend connected LogicBlox session
 * @param blockNames unprefixed names of the blocks to execute
 * @throws BatfishException with LogicBlox's error text on failure
 */
private void addStaticFacts(LogicBloxFrontend lbFrontend,
      List<String> blockNames) {
   print(0, "\n*** ADDING STATIC FACTS ***\n");
   resetTimer();
   for (String blockName : blockNames) {
      print(1, "Adding " + blockName + "...");
      String output = lbFrontend.execNamedBlock(STATIC_FACT_BLOCK_PREFIX
            + blockName);
      // execNamedBlock returns error text on failure, null on success.
      if (output != null) {
         throw new BatfishException(output + "\n");
      }
      print(1, "OK\n");
   }
   print(1, "SUCCESS\n");
   printElapsedTime();
}
/**
 * Placeholder for configuration anonymization; not yet implemented.
 */
private void anonymizeConfigurations() {
// TODO Auto-generated method stub
}
/**
 * Extracts predicate type information from the LogiQL source files and
 * serializes the resulting {@code PredicateInfo} into the logic output
 * (bin) directory. Meant only to be called during the build process; it
 * must never be executed from a jar.
 *
 * @throws BatfishException if run from a jar, if the output directory
 *            cannot be resolved, or if the logic sources cannot be listed
 */
private void buildPredicateInfo() {
Path logicBinDirPath = null;
// Locate where the compiled logic resources live via the class loader.
URL logicSourceURL = LogicResourceLocator.class.getProtectionDomain()
.getCodeSource().getLocation();
String logicSourceString = logicSourceURL.toString();
if (logicSourceString.startsWith("onejar:")) {
throw new BatfishException(
"buildPredicateInfo() should never be called from within a jar");
}
String logicPackageResourceName = LogicResourceLocator.class.getPackage()
.getName().replace('.', SEPARATOR.charAt(0));
try {
logicBinDirPath = Paths.get(LogicResourceLocator.class
.getClassLoader().getResource(logicPackageResourceName).toURI());
}
catch (URISyntaxException e) {
throw new BatfishException("Failed to resolve logic output directory",
e);
}
Path logicSrcDirPath = Paths.get(_settings.getLogicSrcDir());
final Set<Path> logicFiles = new TreeSet<Path>();
try {
// Collect every *.logic source except base facts, rules, and services.
Files.walkFileTree(logicSrcDirPath,
new java.nio.file.SimpleFileVisitor<Path>() {
@Override
public FileVisitResult visitFile(Path file,
BasicFileAttributes attrs) throws IOException {
String name = file.getFileName().toString();
if (!name.equals("BaseFacts.logic")
&& !name.endsWith("_rules.logic")
&& !name.startsWith("service_")
&& name.endsWith(".logic")) {
logicFiles.add(file);
}
return super.visitFile(file, attrs);
}
});
}
catch (IOException e) {
throw new BatfishException("Could not make list of logic files", e);
}
PredicateValueTypeMap predicateValueTypes = new PredicateValueTypeMap();
QualifiedNameMap qualifiedNameMap = new QualifiedNameMap();
FunctionSet functions = new FunctionSet();
PredicateSemantics predicateSemantics = new PredicateSemantics();
List<ParserRuleContext> trees = new ArrayList<ParserRuleContext>();
// Pass 1: parse every logic file into a tree.
for (Path logicFilePath : logicFiles) {
String input = readFile(logicFilePath.toFile());
LogiQLCombinedParser parser = new LogiQLCombinedParser(input);
ParserRuleContext tree = parse(parser, logicFilePath.toString());
trees.add(tree);
}
ParseTreeWalker walker = new ParseTreeWalker();
// Pass 2: collect predicate value types from all trees.
for (ParserRuleContext tree : trees) {
LogQLPredicateInfoExtractor extractor = new LogQLPredicateInfoExtractor(
predicateValueTypes);
walker.walk(extractor, tree);
}
// Pass 3: resolve qualified names, functions, and semantics using the
// value types gathered in pass 2.
for (ParserRuleContext tree : trees) {
LogiQLPredicateInfoResolver resolver = new LogiQLPredicateInfoResolver(
predicateValueTypes, qualifiedNameMap, functions,
predicateSemantics);
walker.walk(resolver, tree);
}
PredicateInfo predicateInfo = new PredicateInfo(predicateSemantics,
predicateValueTypes, functions, qualifiedNameMap);
File predicateInfoFile = logicBinDirPath.resolve(PREDICATE_INFO_FILENAME)
.toFile();
serializeObject(predicateInfo, predicateInfoFile);
}
/**
 * Deletes the temporary directory into which logic files were unpacked, if
 * one exists, then clears the reference so cleanup is idempotent.
 *
 * @throws BatfishException if the directory cannot be deleted
 */
private void cleanupLogicDir() {
   if (_tmpLogicDir == null) {
      return;
   }
   try {
      FileUtils.deleteDirectory(_tmpLogicDir);
   }
   catch (IOException e) {
      throw new BatfishException(
            "Error cleaning up temporary logic directory", e);
   }
   _tmpLogicDir = null;
}
/**
 * Closes every connected LogicBlox frontend opened by this instance,
 * shutting down their backend threads (AutoCloseable contract).
 */
@Override
public void close() throws Exception {
for (LogicBloxFrontend lbFrontend : _lbFrontends) {
// Close backend threads
if (lbFrontend != null && lbFrontend.connected()) {
lbFrontend.close();
}
}
}
/**
 * Computes the data-plane structures (destination-route FIBs, policy-route
 * next-hop FIBs, and topology edges) by querying the connected LogicBlox
 * workspace, then serializes each artifact into the data-plane directory.
 *
 * @param lbFrontend connected LogicBlox session with facts already loaded
 */
private void computeDataPlane(LogicBloxFrontend lbFrontend) {
   print(0, "\n*** COMPUTING DATA PLANE STRUCTURES ***\n");
   resetTimer();
   lbFrontend.initEntityTable();
   print(1, "Retrieving topology information from LogicBlox..");
   EdgeSet topologyEdges = getTopologyEdges(lbFrontend);
   print(1, "OK\n");
   String fibQualifiedName = _predicateInfo.getPredicateNames().get(
         FIB_PREDICATE_NAME);
   print(1, "Retrieving network FIB information from LogicBlox..");
   Relation fibNetwork = lbFrontend.queryPredicate(fibQualifiedName);
   print(1, "OK\n");
   String fibPolicyRouteNextHopQualifiedName = _predicateInfo
         .getPredicateNames().get(FIB_POLICY_ROUTE_NEXT_HOP_PREDICATE_NAME);
   print(1,
         "Retrieving ip FIB information from LogicBlox for policy-routing next-hop-ips..");
   Relation fibPolicyRouteNextHops = lbFrontend
         .queryPredicate(fibPolicyRouteNextHopQualifiedName);
   print(1, "OK\n");
   // Typo fix: was "Caclulating".
   print(1, "Calculating forwarding rules..");
   FibMap fibs = getRouteForwardingRules(fibNetwork, lbFrontend);
   PolicyRouteFibNodeMap policyRouteFibNodeMap = getPolicyRouteFibNodeMap(
         fibPolicyRouteNextHops, lbFrontend);
   print(1, "OK\n");
   Path fibsPath = Paths.get(_settings.getDataPlaneDir(), FIBS_FILENAME);
   Path fibsPolicyRoutePath = Paths.get(_settings.getDataPlaneDir(),
         FIBS_POLICY_ROUTE_NEXT_HOP_FILENAME);
   Path edgesPath = Paths.get(_settings.getDataPlaneDir(), EDGES_FILENAME);
   print(1, "Serializing fibs..");
   serializeObject(fibs, fibsPath.toFile());
   print(1, "OK\n");
   print(1, "Serializing policy route next hop interface map..");
   serializeObject(policyRouteFibNodeMap, fibsPolicyRoutePath.toFile());
   print(1, "OK\n");
   // Typo fix: was "toplogy".
   print(1, "Serializing topology edges..");
   serializeObject(topologyEdges, edgesPath.toFile());
   print(1, "OK\n");
   printElapsedTime();
}
/**
 * Reads the output of a Z3 datalog query, extracts concretizer queries from
 * it, and writes each query to a numbered ".smt2" file based on the
 * configured output path.
 */
private void concretize() {
print(0, "\n*** GENERATING Z3 CONCRETIZER QUERIES ***\n");
resetTimer();
String concInPath = _settings.getConcretizerInputFilePath();
print(1, "Reading z3 datalog query output file: \"" + concInPath + "\"..");
File queryOutputFile = new File(concInPath);
String queryOutputStr = readFile(queryOutputFile);
print(1, "OK\n");
DatalogQueryResultCombinedParser parser = new DatalogQueryResultCombinedParser(
queryOutputStr);
ParserRuleContext tree = parse(parser, concInPath);
print(1, "Computing concretizer queries..");
ParseTreeWalker walker = new ParseTreeWalker();
DatalogQueryResultExtractor extractor = new DatalogQueryResultExtractor();
walker.walk(extractor, tree);
print(1, "OK\n");
List<ConcretizerQuery> concretizerQueries = extractor
.getConcretizerQueries();
// One output file per extracted query: <base>-<index>.smt2
for (int i = 0; i < concretizerQueries.size(); i++) {
ConcretizerQuery cq = concretizerQueries.get(i);
String concQueryPath = _settings.getConcretizerOutputFilePath() + "-"
+ i + ".smt2";
print(1, "Writing concretizer query file: \"" + concQueryPath + "\"..");
writeFile(concQueryPath, cq.getText());
print(1, "OK\n");
}
printElapsedTime();
}
/**
 * Opens a ConnectBlox session on the configured workspace. The workspace is
 * assumed to exist unless settings request that it be created.
 *
 * @return the connected frontend
 * @throws BatfishException if initialization fails
 */
private LogicBloxFrontend connect() {
   String workspaceMaster = _settings.getWorkspaceName();
   boolean assumedToExist = !_settings.createWorkspace();
   try {
      return initFrontend(assumedToExist, workspaceMaster);
   }
   catch (LBInitializationException e) {
      throw new BatfishException("Failed to connect to LogicBlox", e);
   }
}
/**
 * Deserializes every file in the given directory into a vendor-independent
 * {@code Configuration}, keyed by file name, in sorted order.
 *
 * @param serializedConfigPath directory of serialized configurations
 * @return map from file name to deserialized configuration
 * @throws BatfishException if the directory cannot be listed
 */
public Map<String, Configuration> deserializeConfigurations(
String serializedConfigPath) {
print(1,
"\n*** DESERIALIZING VENDOR-INDEPENDENT CONFIGURATION STRUCTURES ***\n");
resetTimer();
Map<String, Configuration> configurations = new TreeMap<String, Configuration>();
File dir = new File(serializedConfigPath);
File[] serializedConfigs = dir.listFiles();
// listFiles() returns null when the path is not a readable directory.
if (serializedConfigs == null) {
throw new BatfishException(
"Error reading vendor-independent configs directory");
}
for (File serializedConfig : serializedConfigs) {
String name = serializedConfig.getName();
print(2, "Reading config: \"" + serializedConfig + "\"");
Object object = deserializeObject(serializedConfig);
Configuration c = (Configuration) object;
configurations.put(name, c);
print(2, "...OK\n");
}
printElapsedTime();
return configurations;
}
/**
 * Deserializes a single object from the given file, using XStream XML when
 * text serialization is enabled in settings, otherwise standard Java
 * serialization.
 *
 * <p>Fix: the original only closed the stream on success, leaking the file
 * handle when {@code readObject()} threw; try-with-resources (the file
 * already uses Java 7 multi-catch) closes both streams on every path.</p>
 *
 * @param inputFile file containing one serialized object
 * @return the deserialized object
 * @throws BatfishException if reading or class resolution fails
 */
private Object deserializeObject(File inputFile) {
   try (FileInputStream fis = new FileInputStream(inputFile);
         ObjectInputStream ois = _settings.getSerializeToText()
               ? new XStream(new DomDriver("UTF-8")).createObjectInputStream(fis)
               : new ObjectInputStream(fis)) {
      return ois.readObject();
   }
   catch (IOException | ClassNotFoundException e) {
      throw new BatfishException("Failed to deserialize object from file: "
            + inputFile.toString(), e);
   }
}
/**
 * Deserializes every file in the given directory into a vendor-specific
 * {@code VendorConfiguration}, keyed by file name, in sorted order.
 *
 * @param serializedVendorConfigPath directory of serialized vendor configs
 * @return map from file name to deserialized vendor configuration
 * @throws BatfishException if the directory cannot be listed
 */
public Map<String, VendorConfiguration> deserializeVendorConfigurations(
String serializedVendorConfigPath) {
print(1, "\n*** DESERIALIZING VENDOR CONFIGURATION STRUCTURES ***\n");
resetTimer();
Map<String, VendorConfiguration> vendorConfigurations = new TreeMap<String, VendorConfiguration>();
File dir = new File(serializedVendorConfigPath);
File[] serializedConfigs = dir.listFiles();
// listFiles() returns null when the path is not a readable directory.
if (serializedConfigs == null) {
throw new BatfishException("Error reading vendor configs directory");
}
for (File serializedConfig : serializedConfigs) {
String name = serializedConfig.getName();
print(2, "Reading vendor config: \"" + serializedConfig + "\"");
Object object = deserializeObject(serializedConfig);
VendorConfiguration vc = (VendorConfiguration) object;
vendorConfigurations.put(name, vc);
print(2, "...OK\n");
}
printElapsedTime();
return vendorConfigurations;
}
/**
 * Writes each fact bin to a file named after its predicate in the
 * configured fact-dump directory, creating the directory first.
 *
 * @param factBins map from predicate name to accumulated fact text
 * @throws BatfishException if any file cannot be written
 */
private void dumpFacts(Map<String, StringBuilder> factBins) {
   print(0, "\n*** DUMPING FACTS ***\n");
   resetTimer();
   Path factsDir = Paths.get(_settings.getDumpFactsDir());
   try {
      Files.createDirectories(factsDir);
      // Iterate entries directly to avoid a second lookup per predicate.
      for (Map.Entry<String, StringBuilder> entry : factBins.entrySet()) {
         Path factsFilePath = factsDir.resolve(entry.getKey());
         print(1, "Writing: \"" + factsFilePath.toAbsolutePath().toString()
               + "\"\n");
         FileUtils.write(factsFilePath.toFile(), entry.getValue().toString());
      }
   }
   catch (IOException e) {
      throw new BatfishException("Failed to write fact dump file", e);
   }
   printElapsedTime();
}
/**
 * Writes one line per interface of every Cisco configuration in the test
 * rig, of the form {@code <node> <interface> ["<description>"]}, sorted by
 * hostname and interface name. Non-Cisco configurations are skipped.
 *
 * <p>Fix: the original attempted the cast and caught
 * {@code ClassCastException} for control flow; an explicit
 * {@code instanceof} check expresses the same skip without using exceptions
 * as flow control.</p>
 *
 * @param testRigPath directory containing the configuration files
 * @param outputPath file to write the interface listing to
 */
private void dumpInterfaceDescriptions(String testRigPath, String outputPath) {
   Map<File, String> configurationData = readConfigurationFiles(testRigPath);
   Map<String, VendorConfiguration> configs = parseVendorConfigurations(configurationData);
   Map<String, VendorConfiguration> sortedConfigs = new TreeMap<String, VendorConfiguration>();
   sortedConfigs.putAll(configs);
   StringBuilder sb = new StringBuilder();
   for (VendorConfiguration vconfig : sortedConfigs.values()) {
      // Only Cisco configurations carry the interface map we dump here.
      if (!(vconfig instanceof CiscoVendorConfiguration)) {
         continue;
      }
      CiscoVendorConfiguration config = (CiscoVendorConfiguration) vconfig;
      String node = vconfig.getHostname();
      Map<String, Interface> sortedInterfaces = new TreeMap<String, Interface>();
      sortedInterfaces.putAll(config.getInterfaces());
      for (Interface iface : sortedInterfaces.values()) {
         String iname = iface.getName();
         String description = iface.getDescription();
         sb.append(node + " " + iname);
         if (description != null) {
            sb.append(" \"" + description + "\"");
         }
         sb.append("\n");
      }
   }
   String output = sb.toString();
   writeFile(outputPath, output);
}
/**
 * Writes text to stderr when the configured log level is at least
 * {@code logLevel}, flushing immediately so output interleaves correctly
 * with stdout progress messages.
 */
public void error(int logLevel, String text) {
if (_settings.getLogLevel() >= logLevel) {
System.err.print(text);
System.err.flush();
}
}
/**
 * For each node in the previously-serialized node set, synthesizes a
 * multipath-inconsistency query and writes it to a per-node ".smt2" file;
 * also writes a plain-text node list for the next pipeline stage.
 */
private void genMultipathQueries() {
print(0, "\n*** GENERATING MULTIPATH-INCONSISTENCY QUERIES ***\n");
resetTimer();
String mpiQueryBasePath = _settings.getMultipathInconsistencyQueryPath();
String nodeSetPath = _settings.getNodeSetPath();
String nodeSetTextPath = nodeSetPath + ".txt";
print(1, "Reading node set from : \"" + nodeSetPath + "\"..");
NodeSet nodes = (NodeSet) deserializeObject(new File(nodeSetPath));
print(1, "OK\n");
// One query file per node: <base>-<hostname>.smt2
for (String hostname : nodes) {
QuerySynthesizer synth = new MultipathInconsistencyQuerySynthesizer(
hostname);
String queryText = synth.getQueryText();
String mpiQueryPath = mpiQueryBasePath + "-" + hostname + ".smt2";
print(1, "Writing query to: \"" + mpiQueryPath + "\"..");
writeFile(mpiQueryPath, queryText);
print(1, "OK\n");
}
print(1, "Writing node lines for next stage..");
StringBuilder sb = new StringBuilder();
for (String node : nodes) {
sb.append(node + "\n");
}
writeFile(nodeSetTextPath, sb.toString());
print(1, "OK\n");
printElapsedTime();
}
/**
 * Generates the Z3 logic program from the previously-serialized data-plane
 * structures plus the vendor-independent configurations, writes it to the
 * configured Z3 output file, and serializes the resulting node set for the
 * query-generation stage.
 *
 * @param configurations vendor-independent configurations keyed by hostname
 * @throws BatfishException if the Z3 output file cannot be written
 */
private void genZ3(Map<String, Configuration> configurations) {
   print(0, "\n*** GENERATING Z3 LOGIC ***\n");
   resetTimer();
   Path fibsPath = Paths.get(_settings.getDataPlaneDir(), FIBS_FILENAME);
   Path prFibsPath = Paths.get(_settings.getDataPlaneDir(),
         FIBS_POLICY_ROUTE_NEXT_HOP_FILENAME);
   Path edgesPath = Paths.get(_settings.getDataPlaneDir(), EDGES_FILENAME);
   print(1, "Deserializing destination route fibs..");
   FibMap fibs = (FibMap) deserializeObject(fibsPath.toFile());
   print(1, "OK\n");
   print(1, "Deserializing policy route fibs..");
   PolicyRouteFibNodeMap prFibs = (PolicyRouteFibNodeMap) deserializeObject(prFibsPath
         .toFile());
   print(1, "OK\n");
   // Typo fix: was "toplogy".
   print(1, "Deserializing topology edges..");
   EdgeSet topologyEdges = (EdgeSet) deserializeObject(edgesPath.toFile());
   print(1, "OK\n");
   print(1, "Synthesizing Z3 logic..");
   Synthesizer s = new Synthesizer(configurations, fibs, prFibs,
         topologyEdges, _settings.getSimplify());
   try {
      s.synthesize(_settings.getZ3File());
   }
   catch (IOException e) {
      // Was "throw new Error(...)": use the project's standard exception
      // type (and fix the "generated" typo) so callers handle it uniformly.
      throw new BatfishException("Failed to generate Z3 logic", e);
   }
   print(1, "OK\n");
   print(1, "Serializing node set..");
   NodeSet nodeSet = s.getNodeSet();
   serializeObject(nodeSet, new File(_settings.getNodeSetPath()));
   print(1, "OK\n");
   printElapsedTime();
}
/**
 * Deserializes vendor configurations from the given directory and converts
 * them to vendor-independent configurations.
 *
 * @param serializedVendorConfigPath directory of serialized vendor configs
 * @return vendor-independent configurations keyed by name
 */
public Map<String, Configuration> getConfigurations(
      String serializedVendorConfigPath) {
   return parseConfigurations(
         deserializeVendorConfigurations(serializedVendorConfigPath));
}
/**
 * Compares the parse trees of two test rigs.
 *
 * <p>NOTE(review): the entire implementation below is commented out, so
 * this method is currently a no-op.</p>
 */
public void getDiff() {
// Map<File, String> configurationData1 = readConfigurationFiles(_settings
// .getTestRigPath());
// Map<File, String> configurationData2 = readConfigurationFiles(_settings
// .getSecondTestRigPath());
// List<Configuration> firstConfigurations =
// parseConfigFiles(configurationData1);
// if (firstConfigurations == null) {
// quit(1);
// List<Configuration> secondConfigurations =
// parseConfigFiles(configurationData2);
// if (secondConfigurations == null) {
// quit(1);
// if (firstConfigurations.size() != secondConfigurations.size()) {
// System.out.println("Size MISMATCH");
// quit(1);
// Collections.sort(firstConfigurations);
// Collections.sort(secondConfigurations);
// boolean finalRes = true;
// for (int i = 0; i < firstConfigurations.size(); i++) {
// boolean res = (firstConfigurations.get(i).sameParseTree(
// secondConfigurations.get(i), firstConfigurations.get(i)
// .getName() + " MISMATCH"));
// if (res == false) {
// finalRes = false;
// if (finalRes == true) {
// System.out.println("MATCH");
}
/**
 * Returns the seconds elapsed since {@code beforeTime}, a millisecond
 * timestamp previously taken from System.currentTimeMillis().
 */
private double getElapsedTime(long beforeTime) {
   // 1000d forces floating-point division for fractional seconds.
   return (System.currentTimeMillis() - beforeTime) / 1000d;
}
/**
 * Returns the sorted, de-duplicated list of predicates to display help for:
 * the predicates requested in settings when present, otherwise every
 * predicate with recorded semantics.
 *
 * <p>Fix: removed a stray {@code _settings.getHelpPredicates()} call whose
 * result was discarded (dead code in the original).</p>
 *
 * @param predicateSemantics map from predicate name to semantics text
 * @return sorted predicate names
 */
private List<String> getHelpPredicates(Map<String, String> predicateSemantics) {
   Set<String> helpPredicateSet = new LinkedHashSet<String>();
   if (_settings.getHelpPredicates() == null) {
      helpPredicateSet.addAll(predicateSemantics.keySet());
   }
   else {
      helpPredicateSet.addAll(_settings.getHelpPredicates());
   }
   List<String> helpPredicates = new ArrayList<String>();
   helpPredicates.addAll(helpPredicateSet);
   Collections.sort(helpPredicates);
   return helpPredicates;
}
/**
 * Converts the policy-route next-hop relation into a per-node map from
 * next-hop IP to outgoing interface. Relation columns: 0 = node,
 * 1 = next-hop IP, 2 = interface.
 *
 * @param fibPolicyRouteNextHops relation from the LogicBlox query
 * @param lbFrontend session used to decode the relation columns
 * @return per-node next-hop-IP to interface map
 */
private PolicyRouteFibNodeMap getPolicyRouteFibNodeMap(
Relation fibPolicyRouteNextHops, LogicBloxFrontend lbFrontend) {
PolicyRouteFibNodeMap nodeMap = new PolicyRouteFibNodeMap();
List<String> nodeList = new ArrayList<String>();
lbFrontend.fillColumn(LBValueType.ENTITY_REF_STRING, nodeList,
fibPolicyRouteNextHops.getColumns().get(0));
List<String> ipList = new ArrayList<String>();
lbFrontend.fillColumn(LBValueType.ENTITY_INDEX_IP, ipList,
fibPolicyRouteNextHops.getColumns().get(1));
List<String> interfaces = new ArrayList<String>();
lbFrontend.fillColumn(LBValueType.ENTITY_REF_STRING, interfaces,
fibPolicyRouteNextHops.getColumns().get(2));
// Columns are parallel lists: row i spans all three.
int size = nodeList.size();
for (int i = 0; i < size; i++) {
String node = nodeList.get(i);
Ip ip = new Ip(ipList.get(i));
String iface = interfaces.get(i);
// Lazily create the per-node IP map on first sight of each node.
PolicyRouteFibIpMap ipMap = nodeMap.get(node);
if (ipMap == null) {
ipMap = new PolicyRouteFibIpMap();
nodeMap.put(node, ipMap);
}
ipMap.put(ip, iface);
}
return nodeMap;
}
/**
 * Loads predicate name/type/semantics information from the prebuilt
 * serialized predicate-info file inside the logic directory.
 *
 * <p>NOTE(review): the {@code logicFiles} parameter is unused here; the
 * info is deserialized from the object file, not parsed from these files.
 * Kept for interface compatibility — confirm before removing.</p>
 */
public PredicateInfo getPredicateInfo(Map<String, String> logicFiles) {
// Get predicate semantics from rules file
print(1, "\n*** PARSING PREDICATE INFO ***\n");
resetTimer();
String predicateInfoPath = getPredicateInfoPath();
PredicateInfo predicateInfo = (PredicateInfo) deserializeObject(new File(
predicateInfoPath));
printElapsedTime();
return predicateInfo;
}
/**
 * Returns the path of the serialized predicate-info file inside the
 * (possibly freshly unpacked) logic directory.
 */
private String getPredicateInfoPath() {
   return Paths.get(retrieveLogicDir().toString(), PREDICATE_INFO_FILENAME)
         .toString();
}
/**
 * Converts the installed-routes relation into per-hostname FIB row sets.
 * Relation columns: 0 = hostname, 1 = network (ip/prefix pair),
 * 2 = interface.
 *
 * <p>NOTE(review): the start/end index bookkeeping assumes all rows for a
 * given hostname are contiguous in the relation — confirm that LogicBlox
 * returns rows grouped by the first column.</p>
 *
 * @param installedRoutes relation from the LogicBlox FIB query
 * @param lbFrontend session used to decode the relation columns
 * @return map from hostname to its FIB rows
 */
private FibMap getRouteForwardingRules(Relation installedRoutes,
LogicBloxFrontend lbFrontend) {
FibMap fibs = new FibMap();
List<String> nameList = new ArrayList<String>();
lbFrontend.fillColumn(LBValueType.ENTITY_REF_STRING, nameList,
installedRoutes.getColumns().get(0));
List<String> networkList = new ArrayList<String>();
lbFrontend.fillColumn(LBValueType.ENTITY_INDEX_NETWORK, networkList,
installedRoutes.getColumns().get(1));
List<String> interfaces = new ArrayList<String>();
lbFrontend.fillColumn(LBValueType.ENTITY_REF_STRING, interfaces,
installedRoutes.getColumns().get(2));
// First pass: record the [start, end] row range of each hostname's block.
String currentHostname = "";
Map<String, Integer> startIndices = new HashMap<String, Integer>();
Map<String, Integer> endIndices = new HashMap<String, Integer>();
for (int i = 0; i < nameList.size(); i++) {
String currentRowHostname = nameList.get(i);
if (!currentHostname.equals(currentRowHostname)) {
if (i > 0) {
// Previous hostname's block ended on the prior row.
endIndices.put(currentHostname, i - 1);
}
currentHostname = currentRowHostname;
startIndices.put(currentHostname, i);
}
}
// The final hostname's block runs to the last row.
endIndices.put(currentHostname, nameList.size() - 1);
// Second pass: build one FibRow per row in each hostname's range.
for (String hostname : startIndices.keySet()) {
FibSet fibRows = new FibSet();
fibs.put(hostname, fibRows);
int startIndex = startIndices.get(hostname);
int endIndex = endIndices.get(hostname);
for (int i = startIndex; i <= endIndex; i++) {
String networkString = networkList.get(i);
// networkString encodes "ip/prefix"; split into address and length.
Ip networkAddress = new Ip(
Util.getIpFromIpSubnetPair(networkString));
int prefixLength = Util
.getPrefixLengthFromIpSubnetPair(networkString);
String iface = interfaces.get(i);
fibRows.add(new FibRow(networkAddress, prefixLength, iface));
}
}
return fibs;
}
/**
 * Collects the contents of every ".semantics" file under the logic
 * directory, keyed by path string.
 *
 * <p>Fix: the original caught {@code IOException}, called
 * {@code printStackTrace()}, and returned a possibly-partial map; this
 * version fails loudly with a {@code BatfishException}, matching the
 * class's error-handling convention, and cleans up the temporary logic
 * directory on all paths via {@code finally}.</p>
 *
 * @return map from semantics-file path to its text content
 * @throws BatfishException if the directory walk fails
 */
private Map<String, String> getSemanticsFiles() {
   final Map<String, String> semanticsFiles = new HashMap<String, String>();
   File logicDirFile = retrieveLogicDir();
   FileVisitor<Path> visitor = new SimpleFileVisitor<Path>() {
      @Override
      public FileVisitResult visitFile(Path file, BasicFileAttributes attrs)
            throws IOException {
         String pathString = file.toString();
         if (pathString.endsWith(".semantics")) {
            String contents = FileUtils.readFileToString(file.toFile());
            semanticsFiles.put(pathString, contents);
         }
         return super.visitFile(file, attrs);
      }
   };
   try {
      Files.walkFileTree(Paths.get(logicDirFile.getAbsolutePath()), visitor);
   }
   catch (IOException e) {
      throw new BatfishException("Error collecting semantics files", e);
   }
   finally {
      // Remove any temporary unpacked logic directory on success or failure.
      cleanupLogicDir();
   }
   return semanticsFiles;
}
/**
 * Queries the topology predicate and builds the edge set, skipping any edge
 * with a loopback endpoint. Relation columns: 0 = from-node,
 * 1 = from-interface, 2 = to-node, 3 = to-interface.
 *
 * @param lbFrontend connected session used to run and decode the query
 * @return the set of non-loopback topology edges
 */
public EdgeSet getTopologyEdges(LogicBloxFrontend lbFrontend) {
EdgeSet edges = new EdgeSet();
String qualifiedName = _predicateInfo.getPredicateNames().get(
TOPOLOGY_PREDICATE_NAME);
Relation topologyRelation = lbFrontend.queryPredicate(qualifiedName);
List<String> fromRouters = new ArrayList<String>();
lbFrontend.fillColumn(LBValueType.ENTITY_REF_STRING, fromRouters,
topologyRelation.getColumns().get(0));
List<String> fromInterfaces = new ArrayList<String>();
lbFrontend.fillColumn(LBValueType.ENTITY_REF_STRING, fromInterfaces,
topologyRelation.getColumns().get(1));
List<String> toRouters = new ArrayList<String>();
lbFrontend.fillColumn(LBValueType.ENTITY_REF_STRING, toRouters,
topologyRelation.getColumns().get(2));
List<String> toInterfaces = new ArrayList<String>();
lbFrontend.fillColumn(LBValueType.ENTITY_REF_STRING, toInterfaces,
topologyRelation.getColumns().get(3));
// Columns are parallel lists: row i spans all four.
for (int i = 0; i < fromRouters.size(); i++) {
// Loopbacks carry no physical adjacency; drop such edges.
if (Util.isLoopback(fromInterfaces.get(i))
|| Util.isLoopback(toInterfaces.get(i))) {
continue;
}
Edge newEdge = new Edge(fromRouters.get(i), fromInterfaces.get(i),
toRouters.get(i), toInterfaces.get(i));
edges.add(newEdge);
}
return edges;
}
/**
 * Generates a topology object from inferred edges encoded in interface
 * descriptions.
 *
 * <p>Not yet implemented; currently returns {@code null}.</p>
 *
 * @param configurations
 *           The vendor specific configurations.
 * @param includeExternal
 *           Whether to include edges to nodes for which configuration files
 *           were not supplied (used for debugging).
 * @return The inferred topology.
 */
private Topology inferTopologyFromInterfaceDescriptions(
      Map<String, Configuration> configurations, boolean includeExternal) {
   // Fix: parameter renamed from "IncludeExternal" to "includeExternal" to
   // follow lowerCamelCase and match the @param tag above.
   // TODO Auto-generated method stub
   return null;
}
/**
 * Opens and initializes a ConnectBlox session on the given workspace, using
 * the host/port settings, and registers it for shutdown via close().
 *
 * @param assumedToExist whether the workspace should already exist
 * @param workspace name of the workspace to connect to
 * @return the connected frontend
 * @throws LBInitializationException if frontend construction fails
 * @throws BatfishException if the service cannot be reached
 */
public LogicBloxFrontend initFrontend(boolean assumedToExist,
String workspace) throws LBInitializationException {
print(1, "\n*** STARTING CONNECTBLOX SESSION ***\n");
resetTimer();
LogicBloxFrontend lbFrontend = new LogicBloxFrontend(
_settings.getConnectBloxHost(), _settings.getConnectBloxPort(),
_settings.getLbWebPort(), _settings.getLbWebAdminPort(), workspace,
assumedToExist);
lbFrontend.initialize();
if (!lbFrontend.connected()) {
throw new BatfishException(
"Error connecting to ConnectBlox service. Please make sure service is running and try again.");
}
print(1, "SUCCESS\n");
printElapsedTime();
// Track the session so close() can shut it down.
_lbFrontends.add(lbFrontend);
return lbFrontend;
}
/**
 * Runs the given combined parser and returns its parse tree. On parser
 * errors, logs each numbered error message and aborts; otherwise prints
 * "OK" or, when requested in settings, pretty-prints the tree.
 *
 * @param parser the combined lexer/parser to run
 * @return the resulting parse tree
 * @throws BatfishException if the parser reported any errors
 */
private ParserRuleContext parse(BatfishCombinedParser<?, ?> parser) {
ParserRuleContext tree = parser.parse();
List<String> errors = parser.getErrors();
int numErrors = errors.size();
if (numErrors > 0) {
error(1, numErrors + " ERROR(S)\n");
for (int i = 0; i < numErrors; i++) {
// Prefix each message as "ERROR n: ..." (1-based numbering).
String prefix = "ERROR " + (i + 1) + ": ";
String msg = errors.get(i);
String prefixedMsg = Util.applyPrefix(prefix, msg);
error(1, prefixedMsg + "\n");
}
throw new BatfishException("Exiting due to parser errors");
}
else if (!_settings.printParseTree()) {
print(1, "OK\n");
}
else {
print(0, "OK, PRINTING PARSE TREE:\n");
print(0, ParseTreePrettyPrinter.print(tree, parser) + "\n\n");
}
return tree;
}
/**
 * Convenience overload that logs the filename being parsed before
 * delegating to {@link #parse(BatfishCombinedParser)}.
 */
private ParserRuleContext parse(BatfishCombinedParser<?, ?> parser,
String filename) {
print(1, "Parsing: \"" + filename + "\"..");
return parse(parser);
}
/**
 * Converts each vendor-specific configuration into a vendor-independent
 * {@link Configuration}, reporting conversion errors and warnings.
 *
 * @param vendorConfigurations
 *           Vendor-specific configurations keyed by hostname.
 * @return Vendor-independent configurations keyed by hostname, or null if
 *         any conversion failed (immediately if exit-on-parse-error is set,
 *         otherwise after attempting all conversions).
 */
private Map<String, Configuration> parseConfigurations(
      Map<String, VendorConfiguration> vendorConfigurations) {
   boolean processingError = false;
   Map<String, Configuration> configurations = new TreeMap<String, Configuration>();
   print(1,
         "\n*** CONVERTING VENDOR CONFIGURATIONS TO INDEPENDENT FORMAT ***\n");
   resetTimer();
   // Iterate over entries to avoid a redundant map lookup per key
   for (Map.Entry<String, VendorConfiguration> entry : vendorConfigurations
         .entrySet()) {
      String name = entry.getKey();
      VendorConfiguration vc = entry.getValue();
      print(2, "Processing: \"" + name + "\"");
      try {
         Configuration config = vc.toVendorIndependentConfiguration();
         configurations.put(name, config);
      }
      catch (VendorConversionException e) {
         error(0, "...CONVERSION ERROR\n");
         error(0, ExceptionUtils.getStackTrace(e));
         if (_settings.exitOnParseError()) {
            return null;
         }
         else {
            // Record the failure but keep converting the remaining configs
            processingError = true;
            continue;
         }
      }
      List<String> conversionWarnings = vc.getConversionWarnings();
      int numWarnings = conversionWarnings.size();
      if (numWarnings > 0) {
         print(2, "..." + numWarnings + " WARNING(S)\n");
         for (String warning : conversionWarnings) {
            print(2, "\tconverter: " + warning + "\n");
         }
      }
      else {
         print(2, " ...OK\n");
      }
   }
   if (processingError) {
      return null;
   }
   else {
      printElapsedTime();
      return configurations;
   }
}
/**
 * Parses concrete flow constraints produced by the concretizer (files named
 * "*-concrete-*.smt2.out" in the configured flow path) and appends one
 * pipe-delimited flow line per file to the given builder, in the form:
 * node|src_ip|dst_ip|src_port|dst_port|protocol.
 *
 * @param sb
 *           Builder receiving the flow fact lines.
 */
private void parseFlowsFromConstraints(StringBuilder sb) {
   Path flowConstraintsDir = Paths.get(_settings.getFlowPath());
   File[] constraintsFiles = flowConstraintsDir.toFile().listFiles(
         new FilenameFilter() {
            @Override
            public boolean accept(File dir, String filename) {
               return filename.matches(".*-concrete-.*.smt2.out");
            }
         });
   if (constraintsFiles == null) {
      // listFiles returns null when the path is not a readable directory
      throw new BatfishException("Error reading flow constraints directory");
   }
   for (File constraintsFile : constraintsFiles) {
      String flowConstraintsText = readFile(constraintsFile);
      ConcretizerQueryResultCombinedParser parser = new ConcretizerQueryResultCombinedParser(
            flowConstraintsText);
      ParserRuleContext tree = parse(parser, constraintsFile.toString());
      ParseTreeWalker walker = new ParseTreeWalker();
      ConcretizerQueryResultExtractor extractor = new ConcretizerQueryResultExtractor();
      walker.walk(extractor, tree);
      String node = extractor.getNode();
      if (node == null) {
         // No originating node in this result: nothing to emit
         continue;
      }
      Map<String, Long> constraints = extractor.getConstraints();
      // Unconstrained fields default to 0
      long src_ip = 0;
      long dst_ip = 0;
      long src_port = 0;
      long dst_port = 0;
      long protocol = 0;
      for (String varName : constraints.keySet()) {
         Long value = constraints.get(varName);
         switch (varName) {
         case Synthesizer.SRC_IP_VAR:
            src_ip = value;
            break;
         case Synthesizer.DST_IP_VAR:
            dst_ip = value;
            break;
         case Synthesizer.SRC_PORT_VAR:
            src_port = value;
            break;
         case Synthesizer.DST_PORT_VAR:
            dst_port = value;
            break;
         case Synthesizer.IP_PROTOCOL_VAR:
            protocol = value;
            break;
         default:
            // Unknown variable indicates a synthesizer/extractor mismatch
            throw new Error("invalid variable name");
         }
      }
      String line = node + "|" + src_ip + "|" + dst_ip + "|" + src_port
            + "|" + dst_port + "|" + protocol + "\n";
      sb.append(line);
   }
}
/**
 * Parses topology file text and writes the resulting topology facts into
 * the fact bins. The grammar is chosen by sniffing the file's leading
 * token: GNS3 ("autostart") or Batfish ("CONFIGPARSER_TOPOLOGY") format.
 *
 * @param testRigPath
 *           Test rig directory (used only for the progress message path).
 * @param topologyFileText
 *           Raw text of the topology file.
 * @param factBins
 *           Destination fact bins.
 */
private void parseTopology(String testRigPath, String topologyFileText,
      Map<String, StringBuilder> factBins) {
   BatfishCombinedParser<?, ?> parser = null;
   TopologyExtractor extractor = null;
   Topology topology = null;
   File topologyPath = Paths.get(testRigPath, "topology.net").toFile();
   print(1, "Parsing: \"" + topologyPath.getAbsolutePath() + "\"");
   if (topologyFileText.startsWith("autostart")) {
      parser = new GNS3TopologyCombinedParser(topologyFileText);
      extractor = new GNS3TopologyExtractor();
   }
   else if (topologyFileText.startsWith("CONFIGPARSER_TOPOLOGY")) {
      parser = new BatfishTopologyCombinedParser(topologyFileText);
      extractor = new BatfishTopologyExtractor();
   }
   else if (topologyFileText.equals("")) {
      // Empty file: warn and emit no topology facts
      error(1, "...WARNING: empty topology\n");
      return;
   }
   else {
      // Unrecognized leading token: unsupported topology format
      error(0, "...ERROR\n");
      throw new Error("Topology format error");
   }
   ParserRuleContext tree = parse(parser);
   ParseTreeWalker walker = new ParseTreeWalker();
   walker.walk(extractor, tree);
   topology = extractor.getTopology();
   TopologyFactExtractor tfe = new TopologyFactExtractor(topology);
   tfe.writeFacts(factBins);
}
private Map<String, VendorConfiguration> parseVendorConfigurations(
Map<File, String> configurationData) {
print(1, "\n*** PARSING VENDOR CONFIGURATION FILES ***\n");
resetTimer();
Map<String, VendorConfiguration> vendorConfigurations = new TreeMap<String, VendorConfiguration>();
boolean processingError = false;
for (File currentFile : configurationData.keySet()) {
String fileText = configurationData.get(currentFile);
String currentPath = currentFile.getAbsolutePath();
ConfigurationParser parser = null;
ConfigurationLexer lexer = null;
VendorConfiguration vc = null;
ANTLRStringStream in = new ANTLRStringStream(fileText);
CommonTokenStream tokens;
if (fileText.length() == 0) {
continue;
}
CiscoControlPlaneExtractor extractor = null;
boolean antlr4 = false;
if (fileText.charAt(0) == '!') {
// antlr 4 stuff
antlr4 = true;
BatfishCombinedParser<?, ?> combinedParser = new CiscoCombinedParser(
fileText);
ParserRuleContext tree = parse(combinedParser, currentPath);
extractor = new CiscoControlPlaneExtractor(fileText, combinedParser);
ParseTreeWalker walker = new ParseTreeWalker();
walker.walk(extractor, tree);
for (String warning : extractor.getWarnings()) {
error(2, warning);
}
vc = extractor.getVendorConfiguration();
assert Boolean.TRUE;
}
else if ((fileText.indexOf("set version") >= 0)
&& ((fileText.indexOf("set version") == 0) || (fileText
.charAt(fileText.indexOf("set version") - 1) == '\n'))) {
lexer = new FlatJuniperGrammarLexer(in);
tokens = new CommonTokenStream(lexer);
parser = new FlatJuniperGrammarParser(tokens);
}
else if (fileText.charAt(0) == '
lexer = new JuniperGrammarLexer(in);
tokens = new CommonTokenStream(lexer);
parser = new JuniperGrammarParser(tokens);
}
else {
continue;
}
if (!antlr4) {
print(2, "Parsing: \"" + currentPath + "\"");
try {
vc = parser.parse_configuration();
}
catch (Exception e) {
error(0, " ...ERROR\n");
e.printStackTrace();
}
List<String> parserErrors = parser.getErrors();
List<String> lexerErrors = lexer.getErrors();
int numErrors = parserErrors.size() + lexerErrors.size();
if (numErrors > 0) {
error(0, " ..." + numErrors + " ERROR(S)\n");
for (String msg : lexer.getErrors()) {
error(2, "\tlexer: " + msg + "\n");
}
for (String msg : parser.getErrors()) {
error(2, "\tparser: " + msg + "\n");
}
if (_settings.exitOnParseError()) {
return null;
}
else {
processingError = true;
continue;
}
}
else {
print(2, "...OK\n");
}
}
// at this point we should have a VendorConfiguration vc
if (vendorConfigurations.containsKey(vc.getHostname()))
throw new Error("Duplicate hostname \"" + vc.getHostname() + "\" found in " + currentFile + "\n");
vendorConfigurations.put(vc.getHostname(), vc);
}
if (processingError) {
return null;
}
else {
printElapsedTime();
return vendorConfigurations;
}
}
/**
 * Extracts LogicBlox facts from the given configurations into the supplied
 * fact bins. All communities are collected in a first pass because each
 * per-configuration extractor is handed the full community set.
 */
private void populateConfigurationFactBins(
      Collection<Configuration> configurations,
      Map<String, StringBuilder> factBins) {
   print(1, "\n*** EXTRACTING LOGICBLOX FACTS FROM CONFIGURATIONS ***\n");
   resetTimer();
   Set<Long> allCommunities = new LinkedHashSet<Long>();
   for (Configuration config : configurations) {
      allCommunities.addAll(config.getCommunities());
   }
   for (Configuration config : configurations) {
      ConfigurationFactExtractor factExtractor = new ConfigurationFactExtractor(
            config, allCommunities, factBins);
      factExtractor.writeFacts();
   }
   printElapsedTime();
}
/**
 * Posts the accumulated facts to the bloxweb services. The services are
 * started before posting and stopped afterwards; the start/post/stop order
 * is significant.
 *
 * @param lbFrontend
 *           Connected frontend to post through.
 * @param factBins
 *           Fact bins to post.
 */
private void postFacts(LogicBloxFrontend lbFrontend,
      Map<String, StringBuilder> factBins) {
   print(1, "\n*** POSTING FACTS TO BLOXWEB SERVICES ***\n");
   resetTimer();
   print(1, "Starting bloxweb services..");
   lbFrontend.startLbWebServices();
   print(1, "OK\n");
   print(1, "Posting facts..");
   try {
      lbFrontend.postFacts(factBins);
   }
   catch (ServiceClientException e) {
      // Wrap with context; cause is preserved
      throw new BatfishException("Failed to post facts to bloxweb services",
            e);
   }
   print(1, "OK\n");
   print(1, "Stopping bloxweb services..");
   lbFrontend.stopLbWebServices();
   print(1, "OK\n");
   print(1, "SUCCESS\n");
   printElapsedTime();
}
/**
 * Writes text to stdout (and flushes) when the configured log level is at
 * least the requested level; otherwise does nothing.
 */
public void print(int logLevel, String text) {
   if (logLevel <= _settings.getLogLevel()) {
      System.out.print(text);
      System.out.flush();
   }
}
/**
 * Prints the semantics of every help predicate derived from the rules
 * files, one blank line after each.
 */
private void printAllPredicateSemantics(
      Map<String, String> predicateSemantics) {
   // Get predicate semantics from rules file
   print(1, "\n*** PRINTING PREDICATE SEMANTICS ***\n");
   for (String helpPredicate : getHelpPredicates(predicateSemantics)) {
      printPredicateSemantics(helpPredicate);
      print(0, "\n");
   }
}
/** Prints the seconds elapsed since the last {@link #resetTimer()} call. */
private void printElapsedTime() {
   double elapsedSeconds = getElapsedTime(_timerCount);
   print(1, "Time taken for this task: " + elapsedSeconds + " seconds\n");
}
/**
 * Prints the semantics and all rows of a single predicate. Reports an
 * error and returns if the predicate name is unknown or the query fails.
 */
private void printPredicate(LogicBloxFrontend lbFrontend,
      String predicateName) {
   printPredicateSemantics(predicateName);
   String qualifiedName = _predicateInfo.getPredicateNames().get(
         predicateName);
   if (qualifiedName == null) { // predicate not found
      error(0, "ERROR: No information for predicate: " + predicateName
            + "\n");
      return;
   }
   Relation relation = lbFrontend.queryPredicate(qualifiedName);
   try {
      List<String> rows = lbFrontend.getPredicate(_predicateInfo, relation,
            predicateName);
      for (String row : rows) {
         print(0, row);
      }
   }
   catch (QueryException q) {
      error(0, q.getMessage() + "\n");
   }
}
/** Prints the row count of a single predicate as "|name| = N". */
private void printPredicateCount(LogicBloxFrontend lbFrontend,
      String predicateName) {
   // Row count == size of the first column of the query result
   Relation relation = lbFrontend.queryPredicate(predicateName);
   int numRows = relation.getColumns().get(0).size();
   print(0, "|" + predicateName + "| = " + numRows + "\n");
}
/** Prints the row count for each of the named predicates. */
public void printPredicateCounts(LogicBloxFrontend lbFrontend,
      Set<String> predicateNames) {
   // Print predicate(s) here
   print(0, "\n*** SUBMITTING QUERY(IES) ***\n");
   resetTimer();
   for (String name : predicateNames) {
      printPredicateCount(lbFrontend, name);
   }
   printElapsedTime();
}
/** Prints the full contents of each of the named predicates. */
public void printPredicates(LogicBloxFrontend lbFrontend,
      Set<String> predicateNames) {
   // Print predicate(s) here
   print(0, "\n*** SUBMITTING QUERY(IES) ***\n");
   resetTimer();
   for (String name : predicateNames) {
      printPredicate(lbFrontend, name);
      print(0, "\n");
   }
   printElapsedTime();
}
/**
 * Prints a predicate's name and its semantics string, substituting
 * "&lt;missing&gt;" when no semantics are recorded.
 */
private void printPredicateSemantics(String predicateName) {
   String rawSemantics = _predicateInfo.getPredicateSemantics(predicateName);
   String semantics = (rawSemantics == null) ? "<missing>" : rawSemantics;
   print(0, "Predicate: " + predicateName + "\n");
   print(0, "Semantics: " + semantics + "\n");
}
/**
 * Reads every non-hidden file under &lt;testRigPath&gt;/configs into
 * memory.
 *
 * @param testRigPath
 *           Root of the test rig.
 * @return File contents keyed by file.
 */
private Map<File, String> readConfigurationFiles(String testRigPath) {
   print(1, "\n*** READING CONFIGURATION FILES ***\n");
   resetTimer();
   Map<File, String> configurationData = new TreeMap<File, String>();
   File configsDir = Paths.get(testRigPath, "configs").toFile();
   File[] configFiles = configsDir.listFiles(new FilenameFilter() {
      @Override
      public boolean accept(File dir, String name) {
         // Ignore hidden files (names starting with '.')
         return !name.startsWith(".");
      }
   });
   if (configFiles == null) {
      // listFiles returns null when the path is not a readable directory
      throw new BatfishException("Error reading test rig configs directory");
   }
   for (File configFile : configFiles) {
      print(2, "Reading: \"" + configFile.toString() + "\"\n");
      String contents = readFile(configFile.getAbsoluteFile());
      configurationData.put(configFile, contents);
   }
   printElapsedTime();
   return configurationData;
}
/**
 * Reads a file's entire contents as a string, wrapping any I/O failure in
 * a {@link BatfishException}.
 */
public String readFile(File file) {
   try {
      return FileUtils.readFileToString(file);
   }
   catch (IOException e) {
      throw new BatfishException("Failed to read file: " + file.toString(),
            e);
   }
}
/** Marks the current wall-clock time as the start of the next timed task. */
private void resetTimer() {
   _timerCount = System.currentTimeMillis();
}
/**
 * Locates the directory containing the LogicBlox logic files. When running
 * from a onejar archive, the relevant files (.lbb/.lbp/.semantics, the
 * locator class file, and the predicate-info file) are extracted into a
 * temp directory (recorded in _tmpLogicDir for later cleanup) and the
 * directory holding the locator class is returned. Otherwise the logic
 * package directory is resolved directly from the classpath.
 *
 * @return The logic directory on disk.
 */
private File retrieveLogicDir() {
   File logicDirFile = null;
   final String locatorFilename = LogicResourceLocator.class.getSimpleName()
         + ".class";
   URL logicSourceURL = LogicResourceLocator.class.getProtectionDomain()
         .getCodeSource().getLocation();
   String logicSourceString = logicSourceURL.toString();
   UrlZipExplorer zip = null;
   // Selects only the logic-related resources to extract
   StringFilter lbFilter = new StringFilter() {
      @Override
      public boolean accept(String filename) {
         return filename.endsWith(".lbb") || filename.endsWith(".lbp")
               || filename.endsWith(".semantics")
               || filename.endsWith(locatorFilename)
               || filename.endsWith(PREDICATE_INFO_FILENAME);
      }
   };
   if (logicSourceString.startsWith("onejar:")) {
      // Running from a onejar archive: extract resources to a temp dir
      FileVisitor<Path> visitor = null;
      try {
         zip = new UrlZipExplorer(logicSourceURL);
         Path destinationDir = Files.createTempDirectory("lbtmpproject");
         File destinationDirAsFile = destinationDir.toFile();
         zip.extractFiles(lbFilter, destinationDirAsFile);
         // Walk the extracted tree to find the directory containing the
         // locator class; toString() of the visitor yields that directory
         visitor = new SimpleFileVisitor<Path>() {
            private String _projectDirectory;
            @Override
            public String toString() {
               return _projectDirectory;
            }
            @Override
            public FileVisitResult visitFile(Path aFile,
                  BasicFileAttributes aAttrs) throws IOException {
               if (aFile.endsWith(locatorFilename)) {
                  _projectDirectory = aFile.getParent().toString();
                  return FileVisitResult.TERMINATE;
               }
               return FileVisitResult.CONTINUE;
            }
         };
         Files.walkFileTree(destinationDir, visitor);
         // Remember temp dir so it can be deleted on shutdown
         _tmpLogicDir = destinationDirAsFile;
      }
      catch (IOException e) {
         throw new BatfishException(
               "Failed to retrieve logic dir from onejar archive", e);
      }
      String fileString = visitor.toString();
      return new File(fileString);
   }
   else {
      // Running from an exploded classpath: resolve the package directory
      String logicPackageResourceName = LogicResourceLocator.class
            .getPackage().getName().replace('.', SEPARATOR.charAt(0));
      try {
         logicDirFile = new File(LogicResourceLocator.class.getClassLoader()
               .getResource(logicPackageResourceName).toURI());
      }
      catch (URISyntaxException e) {
         throw new BatfishException("Failed to resolve logic directory", e);
      }
      return logicDirFile;
   }
}
/**
 * Reverts the workspace database to the configured branch, throwing if the
 * revert fails.
 *
 * @param lbFrontend
 *           Connected frontend to revert through.
 */
private void revert(LogicBloxFrontend lbFrontend) {
   print(1, "\n*** REVERTING WORKSPACE ***\n");
   String workspaceName = new File(_settings.getTestRigPath()).getName();
   String branchName = _settings.getBranchName();
   // Fixed: the branch name's opening quote was never closed in the message
   print(2, "Reverting workspace: \"" + workspaceName + "\" to branch: \""
         + branchName + "\"\n");
   String errorResult = lbFrontend.revertDatabase(branchName);
   if (errorResult != null) {
      throw new BatfishException("Failed to revert database: " + errorResult);
   }
}
/**
 * Main dispatch: inspects settings flags in a fixed priority order and
 * performs exactly one task (build predicate info, z3 generation,
 * anonymization, serialization, workspace management, fact posting,
 * queries, data plane computation, or flow posting). Throws if no task
 * flag was supplied.
 */
public void run() {
   // Optionally fold stderr into stdout so log output interleaves
   if (_settings.redirectStdErr()) {
      System.setErr(System.out);
   }
   if (_settings.getBuildPredicateInfo()) {
      buildPredicateInfo();
      return;
   }
   if (_settings.getZ3()) {
      Map<String, Configuration> configurations = deserializeConfigurations(_settings
            .getSerializeIndependentPath());
      genZ3(configurations);
      return;
   }
   if (_settings.getAnonymize()) {
      anonymizeConfigurations();
      return;
   }
   if (_settings.getGenerateMultipathInconsistencyQuery()) {
      genMultipathQueries();
      return;
   }
   if (_settings.getSerializeVendor()) {
      String testRigPath = _settings.getTestRigPath();
      String outputPath = _settings.getSerializeVendorPath();
      serializeVendorConfigs(testRigPath, outputPath);
      return;
   }
   if (_settings.dumpInterfaceDescriptions()) {
      String testRigPath = _settings.getTestRigPath();
      String outputPath = _settings.getDumpInterfaceDescriptionsPath();
      dumpInterfaceDescriptions(testRigPath, outputPath);
      return;
   }
   if (_settings.getSerializeIndependent()) {
      String inputPath = _settings.getSerializeVendorPath();
      String outputPath = _settings.getSerializeIndependentPath();
      serializeIndependentConfigs(inputPath, outputPath);
      return;
   }
   if (_settings.getDiff()) {
      getDiff();
      return;
   }
   if (_settings.getConcretize()) {
      concretize();
      return;
   }
   // Predicate info is needed by query, semantics-printing, and data-plane
   // tasks; load it up front for any of them
   if (_settings.getQuery() || _settings.getPrintSemantics()
         || _settings.getDataPlane()) {
      Map<String, String> logicFiles = getSemanticsFiles();
      _predicateInfo = getPredicateInfo(logicFiles);
      // Print predicate semantics and quit if requested
      if (_settings.getPrintSemantics()) {
         printAllPredicateSemantics(_predicateInfo.getPredicateSemantics());
         return;
      }
   }
   // Control-plane fact bins are built if facts will be posted or dumped
   Map<String, StringBuilder> cpFactBins = null;
   if (_settings.getFacts() || _settings.getDumpControlPlaneFacts()) {
      cpFactBins = new LinkedHashMap<String, StringBuilder>();
      initControlPlaneFactBins(cpFactBins);
      Map<String, Configuration> configurations = deserializeConfigurations(_settings
            .getSerializeIndependentPath());
      writeTopologyFacts(_settings.getTestRigPath(), configurations,
            cpFactBins);
      writeConfigurationFacts(configurations, cpFactBins);
      if (_settings.getDumpControlPlaneFacts()) {
         dumpFacts(cpFactBins);
      }
      if (!(_settings.getFacts() || _settings.createWorkspace())) {
         return;
      }
   }
   // Start frontend
   LogicBloxFrontend lbFrontend = null;
   if (_settings.createWorkspace() || _settings.getFacts()
         || _settings.getQuery() || _settings.getDataPlane()
         || _settings.revert()) {
      lbFrontend = connect();
   }
   if (_settings.revert()) {
      revert(lbFrontend);
      return;
   }
   // Create new workspace (will overwrite existing) if requested
   if (_settings.createWorkspace()) {
      addProject(lbFrontend);
      if (!_settings.getFacts()) {
         return;
      }
   }
   // Post facts if requested
   if (_settings.getFacts()) {
      addStaticFacts(lbFrontend,
            Collections.singletonList(BASIC_FACTS_BLOCKNAME));
      postFacts(lbFrontend, cpFactBins);
      return;
   }
   if (_settings.getQuery()) {
      lbFrontend.initEntityTable();
      Map<String, String> allPredicateNames = _predicateInfo
            .getPredicateNames();
      Set<String> predicateNames = new TreeSet<String>();
      if (_settings.getQueryAll()) {
         predicateNames.addAll(allPredicateNames.keySet());
      }
      else {
         predicateNames.addAll(_settings.getPredicates());
      }
      if (_settings.getCountsOnly()) {
         printPredicateCounts(lbFrontend, predicateNames);
      }
      else {
         printPredicates(lbFrontend, predicateNames);
      }
      return;
   }
   if (_settings.getDataPlane()) {
      computeDataPlane(lbFrontend);
      return;
   }
   // Traffic fact bins are built if flows will be posted or dumped
   Map<String, StringBuilder> trafficFactBins = null;
   if (_settings.getFlows() || _settings.getDumpTrafficFacts()) {
      trafficFactBins = new LinkedHashMap<String, StringBuilder>();
      initTrafficFactBins(trafficFactBins);
      writeTrafficFacts(trafficFactBins);
      if (_settings.getDumpTrafficFacts()) {
         dumpFacts(trafficFactBins);
      }
      if (_settings.getFlows()) {
         lbFrontend = connect();
         postFacts(lbFrontend, trafficFactBins);
         return;
      }
   }
   // No recognized task flag: fail loudly rather than silently exit
   throw new BatfishException(
         "No task performed! Run with -help flag to see usage\n");
}
/**
 * Serializes vendor-independent configurations derived from the given
 * vendor configuration directory, one file per configuration name.
 *
 * @param vendorConfigPath
 *           Directory of serialized vendor configurations.
 * @param outputPath
 *           Destination directory for serialized independent configs.
 */
private void serializeIndependentConfigs(String vendorConfigPath,
      String outputPath) {
   Map<String, Configuration> configurations = getConfigurations(vendorConfigPath);
   print(1,
         "\n*** SERIALIZING VENDOR-INDEPENDENT CONFIGURATION STRUCTURES ***\n");
   resetTimer();
   // Ensure the output directory exists (consistent with
   // serializeVendorConfigs)
   new File(outputPath).mkdirs();
   // Iterate over entries to avoid a redundant map lookup per key
   for (Map.Entry<String, Configuration> entry : configurations.entrySet()) {
      String name = entry.getKey();
      Configuration c = entry.getValue();
      Path currentOutputPath = Paths.get(outputPath, name);
      print(2,
            "Serializing: \"" + name + "\" ==> \""
                  + currentOutputPath.toString() + "\"");
      serializeObject(c, currentOutputPath.toFile());
      print(2, " ...OK\n");
   }
   printElapsedTime();
}
/**
 * Serializes an object to a file, either as XStream XML (when
 * serialize-to-text is enabled) or as a standard Java object stream.
 *
 * @param object
 *           The object to serialize.
 * @param outputFile
 *           Destination file.
 */
private void serializeObject(Object object, File outputFile) {
   // try-with-resources: previously the stream leaked if construction of
   // the object stream or writeObject threw before close()
   try (FileOutputStream fos = new FileOutputStream(outputFile)) {
      ObjectOutputStream oos;
      if (_settings.getSerializeToText()) {
         XStream xstream = new XStream(new DomDriver("UTF-8"));
         oos = xstream.createObjectOutputStream(fos);
      }
      else {
         oos = new ObjectOutputStream(fos);
      }
      oos.writeObject(object);
      // Close the object stream explicitly so its footer is flushed
      oos.close();
   }
   catch (IOException e) {
      throw new BatfishException(
            "Failed to serialize object to output file: "
                  + outputFile.toString(), e);
   }
}
/**
 * Parses all configuration files in the test rig and serializes the
 * resulting vendor configurations, one file per hostname.
 *
 * @param testRigPath
 *           Root of the test rig.
 * @param outputPath
 *           Destination directory for serialized vendor configs.
 */
private void serializeVendorConfigs(String testRigPath, String outputPath) {
   Map<File, String> configurationData = readConfigurationFiles(testRigPath);
   Map<String, VendorConfiguration> vendorConfigurations = parseVendorConfigurations(configurationData);
   if (vendorConfigurations == null) {
      throw new BatfishException("Exiting due to parser errors\n");
   }
   print(1, "\n*** SERIALIZING VENDOR CONFIGURATION STRUCTURES ***\n");
   resetTimer();
   new File(outputPath).mkdirs();
   // Iterate over entries to avoid a redundant map lookup per key
   for (Map.Entry<String, VendorConfiguration> entry : vendorConfigurations
         .entrySet()) {
      String name = entry.getKey();
      VendorConfiguration vc = entry.getValue();
      Path currentOutputPath = Paths.get(outputPath, name);
      print(2,
            "Serializing: \"" + name + "\" ==> \""
                  + currentOutputPath.toString() + "\"");
      serializeObject(vc, currentOutputPath.toFile());
      print(2, " ...OK\n");
   }
   printElapsedTime();
}
/**
 * Writes LogicBlox facts for the given configurations into the fact bins.
 * Thin wrapper over {@link #populateConfigurationFactBins}.
 */
public void writeConfigurationFacts(
      Map<String, Configuration> configurations,
      Map<String, StringBuilder> factBins) {
   populateConfigurationFactBins(configurations.values(), factBins);
}
/**
 * Writes a string to the given path, wrapping any I/O failure in a
 * {@link BatfishException}.
 */
private void writeFile(String outputPath, String output) {
   try {
      FileUtils.write(new File(outputPath), output);
   }
   catch (IOException e) {
      throw new BatfishException("Failed to write file: " + outputPath, e);
   }
}
/**
 * Writes topology facts for the test rig. If an explicit topology file is
 * present it is parsed; otherwise a "GuessTopology" fact is emitted so
 * LogicBlox infers adjacencies from interface subnetworks.
 *
 * Removed: a dead debug branch (guarded by {@code Boolean.FALSE}) that
 * inferred topology from interface descriptions and wrote a dot file to a
 * hard-coded developer path. See
 * {@link #inferTopologyFromInterfaceDescriptions} if that capability is
 * revived behind a real flag.
 *
 * @param testRigPath
 *           Root of the test rig.
 * @param configurations
 *           Parsed configurations (currently unused; retained for
 *           interface compatibility and future inference support).
 * @param factBins
 *           Destination fact bins.
 */
public void writeTopologyFacts(String testRigPath,
      Map<String, Configuration> configurations,
      Map<String, StringBuilder> factBins) {
   print(1, "*** PARSING TOPOLOGY ***\n");
   resetTimer();
   Path topologyFilePath = Paths.get(testRigPath, TOPOLOGY_FILENAME);
   if (Files.exists(topologyFilePath)) {
      // Get generated facts from topology file
      String topologyFileText = readFile(topologyFilePath.toFile());
      parseTopology(testRigPath, topologyFileText, factBins);
   }
   else {
      // tell logicblox to guess adjacencies based on interface
      // subnetworks
      print(1,
            "*** (GUESSING TOPOLOGY IN ABSENCE OF EXPLICIT FILE) ***\n");
      StringBuilder wGuessTopology = factBins.get("GuessTopology");
      wGuessTopology.append("1\n");
   }
   printElapsedTime();
}
/**
 * Appends concretized flow facts to the "SetFlowOriginate" bin by parsing
 * the concretizer's constraint output files.
 */
private void writeTrafficFacts(Map<String, StringBuilder> factBins) {
   StringBuilder wSetFlowOriginate = factBins.get("SetFlowOriginate");
   parseFlowsFromConstraints(wSetFlowOriginate);
}
}
|
package abra;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetAddress;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.PosixFilePermission;
import java.nio.file.attribute.PosixFilePermissions;
import java.security.CodeSource;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.TreeSet;
import java.util.UUID;
import abra.JunctionUtils.TooManyJunctionPermutationsException;
import abra.ReadEvaluator.Alignment;
import abra.ContigAligner.ContigAlignerResult;
import abra.SimpleMapper.Orientation;
import htsjdk.samtools.SAMFileHeader;
import htsjdk.samtools.SAMProgramRecord;
import htsjdk.samtools.SAMRecord;
import htsjdk.samtools.SAMSequenceRecord;
import htsjdk.samtools.SamReader;
/**
* ABRA's main entry point
*
* @author Lisle E. Mose (lmose at unc dot edu)
*/
public class ReAligner {
// --- Region chunking parameters ---
public static int MAX_REGION_LENGTH = 400;
private static int MIN_REGION_REMAINDER = 200;
public static int REGION_OVERLAP = 200;
// Minimum sequence length recommended for use with bwa mem
private static final int MIN_CONTIG_LENGTH = 70;
// Cannot be larger than buffer in assembler.c
private static final int MAX_KMER_SIZE = 199;
// --- Input/output state ---
// One header per input SAM/BAM, parallel to inputSams
private SAMFileHeader[] samHeaders;
// Target regions loaded from regionsBed
private List<Feature> regions;
private String regionsBed;
private String reference;
private AssemblerSettings assemblerSettings;
private int numThreads;
private String[] inputSams;
// Read/insert statistics; -1 / MAX_VALUE sentinels mean "not yet observed"
private int readLength = -1;
private int maxMapq = -1;
private int minInsertLength = Integer.MAX_VALUE;
private int maxInsertLength = -1;
private boolean isPairedEnd = false;
// Optional writer for assembled contigs (null when contigFile not set)
private BufferedWriter contigWriter = null;
// Shared reference-comparison helper, initialized in reAlign()
public static CompareToReference2 c2r;
private ThreadManager threadManager;
// --- Filtering / realignment tuning knobs ---
private int minMappingQuality;
private double maxMismatchRate;
private boolean isDebug;
private boolean isSkipAssembly;
private boolean useSoftClippedReads;
private boolean useObservedIndels;
private boolean useConsensusSeq;
private boolean isKeepTmp;
private String tmpDir;
private int finalCompressionLevel;
// If true, the input target file specifies kmer values
private boolean hasPresetKmers = false;
private String contigFile = null;
// RNA specific
private String junctionFile;
private String gtfJunctionFile;
private Set<Feature> junctions = new HashSet<Feature>();
private ReverseComplementor rc = new ReverseComplementor();
// Program metadata recorded in output SAM @PG records
private String version = "unknown";
private String cl = "unknown";
// --- Alignment scoring / caching parameters ---
private int[] swScoring;
private int[] softClipParams;
private int maxCachedReads = 0;
private int maxReadsInRegion;
private int minAnchorLen;
private int maxAnchorMismatches;
// Sorted output writer and per-chromosome chunking, set up in reAlign()
private SortedSAMWriter writer;
private ChromosomeChunker chromosomeChunker;
/**
 * Top-level realignment driver: initializes reference comparison and
 * chromosome chunking, loads regions and junctions, realigns each
 * chromosome chunk on worker threads, then sorts and writes the final
 * output files. inputFiles and outputFiles are parallel arrays.
 *
 * @param inputFiles
 *           Input SAM/BAM paths.
 * @param outputFiles
 *           Corresponding output paths.
 * @throws Exception
 *            On any processing failure.
 */
public void reAlign(String[] inputFiles, String[] outputFiles) throws Exception {
   this.inputSams = inputFiles;
   logStartupInfo(outputFiles);
   String tempDir = init();
   c2r = new CompareToReference2();
   c2r.init(this.reference);
   chromosomeChunker = new ChromosomeChunker(c2r);
   chromosomeChunker.init();
   Logger.info("Reading Input SAM Header and identifying read length");
   getSamHeaderAndReadLength();
   Logger.info("Read length: " + readLength);
   Logger.info("Loading target regions");
   loadRegions();
   loadJunctions();
   Clock clock = new Clock("Realignment");
   clock.start();
   // Contig output is optional; writer stays null when no file configured
   if (contigFile != null) {
      contigWriter = new BufferedWriter(new FileWriter(contigFile, false));
   }
   // Record this program run in each output header's @PG chain
   for (int i=0; i<inputSams.length; i++) {
      SAMProgramRecord pg = new SAMProgramRecord("ABRA2");
      pg.setProgramVersion(this.version);
      pg.setCommandLine(cl);
      samHeaders[i].addProgramRecord(pg);
   }
   writer = new SortedSAMWriter(outputFiles, tempDir.toString(), samHeaders, isKeepTmp, chromosomeChunker, finalCompressionLevel);
   // Spawn thread for each chromosome
   // TODO: Validate identical sequence dictionary for each input file
   for (int i=0; i<this.chromosomeChunker.getChunks().size(); i++) {
      spawnChromosomeThread(i);
   }
   // for (SAMSequenceRecord seqRecord : this.samHeaders[0].getSequenceDictionary().getSequences()) {
   // String chromosome = seqRecord.getSequenceName();
   // this.spawnChromosomeThread(chromosomeChunkIdx);
   Logger.info("Waiting for processing threads to complete");
   threadManager.waitForAllThreadsToComplete();
   if (contigWriter != null) {
      contigWriter.close();
   }
   clock.stopAndPrint();
   clock = new Clock("Sort and cleanup");
   clock.start();
   // Cut num threads in half to allow for async writer thread
   threadManager = new ThreadManager(Math.max(numThreads / 2, 1));
   for (int i=0; i<outputFiles.length; i++) {
      SortedSAMWriterRunnable thread = new SortedSAMWriterRunnable(threadManager, writer, i, inputSams[i]);
      threadManager.spawnThread(thread);
   }
   Logger.info("Waiting for writer threads to complete");
   threadManager.waitForAllThreadsToComplete();
   clock.stopAndPrint();
   Logger.info("Done.");
}
void processChromosomeChunk(int chromosomeChunkIdx) throws Exception {
Feature chromosomeChunk = chromosomeChunker.getChunks().get(chromosomeChunkIdx);
String chromosome = chromosomeChunk.getSeqname();
Logger.info("Processing chromosome chunk: " + chromosomeChunk);
Clock clock = new Clock("Chromosome: " + chromosomeChunk);
clock.start();
writer.initChromosomeChunk(chromosomeChunkIdx);
MultiSamReader reader = new MultiSamReader(this.inputSams, this.minMappingQuality, this.isPairedEnd, chromosomeChunk);
List<List<SAMRecordWrapper>> currReads = new ArrayList<List<SAMRecordWrapper>>();
for (int i=0; i<this.inputSams.length; i++) {
currReads.add(new ArrayList<SAMRecordWrapper>());
}
List<List<SAMRecordWrapper>> outOfRegionReads = new ArrayList<List<SAMRecordWrapper>>();
for (int i=0; i<this.inputSams.length; i++) {
outOfRegionReads.add(new ArrayList<SAMRecordWrapper>());
}
Map<Feature, Map<SimpleMapper, ContigAlignerResult>> regionContigs = new HashMap<Feature, Map<SimpleMapper, ContigAlignerResult>>();
int readCount = 0;
// Identify regions overlapping the current chromosome chunk
List<Feature> chromosomeRegions = new ArrayList<Feature>();
for (Feature region : regions) {
if (region.getSeqname().equals(chromosome)) {
if (region.getStart() > chromosomeChunk.getStart()-MAX_REGION_LENGTH && region.getEnd() < chromosomeChunk.getEnd()+MAX_REGION_LENGTH) {
chromosomeRegions.add(region);
}
}
}
List<Feature> chromosomeJunctions = new ArrayList<Feature>();
for (Feature junction : junctions) {
if (junction.getSeqname().equals(chromosome)) {
chromosomeJunctions.add(junction);
}
}
Map<Feature, List<Feature>> regionJunctions = JunctionUtils.getRegionJunctions(chromosomeRegions, chromosomeJunctions, readLength, MAX_REGION_LENGTH);
Set<Integer> regionsToProcess = new TreeSet<Integer>();
int currRegionIdx = -1;
for (SAMRecordWrapper record : reader) {
// If this is an unmapped read anchored by its mate, check rc flag
SAMRecord read1 = record.getSamRecord();
if (read1.getReadUnmappedFlag() && !read1.getMateUnmappedFlag()) {
if (!read1.getReadNegativeStrandFlag() && !read1.getMateNegativeStrandFlag()) {
// Both ends in forward orientation. Reverse the unmapped read
read1.setReadString(rc.reverseComplement(read1.getReadString()));
read1.setBaseQualityString(rc.reverse(read1.getBaseQualityString()));
read1.setReadNegativeStrandFlag(true);
} else if (read1.getReadNegativeStrandFlag() && read1.getMateNegativeStrandFlag()) {
// Both ends in reverse orientation. Reverse the unmapped read
read1.setReadString(rc.reverseComplement(read1.getReadString()));
read1.setBaseQualityString(rc.reverse(read1.getBaseQualityString()));
read1.setReadNegativeStrandFlag(false);
}
}
List<Integer> overlappingRegions = Feature.findAllOverlappingRegions(reader.getSAMFileHeader(), record, chromosomeRegions, currRegionIdx);
// int regionIdx = Feature.findFirstOverlappingRegion(reader.getSAMFileHeader(), record, chromosomeRegions, currRegionIdx);
// Identify next region that is a candidate for processing
// Note: Splicing can cause reads to go in and out of a region
// if (regionIdx >= 0) {
if (!overlappingRegions.isEmpty()) {
regionsToProcess.addAll(overlappingRegions);
}
// Cache read for processing at end of region
currReads.get(record.getSampleIdx()).add(record);
Iterator<Integer> regionIter = regionsToProcess.iterator();
if (regionIter.hasNext()) {
currRegionIdx = regionIter.next();
// If start position for current read is beyond current region, trigger assembly
Feature currRegion = chromosomeRegions.get(currRegionIdx);
if (record.getAdjustedAlignmentStart() > currRegion.getEnd() + this.readLength*2) {
Logger.debug("Processing region: %s", currRegion);
Map<SimpleMapper, ContigAlignerResult> mappedContigs = processRegion(currRegion, currReads, regionJunctions.get(currRegion));
Logger.debug("Region: %s assembled: %d contigs", currRegion, mappedContigs.keySet().size());
regionContigs.put(currRegion, mappedContigs);
// Remove curr region from list of regions to process
regionIter.remove();
}
}
/*
// TODO: Consider dropping this... Reads are out of scope when we've moved beyond them via standard processing?
if (overlappingRegions.isEmpty()) {
// Process out of region read and output if ready.
List<SAMRecordWrapper> outOfRegionReadsForSample = outOfRegionReads.get(record.getSampleIdx());
outOfRegionReadsForSample.add(record);
if (outOfRegionReads.get(record.getSampleIdx()).size() > 2500) {
for (SAMRecordWrapper outOfRegionRead : outOfRegionReadsForSample) {
this.writer.addAlignment(record.getSampleIdx(), outOfRegionRead.getSamRecord());
}
outOfRegionReadsForSample.clear();
}
}
*/
// Todo - make constant or parameterize
int MAX_READ_RANGE = 1000 + this.readLength;
// Check for out of scope reads every 2500 reads (TODO: is 2500 the best number?)
if (readCount % 2500 == 0) {
// Remap / output / clear out of scope reads
List<List<SAMRecordWrapper>> readsToRemap = new ArrayList<List<SAMRecordWrapper>>();
// Initialize per sample lists
for (List<SAMRecordWrapper> origSample : currReads) {
List<SAMRecordWrapper> sampleReadsToRemap = new ArrayList<SAMRecordWrapper>();
readsToRemap.add(sampleReadsToRemap);
Iterator<SAMRecordWrapper> iter = origSample.iterator();
while (iter.hasNext()) {
SAMRecordWrapper read = iter.next();
// record == most recent read. read = cached read
if (record.getSamRecord().getAlignmentStart() - read.getSamRecord().getAlignmentStart() > MAX_READ_RANGE) {
// Only output reads with start pos within current chromosomeChunk
if (read.getSamRecord().getAlignmentStart() >= chromosomeChunk.getStart() &&
read.getSamRecord().getAlignmentStart() <= chromosomeChunk.getEnd()) {
sampleReadsToRemap.add(read);
}
iter.remove();
}
}
}
// Remap out of scope reads
long start = System.currentTimeMillis();
int totalReads = remapReads(regionContigs, readsToRemap, chromosomeChunkIdx);
long stop = System.currentTimeMillis();
Logger.debug("REMAP_READS_MSECS:\t%d\t%d\t%s:%d", (stop-start), totalReads, record.getSamRecord().getReferenceName(), record.getSamRecord().getAlignmentStart());
// Logger.debug("REMAP_READS_SECS:\t%d\t%s:%d", (stop-start)/1000, record.getSamRecord().getReferenceName(), record.getSamRecord().getAlignmentStart());
// Remove out of scope region assemblies
List<Feature> regionsToRemove = new ArrayList<Feature>();
for (Feature region : regionContigs.keySet()) {
if (getFirstStartPos(currReads)-region.getStart() > MAX_READ_RANGE) {
regionsToRemove.add(region);
}
}
for (Feature region : regionsToRemove) {
Logger.debug("Removing contigs for region: %s", region.toString());
regionContigs.remove(region);
}
String logPrefix = record.getSamRecord().getReferenceName() + ":" + record.getSamRecord().getAlignmentStart() + " : ";
if (regionContigs.size() > 10) {
Logger.debug("%s\tregionContigs size: %d", logPrefix, regionContigs.size());
}
//TODO: Revisit this. Is it still necessary?
int currReadsCount = 0;
int idx = 0;
boolean shouldClear = false;
for (List<SAMRecordWrapper> reads : currReads) {
currReadsCount += reads.size();
if (reads.size() >= this.maxCachedReads) {
shouldClear = true;
Logger.warn(logPrefix + " Too many reads for sample: " + idx + " num_reads: " + reads.size() + ", clearing.");
}
idx += 1;
}
if (shouldClear) {
for (int i=0; i<currReads.size(); i++) {
List<SAMRecordWrapper> reads = currReads.get(i);
for (SAMRecordWrapper read : reads) {
this.writer.addAlignment(i, read.getSamRecord(), chromosomeChunkIdx);
}
reads.clear();
}
}
if (currReadsCount > 250000) {
Logger.info(logPrefix + "\tCurr reads size: " + currReadsCount);
}
int outOfRegionCount = 0;
for (List<SAMRecordWrapper> reads : outOfRegionReads) {
outOfRegionCount += reads.size();
}
if (outOfRegionCount > 10000) {
Logger.info(logPrefix + "\tOut of region reads size: " + outOfRegionCount);
}
}
readCount += 1;
}
// Attempt to process last region if applicable
Iterator<Integer> regionIter = regionsToProcess.iterator();
while (regionIter.hasNext()) {
currRegionIdx = regionIter.next();
// We've moved beyond the current region
// Assemble reads
Feature region = chromosomeRegions.get(currRegionIdx);
Logger.debug("Processing region: %s", region);
Map<SimpleMapper, ContigAlignerResult> mappedContigs = processRegion(region, currReads, regionJunctions.get(region));
Logger.debug("Region: %s assembled: %d contigs", region, mappedContigs.keySet().size());
regionContigs.put(region, mappedContigs);
}
// Remap remaining reads
remapReads(regionContigs, currReads, chromosomeChunkIdx);
currReads.clear();
regionContigs.clear();
// Output remaining out of region reads
for (int i=0; i<outOfRegionReads.size(); i++) {
List<SAMRecordWrapper> outOfRegionReadsForSample = outOfRegionReads.get(i);
for (SAMRecordWrapper outOfRegionRead : outOfRegionReadsForSample) {
this.writer.addAlignment(i, outOfRegionRead.getSamRecord(), chromosomeChunkIdx);
}
outOfRegionReadsForSample.clear();
}
reader.close();
writer.finishChromosomeChunk(chromosomeChunkIdx);
clock.stopAndPrint();
}
/**
 * Return the smallest alignment start position across the heads of the
 * per-sample read lists, or Integer.MAX_VALUE when every list is empty.
 * Only the first element of each list is inspected — assumes reads are
 * cached in coordinate order upstream (TODO confirm with caller).
 */
private int getFirstStartPos(List<List<SAMRecordWrapper>> readsList) {
    int earliest = Integer.MAX_VALUE;
    for (List<SAMRecordWrapper> sampleReads : readsList) {
        if (!sampleReads.isEmpty()) {
            int startPos = sampleReads.get(0).getSamRecord().getAlignmentStart();
            earliest = Math.min(earliest, startPos);
        }
    }
    return earliest;
}
/**
 * Log version, input/output files and all effective settings at startup,
 * and fail fast when running on an unsupported (pre-Java 7) JVM.
 *
 * @param outputFiles output BAM paths, parallel to {@code inputSams}
 * @throws IOException declared for interface stability (hostname lookup errors are caught)
 */
private void logStartupInfo(String[] outputFiles) throws IOException {
    Logger.info("ABRA version: " + this.version);
    int ctr = 0;
    for (String input : inputSams) {
        Logger.info("input" + ctr + ": " + input);
        // Bug fix: counter was never incremented, so every input was logged as "input0".
        ctr += 1;
    }
    ctr = 0;
    for (String output : outputFiles) {
        Logger.info("output" + ctr + ": " + output);
        // Bug fix: counter was never incremented, so every output was logged as "output0".
        ctr += 1;
    }
    Logger.info("regions: " + regionsBed);
    Logger.info("reference: " + reference);
    Logger.info("num threads: " + numThreads);
    Logger.info(assemblerSettings.getDescription());
    Logger.info("paired end: " + isPairedEnd);
    Logger.info("isSkipAssembly: " + isSkipAssembly);
    Logger.info("useSoftClippedReads: " + useSoftClippedReads);
    Logger.info("SW scoring: " + Arrays.toString(swScoring));
    Logger.info("Soft clip params: " + Arrays.toString(softClipParams));
    String javaVersion = System.getProperty("java.version");
    Logger.info("Java version: " + javaVersion);
    // Native libraries and language features require Java 7+.
    if (javaVersion.startsWith("1.6") || javaVersion.startsWith("1.5") || javaVersion.startsWith("1.4")) {
        throw new RuntimeException("Please upgrade to Java 7 or later to run ABRA.");
    }
    try {
        InetAddress localhost = java.net.InetAddress.getLocalHost();
        String hostname = localhost.getHostName();
        Logger.info("hostname: " + hostname);
    } catch (Throwable t) {
        // Hostname is informational only; never fail startup over it.
        Logger.error("Error getting hostname: " + t.getMessage());
    }
}
/**
 * Queue a worker task that realigns the given chromosome chunk on the
 * shared thread pool.
 *
 * @param chromosomeChunkIdx index of the chunk to process
 * @throws InterruptedException if interrupted while waiting for pool capacity
 */
private void spawnChromosomeThread(int chromosomeChunkIdx) throws InterruptedException {
    Logger.debug("Queuing thread for chromosome: " + chromosomeChunkIdx);
    ReAlignerRunnable task = new ReAlignerRunnable(threadManager, this, chromosomeChunkIdx);
    threadManager.spawnThread(task);
}
/**
 * Append assembled contig text to the shared contig dump file.  Synchronized
 * because multiple region-processing threads may write concurrently.
 * No-op when contig output was not requested (contigWriter is null).
 *
 * @param contigs contig text to append
 * @throws IOException on write failure
 */
private synchronized void appendContigs(String contigs) throws IOException {
    if (contigWriter == null) {
        return;
    }
    contigWriter.write(contigs);
}
/**
 * Attempt to improve a single read's alignment using the assembled contigs.
 * When an improved alignment is found the read is updated in place:
 * position, cigar, strand, mapq and the YA/YO/YM/YX/NM tags.  A read that
 * maps to a contig without changing alignment gets only the YA tag.
 *
 * @param readEvaluator evaluator over the contigs assembled for nearby regions
 * @param read read to (potentially) update in place
 * @param origEditDist edit distance of the read's original alignment
 */
private void remapRead(ReadEvaluator readEvaluator, SAMRecord read, int origEditDist) {
    Alignment alignment = readEvaluator.getImprovedAlignment(origEditDist, read);
    if (alignment != null) {
        // Refuse moves beyond half the sorted writer's caching window — the
        // output writer could not re-sort a read displaced that far.
        if (Math.abs(read.getAlignmentStart() - alignment.pos) > SortedSAMWriter.GENOMIC_RANGE_TO_CACHE / 2) {
            Logger.warn("Not moving read: " + read.getReadName() + " from: " + read.getAlignmentStart() + " to: " + alignment.pos);
        } else {
            int readPos = alignment.pos;
            // Set contig alignment info for all reads that map to contigs (even if read is unchanged)
            String ya = alignment.chromosome + ":" + alignment.contigPos + ":" + alignment.contigCigar;
            // If no change to alignment, just record the YA tag
            if (!read.getReadUnmappedFlag() && read.getAlignmentStart() == readPos && read.getCigarString().equals(alignment.cigar)) {
                read.setAttribute("YA", ya);
            }
            // If the read has actually moved to an improved alignment, update
            if (origEditDist > alignment.numMismatches && (read.getReadUnmappedFlag() || read.getAlignmentStart() != readPos || !read.getCigarString().equals(alignment.cigar))) {
                read.setAttribute("YA", ya);
                // Original alignment info
                String yo = "N/A";
                if (!read.getReadUnmappedFlag()) {
                    String origOrientation = read.getReadNegativeStrandFlag() ? "-" : "+";
                    yo = read.getReferenceName() + ":" + read.getAlignmentStart() + ":" + origOrientation + ":" + read.getCigarString();
                } else {
                    // Previously unmapped read is now placed; give it max observed mapq.
                    read.setReadUnmappedFlag(false);
                    read.setMappingQuality(this.maxMapq);
                }
                read.setAttribute("YO", yo);
                // Update alignment position and cigar and orientation
                read.setAlignmentStart(alignment.pos);
                read.setCigarString(alignment.cigar);
                // If this is true, the read was already reverse complemented in the original alignment.
                // (Idiom fix: dropped redundant "? true : false" / "? false : true" ternaries.)
                if (read.getReadNegativeStrandFlag()) {
                    read.setReadNegativeStrandFlag(alignment.orientation == Orientation.FORWARD);
                } else {
                    read.setReadNegativeStrandFlag(alignment.orientation != Orientation.FORWARD);
                }
                // Number of mismatches to contig
                read.setAttribute("YM", alignment.numMismatches);
                // Original edit distance
                read.setAttribute("YX", origEditDist);
                // Updated edit distance
                read.setAttribute("NM", SAMRecordUtils.getEditDistance(read, c2r));
                //TODO: Compute mapq intelligently???
                read.setMappingQuality(Math.min(read.getMappingQuality()+10, this.maxMapq));
            }
        }
    }
}
/**
 * Attempt to improve the alignment of each cached read using the assembled
 * contigs, then flush every read (moved or not) to the writer.
 *
 * @param mappedContigs per-region contig alignments
 * @param readsList per-sample lists of cached reads
 * @param chromosomeChunkIdx output chunk index for the writer
 * @return total number of reads examined
 */
private int remapReads(Map<Feature, Map<SimpleMapper, ContigAlignerResult>> mappedContigs,
        List<List<SAMRecordWrapper>> readsList, int chromosomeChunkIdx) throws Exception {
    ReadEvaluator readEvaluator = new ReadEvaluator(mappedContigs);
    int totalReads = 0;
    for (int sampleIdx = 0; sampleIdx < readsList.size(); sampleIdx++) {
        List<SAMRecordWrapper> reads = readsList.get(sampleIdx);
        for (SAMRecordWrapper readWrapper : reads) {
            totalReads += 1;
            SAMRecord read = readWrapper.getSamRecord();
            // Only consider unmapped reads or reads meeting the mapq threshold.
            boolean mapqOk = read.getReadUnmappedFlag() || read.getMappingQuality() >= this.minMappingQuality;
            if (mapqOk) {
                // Only remap reads whose mate is on the same reference within
                // the writer's caching window.
                boolean mateNearby = read.getReferenceName().equals(read.getMateReferenceName())
                        && Math.abs(read.getAlignmentStart() - read.getMateAlignmentStart()) < SortedSAMWriter.GENOMIC_RANGE_TO_CACHE;
                if (mateNearby) {
                    // TODO: Use NM tag if available (need to handle soft clipping though!)
                    int origEditDist = SAMRecordUtils.getEditDistance(read, c2r);
                    if (origEditDist > 0 || SAMRecordUtils.getNumSplices(read) > 0) {
                        remapRead(readEvaluator, read, origEditDist);
                    }
                }
            }
        }
        // Output all reads for this sample
        for (SAMRecordWrapper read : reads) {
            this.writer.addAlignment(sampleIdx, read.getSamRecord(), chromosomeChunkIdx);
        }
    }
    return totalReads;
}
/**
 * Return, per sample, only those cached reads that overlap the given region.
 * The result always contains one (possibly empty) list per input sample.
 *
 * @param region region of interest
 * @param readsList per-sample read lists
 * @return per-sample lists of overlapping reads
 */
private List<List<SAMRecordWrapper>> subsetReads(Feature region, List<List<SAMRecordWrapper>> readsList) {
    List<List<SAMRecordWrapper>> subset = new ArrayList<List<SAMRecordWrapper>>();
    for (List<SAMRecordWrapper> sampleReads : readsList) {
        List<SAMRecordWrapper> overlapping = new ArrayList<SAMRecordWrapper>();
        for (SAMRecordWrapper read : sampleReads) {
            if (region.overlapsRead(read.getSamRecord())) {
                overlapping.add(read);
            }
        }
        subset.add(overlapping);
    }
    return subset;
}
/**
 * Align the contig against each junction-spliced reference and the plain
 * regional reference, keeping the single highest-scoring alignment.
 * Deduplicates the previously copy-pasted best-score selection into one
 * loop; the plain reference is evaluated last, preserving the original
 * tie-breaking behavior (strict '>' keeps the first best seen).
 *
 * @param contig contig sequence to align
 * @param ssw aligner over the plain regional reference
 * @param sswJunctions aligners over junction-spliced references
 * @return best-scoring alignment, or null if nothing aligned
 */
private ContigAlignerResult alignContig(String contig, ContigAligner ssw, List<ContigAligner> sswJunctions) {
    ContigAlignerResult bestResult = null;
    int bestScore = -1;
    List<ContigAligner> aligners = new ArrayList<ContigAligner>(sswJunctions);
    aligners.add(ssw);
    for (ContigAligner aligner : aligners) {
        ContigAlignerResult sswResult = aligner.align(contig);
        if (sswResult != null && sswResult.getScore() > bestScore) {
            bestScore = sswResult.getScore();
            bestResult = sswResult;
        }
    }
    if (bestResult != null) {
        Logger.debug("BEST_SSW: %d : %s : %d: %d : %s",
                bestResult.getGenomicPos(), bestResult.getCigar(), bestResult.getRefPos(), bestResult.getScore(), bestResult.getSequence());
    } else {
        Logger.debug("NO_SSW: %s", contig);
    }
    //TODO: Check for tie scores with different final alignment
    return bestResult;
}
/**
 * Assemble contigs for a region via the native assembler and align the
 * non-reference ones, appending accepted alignments to {@code results}.
 *
 * @param results output list; accepted contig alignments are appended
 * @param region region being assembled
 * @param refSeq padded reference sequence for the region
 * @param bams input BAM paths passed to the native assembler
 * @param readsList per-sample reads overlapping the region
 * @param ssw aligner over the plain regional reference
 * @param sswJunctions aligners over junction-spliced references
 * @param mnf assembler min node (kmer) frequency
 * @param mbq assembler min base quality
 * @param mer assembler min edge ratio
 * @return true if an indel was seen near a contig end, signalling the caller
 *         to retry assembly with less stringent pruning
 */
private boolean assemble(List<ContigAlignerResult> results, Feature region,
String refSeq, List<String> bams, List<List<SAMRecordWrapper>> readsList, ContigAligner ssw,
List<ContigAligner> sswJunctions, int mnf, int mbq, double mer) throws IOException {
boolean shouldRetry = false;
NativeAssembler assem = (NativeAssembler) newAssembler(region);
List<Feature> regions = new ArrayList<Feature>();
regions.add(region);
String contigs = assem.assembleContigs(bams, regions, region.getDescriptor(), true, this, c2r, readsList, mnf, mbq, mer);
// "<ERROR>" / "<REPEAT>" are sentinel strings emitted by the native assembler.
if (!contigs.equals("<ERROR>") && !contigs.equals("<REPEAT>") && !contigs.isEmpty()) {
if (contigWriter != null) {
appendContigs(contigs);
}
List<ScoredContig> scoredContigs = ScoredContig.convertAndFilter(contigs);
// Map contigs to reference
for (ScoredContig contig : scoredContigs) {
// Filter contigs that match the reference
if (!refSeq.contains(contig.getContig())) {
ContigAlignerResult sswResult = alignContig(contig.getContig(), ssw, sswJunctions);
if (sswResult == ContigAlignerResult.INDEL_NEAR_END) {
// Possible low-coverage indel near a contig edge; caller may retry.
shouldRetry = true;
} else if (sswResult != null) {
// TODO: In multi-region processing, check to ensure identical contigs have identical mappings
results.add(sswResult);
}
}
}
}
return shouldRetry;
}
/**
 * Assemble and align contigs for a single region, returning a map from
 * contig mapper to its alignment against the reference.  Steps:
 * reads are subset to the region, junction permutations produce spliced
 * reference aligners, contigs are assembled (with one retry at relaxed
 * pruning if an indel lands near a contig end), and optionally artificial
 * contigs are generated from soft clips / observed indels.
 *
 * @param region region to process (must be <= 10000 bases)
 * @param reads per-sample cached reads (superset of the region)
 * @param junctions splice junctions relevant to the region (may be null/empty)
 * @return contig mappers keyed to their reference alignments; empty when the
 *         region is skipped
 * @throws IllegalArgumentException if the region exceeds 10000 bases
 */
public Map<SimpleMapper, ContigAlignerResult> processRegion(Feature region, List<List<SAMRecordWrapper>> reads, List<Feature> junctions) throws Exception {
    long start = System.currentTimeMillis();
    if (isDebug) {
        Logger.info("Processing region: " + region.getDescriptor());
    }
    if (region.getLength() > 10000) {
        throw new IllegalArgumentException("Region too big: [" + region + "]");
    }
    Map<SimpleMapper, ContigAlignerResult> mappedContigs = new HashMap<SimpleMapper, ContigAlignerResult>();
    List<List<SAMRecordWrapper>> readsList = subsetReads(region, reads);
    boolean isRegionOk = true;
    for (List<SAMRecordWrapper> sampleReads : readsList) {
        //TODO: Don't allow these reads to remap to neighboring regions.
        // Bug fix: was "maxReadsInRegion < 0 || size > max", which rejected
        // every region whenever the limit was disabled (negative).  A
        // non-positive limit now means "no limit".
        if (maxReadsInRegion > 0 && sampleReads.size() > this.maxReadsInRegion) {
            Logger.info("Too many reads in %s: %d", region, sampleReads.size());
            isRegionOk = false;
            break;
        }
    }
    if (isRegionOk) {
        List<String> bams = new ArrayList<String>(Arrays.asList(this.inputSams));
        // Get reference sequence matching current region (pad by 2 read lengths on each side)
        int chromosomeLength = c2r.getReferenceLength(region.getSeqname());
        int refSeqStart = Math.max((int) region.getStart() - this.readLength*2, 1);
        int refSeqLength = Math.min((int) region.getLength() + this.readLength*4, chromosomeLength-1);
        String refSeq = c2r.getSequence(region.getSeqname(), refSeqStart, refSeqLength);
        ContigAligner ssw = new ContigAligner(refSeq, region.getSeqname(), refSeqStart, this.readLength, minAnchorLen, maxAnchorMismatches);
        List<ContigAligner> sswJunctions = new ArrayList<ContigAligner>();
        // Enumerate plausible junction combinations; an explosion of
        // permutations is treated as "no junction aligners" rather than a failure.
        List<List<Feature>> junctionPermutations = new ArrayList<List<Feature>>();
        try {
            junctionPermutations = JunctionUtils.combineJunctions(region, junctions, MAX_REGION_LENGTH, this.readLength);
        } catch (TooManyJunctionPermutationsException e) {
            Logger.warn("TOO_MANY_POTENTIAL_JUNCTION_PERMUTATIONS: " + region.getDescriptor());
        }
        Logger.debug("NUM_JUNCTION_PERMUTATIONS:\t%d\t%s", junctionPermutations.size(), region);
        if (junctionPermutations.size() > JunctionUtils.MAX_JUNCTION_PERMUTATIONS) {
            Logger.warn("TOO_MANY_JUNCTION_PERMUTATIONS: " + region.getDescriptor() + "\t" + junctionPermutations.size());
        } else {
            // Build one spliced-reference aligner per junction permutation.
            for (List<Feature> junctionPerm : junctionPermutations) {
                // List of junction positions within localized reference
                List<Integer> junctionPos = new ArrayList<Integer>();
                // List of junction lengths within localized reference
                List<Integer> junctionLengths = new ArrayList<Integer>();
                StringBuffer juncSeq = new StringBuffer();
                int refStart = Math.max((int) junctionPerm.get(0).getStart() - (int) region.getLength() - this.readLength*2, 1);
                String leftSeq = c2r.getSequence(region.getSeqname(), refStart, (int) junctionPerm.get(0).getStart() - refStart);
                juncSeq.append(leftSeq);
                junctionPos.add(leftSeq.length());
                junctionLengths.add((int) junctionPerm.get(0).getLength()+1);
                boolean isJunctionGapTooBig = false;
                // Append exonic sequence between consecutive junctions.
                for (int i=1; i<junctionPerm.size(); i++) {
                    int midStart = (int) junctionPerm.get(i-1).getEnd()+1;
                    String middleSeq = c2r.getSequence(region.getSeqname(), midStart, (int) junctionPerm.get(i).getStart() - midStart);
                    if (middleSeq.length() > region.getLength()*2) {
                        isJunctionGapTooBig = true;
                        break;
                    }
                    juncSeq.append(middleSeq);
                    junctionPos.add(juncSeq.length());
                    junctionLengths.add((int) junctionPerm.get(i).getLength()+1);
                }
                // TODO: Tighten this up...
                if (!isJunctionGapTooBig && juncSeq.length() < region.getLength()*10) {
                    // Sequence on right of last junction
                    // Junction stop is exclusive, so add 1 to starting position (junction end + 1)
                    Feature lastJunction = junctionPerm.get(junctionPerm.size()-1);
                    int rightStart = (int) lastJunction.getEnd()+1;
                    int rightStop = Math.min((int) lastJunction.getEnd() + (int) region.getLength() + this.readLength*2, chromosomeLength-1);
                    if (rightStop-rightStart > 0) {
                        String rightSeq = c2r.getSequence(region.getSeqname(), rightStart, rightStop-rightStart);
                        juncSeq.append(rightSeq);
                        // Junction pos and length should already be added
                        ContigAligner sswJunc = new ContigAligner(juncSeq.toString(), region.getSeqname(), refStart, this.readLength, minAnchorLen, maxAnchorMismatches, junctionPos, junctionLengths);
                        sswJunctions.add(sswJunc);
                    }
                }
            }
            // Assemble contigs
            if (this.isSkipAssembly || region.getKmer() > this.readLength-15) {
                Logger.debug("Skipping assembly of region: " + region.getDescriptor() + " - " + region.getKmer());
            } else {
                List<ContigAlignerResult> results = new ArrayList<ContigAlignerResult>();
                boolean shouldRetry = assemble(results, region, refSeq, bams, readsList, ssw, sswJunctions,
                        assemblerSettings.getMinNodeFrequncy(), assemblerSettings.getMinBaseQuality(),
                        assemblerSettings.getMinEdgeRatio()/2.0);
                if (shouldRetry) {
                    Logger.debug("RETRY_ASSEMBLY: %s", region);
                    // Indel near edge of contig indicates that we may have a low coverage indel encountered.
                    // Try to reassemble using less stringent pruning to see if we can get greater coverage.
                    results.clear();
                    assemble(results, region, refSeq, bams, readsList, ssw, sswJunctions,
                            assemblerSettings.getMinNodeFrequncy()/2, assemblerSettings.getMinBaseQuality()/2,
                            assemblerSettings.getMinEdgeRatio()/2.0);
                }
                for (ContigAlignerResult sswResult : results) {
                    mappedContigs.put(new SimpleMapper(sswResult.getSequence(), maxMismatchRate), sswResult);
                }
            }
            if (useSoftClippedReads || useObservedIndels) {
                Logger.debug("Processing non-assembled contigs for region: [" + region + "]");
                // Go through artificial contig generation using indels observed in the original reads
                AltContigGenerator altContigGenerator = new AltContigGenerator(softClipParams[0], softClipParams[1], softClipParams[2], softClipParams[3],
                        useObservedIndels, useSoftClippedReads, useConsensusSeq, minMappingQuality);
                Collection<String> altContigs = altContigGenerator.getAltContigs(readsList, c2r, readLength);
                for (String contig : altContigs) {
                    // TODO: Check to see if this contig is already in the map before aligning
                    //TODO: Include junctions !!!
                    ContigAlignerResult sswResult = ssw.align(contig);
                    if (sswResult != null && sswResult != ContigAlignerResult.INDEL_NEAR_END) {
                        // Set as secondary for remap prioritization
                        sswResult.setSecondary(true);
                        // Store for read mapping
                        mappedContigs.put(new SimpleMapper(sswResult.getSequence(), maxMismatchRate), sswResult);
                    }
                }
            }
        }
    }
    long stop = System.currentTimeMillis();
    Logger.debug("PROCESS_REGION_MSECS:\t%d\t%s", (stop-start), region.getDescriptor());
    return mappedContigs;
}
/**
 * Pair up junctions that could be spanned by a single read: the second
 * junction must start after the first ends, but no more than maxDist bases
 * downstream.  All ordered pairs meeting this criterion are returned.
 *
 * @param junctions candidate junctions
 * @param maxDist maximum gap between the first junction's end and the second's start
 * @return list of (upstream, downstream) junction pairs
 */
protected List<Pair<Feature, Feature>> pairJunctions(List<Feature> junctions, int maxDist) {
    List<Pair<Feature, Feature>> junctionPairs = new ArrayList<Pair<Feature, Feature>>();
    for (Feature junc1 : junctions) {
        long windowEnd = junc1.getEnd() + maxDist;
        for (Feature junc2 : junctions) {
            boolean isDownstream = junc2.getStart() > junc1.getEnd();
            if (isDownstream && junc2.getStart() <= windowEnd) {
                junctionPairs.add(new Pair<Feature, Feature>(junc1, junc2));
            }
        }
    }
    return junctionPairs;
}
/**
 * Load target regions from a BED file.  When kmer sizes were not preset
 * (detected via the first region's kmer being 0), adjacent regions are
 * collapsed and re-split into overlapping windows.
 *
 * @param regionsBed path to the BED file
 * @param readLength max read length used for collapsing
 * @param hasPresetKmers whether the BED carries preset kmer sizes
 * @return processed region list
 */
static List<Feature> getRegions(String regionsBed, int readLength, boolean hasPresetKmers) throws IOException {
    List<Feature> regions = new RegionLoader().load(regionsBed, hasPresetKmers);
    boolean needsWindowing = !regions.isEmpty() && regions.get(0).getKmer() == 0;
    if (needsWindowing) {
        regions = splitRegions(RegionLoader.collapseRegions(regions, readLength));
    }
    return regions;
}
/**
 * Build target regions covering every sequence in the BAM header — used
 * when no BED file was supplied.  Regions are collapsed then re-split into
 * overlapping windows.
 *
 * @param readLength max read length used for collapsing
 * @param header SAM header providing the sequence dictionary
 * @return processed region list covering all reference sequences
 */
static List<Feature> getRegionsNoBed(int readLength, SAMFileHeader header) throws IOException {
    List<Feature> regions = new ArrayList<Feature>();
    for (SAMSequenceRecord seq : header.getSequenceDictionary().getSequences()) {
        regions.add(new Feature(seq.getSequenceName(), 1, seq.getSequenceLength()));
    }
    return splitRegions(RegionLoader.collapseRegions(regions, readLength));
}
/**
 * Populate {@code this.regions}, either from the user-supplied BED file or,
 * when none was given, from the sequence dictionary of the first BAM header.
 * Logs every region at TRACE level.
 */
private void loadRegions() throws IOException {
    if (regionsBed == null) {
        Logger.info("No target bed file specified. Gathering regions using SAM header");
        this.regions = getRegionsNoBed(readLength, this.samHeaders[0]);
    } else {
        Logger.info("Loading target regions from : " + regionsBed);
        this.regions = getRegions(regionsBed, readLength, hasPresetKmers);
    }
    Logger.info("Num regions: " + regions.size());
    if (Logger.LEVEL == Logger.Level.TRACE) {
        for (Feature region : regions) {
            Logger.trace("%s\t%d\t%d\t%d", region.getSeqname(), region.getStart(), region.getEnd(), region.getKmer());
        }
    }
}
/**
 * Merge splice junctions from an optional GTF file and an optional observed
 * junction file into {@code this.junctions}.
 */
private void loadJunctions() throws IOException {
    if (this.gtfJunctionFile != null) {
        this.junctions = JunctionUtils.loadJunctionsFromGtf(gtfJunctionFile);
    }
    if (this.junctionFile != null) {
        List<Feature> observedJunctions = new RegionLoader().load(junctionFile, false);
        Logger.info("Loaded " + observedJunctions.size() + " observed junctions");
        junctions.addAll(observedJunctions);
    }
    Logger.info("Total junctions input: " + junctions.size());
}
/** Set the path of the target regions BED file (null = derive regions from SAM header). */
public void setRegionsBed(String bedFile) {
this.regionsBed = bedFile;
}
/**
 * Read the header of every input BAM and sample up to the first 1,000,000
 * reads of each to derive global stats: max read length, max mapq and (for
 * paired end) min/max insert lengths.  Also defaults the assembler's min
 * contig length when it has not been configured.
 */
private void getSamHeaderAndReadLength() throws IOException {
Logger.info("Identifying header and determining read length");
this.samHeaders = new SAMFileHeader[this.inputSams.length];
for (int i=0; i<this.inputSams.length; i++) {
SamReader reader = SAMRecordUtils.getSamReader(inputSams[i]);
try {
samHeaders[i] = reader.getFileHeader();
// Output ordering is managed by the sorted writer; mark header unsorted.
samHeaders[i].setSortOrder(SAMFileHeader.SortOrder.unsorted);
Iterator<SAMRecord> iter = reader.iterator();
int cnt = 0;
// Cap the scan at 1,000,000 reads per input.
while ((iter.hasNext()) && (cnt < 1000000)) {
SAMRecord read = iter.next();
this.readLength = Math.max(this.readLength, read.getReadLength());
this.maxMapq = Math.max(this.maxMapq, read.getMappingQuality());
// Assumes aligner sets proper pair flag correctly
if ((isPairedEnd) && (read.getReadPairedFlag()) && (read.getProperPairFlag())) {
this.minInsertLength = Math.min(this.minInsertLength, Math.abs(read.getInferredInsertSize()));
this.maxInsertLength = Math.max(this.maxInsertLength, Math.abs(read.getInferredInsertSize()));
}
cnt += 1;
}
// Allow some fudge in insert length
minInsertLength = Math.max(minInsertLength - 2*readLength, 0);
maxInsertLength = maxInsertLength + 2*readLength;
} finally {
// Always release the reader, even if header/read parsing throws.
reader.close();
}
}
Logger.info("Min insert length: " + minInsertLength);
Logger.info("Max insert length: " + maxInsertLength);
Logger.info("Max read length is: " + readLength);
if (assemblerSettings.getMinContigLength() < 1) {
assemblerSettings.setMinContigLength(Math.max(readLength+1, MIN_CONTIG_LENGTH));
}
Logger.info("Min contig length: " + assemblerSettings.getMinContigLength());
}
/**
 * Minimal immutable 2-tuple, used for junction pairing.  Note: does not
 * override equals/hashCode; instances compare by identity.
 */
static class Pair<T, Y> {
    private final T first;
    private final Y second;

    public Pair(T first, Y second) {
        this.first = first;
        this.second = second;
    }

    public T getFirst() {
        return first;
    }

    public Y getSecond() {
        return second;
    }
}
/**
 * Split any region longer than maxRegionLength + minRegionRemainder into
 * overlapping windows; shorter regions pass through untouched.
 *
 * @param regions regions to process
 * @param maxRegionLength target window length
 * @param minRegionRemainder minimum allowed tail window length
 * @param regionOverlap overlap between consecutive windows
 * @return flattened list of (possibly split) regions
 */
static List<Feature> splitRegions(List<Feature> regions,
        int maxRegionLength, int minRegionRemainder, int regionOverlap) {
    List<Feature> result = new ArrayList<Feature>();
    int splitThreshold = maxRegionLength + minRegionRemainder;
    for (Feature region : regions) {
        if (region.getLength() > splitThreshold) {
            result.addAll(splitWithOverlap(region, maxRegionLength, minRegionRemainder, regionOverlap));
        } else {
            result.add(region);
        }
    }
    return result;
}
/**
 * If any of the input list of features is greater than maxSize, split them into multiple features.
 * Convenience overload using the globally configured window size, remainder
 * and overlap (MAX_REGION_LENGTH / MIN_REGION_REMAINDER / REGION_OVERLAP).
 */
public static List<Feature> splitRegions(List<Feature> regions) {
return splitRegions(regions, MAX_REGION_LENGTH, MIN_REGION_REMAINDER, REGION_OVERLAP);
}
/**
 * Split a single region into overlapping windows using the globally
 * configured window size, remainder and overlap.
 */
public static List<Feature> splitWithOverlap(Feature region) {
return splitWithOverlap(region, MAX_REGION_LENGTH, MIN_REGION_REMAINDER, REGION_OVERLAP);
}
/**
 * Split one region into windows of maxRegionLength with consecutive windows
 * overlapping by regionOverlap bases.  The last window is extended to the
 * region end when the remainder would be shorter than minRegionRemainder.
 */
static List<Feature> splitWithOverlap(Feature region, int maxRegionLength,
int minRegionRemainder, int regionOverlap) {
List<Feature> regions = new ArrayList<Feature>();
long pos = region.getStart();
// Initialized just below pos so the loop runs at least once.
long end = pos-1;
while (end < region.getEnd()) {
long start = pos;
end = pos + maxRegionLength;
// Remember the un-clamped end; the next window starts relative to it
// even when this window's end gets clamped to the region end below.
long marker = end;
// If we're at or near the end of the region, stop at region end.
if (end > (region.getEnd() - minRegionRemainder)) {
end = region.getEnd();
}
pos = marker - regionOverlap;
regions.add(new Feature(region.getSeqname(), start, end));
}
return regions;
}
/**
 * Return the kmer-size ladder for a region: the region's pre-computed kmer
 * when present, otherwise the globally configured assembler kmer sizes.
 *
 * @param region region whose kmer sizes are requested
 * @return array of kmer sizes for assembly
 */
int[] getKmers(Feature region) {
    int kmerSize = region.getKmer();
    return kmerSize > 0 ? toKmerArray(kmerSize, readLength) : assemblerSettings.getKmerSize();
}
/**
 * Build a ladder of kmer sizes starting at kmerSize, stepping by 2, up to
 * (exclusive) min(readLength - 5, MAX_KMER_SIZE).
 *
 * Bug fix: the method previously read {@code this.readLength} and silently
 * ignored its {@code readLength} parameter.  The only visible caller
 * (getKmers) passes {@code this.readLength}, so behavior is unchanged for
 * existing call sites, but the parameter is now honored.
 *
 * @param kmerSize smallest kmer size to include
 * @param readLength read length bounding the largest kmer
 * @return ascending kmer sizes (possibly empty)
 */
int[] toKmerArray(int kmerSize, int readLength) {
    int maxKmerSize = Math.min(readLength - 5, MAX_KMER_SIZE);
    List<Integer> kmers = new ArrayList<Integer>();
    while (kmerSize < maxKmerSize) {
        kmers.add(kmerSize);
        kmerSize += 2;
    }
    int[] kmerSizes = new int[kmers.size()];
    int i = 0;
    for (int kmer : kmers) {
        kmerSizes[i++] = kmer;
    }
    return kmerSizes;
}
/**
 * Build a NativeAssembler configured for the given region: region-specific
 * kmer sizes when available, plus the globally configured assembler settings.
 */
private NativeAssembler newAssembler(Feature region) {
NativeAssembler assem = new NativeAssembler();
assem.setTruncateOutputOnRepeat(true);
assem.setMaxPathsFromRoot(100000);
assem.setReadLength(readLength);
//assem.setKmer(assemblerSettings.getKmerSize());
assem.setKmer(getKmers(region));
assem.setMinKmerFrequency(assemblerSettings.getMinNodeFrequncy());
assem.setMinEdgeRatio(assemblerSettings.getMinEdgeRatio());
assem.setMinBaseQuality(assemblerSettings.getMinBaseQuality());
assem.setMaxNodes(assemblerSettings.getMaxNodes());
assem.setMinReadCandidateFraction(assemblerSettings.getMinReadCandidateFraction());
assem.setMaxAverageDepth(assemblerSettings.getMaxAverageDepth());
return assem;
}
/**
 * Register a temp file for deletion at JVM exit, unless temp retention was
 * requested (debugging aid via the keep-tmp option).
 *
 * @param file file to delete on exit
 */
private void deleteOnExit(File file) {
    if (isKeepTmp) {
        return;
    }
    file.deleteOnExit();
}
/**
 * One-time initialization: resolve/override the system temp dir, configure
 * Smith-Waterman scoring, create a permission-restricted temp directory,
 * extract the native libraries into it, and create the thread pool.
 *
 * @return path of the temp directory holding the extracted native libraries
 */
private String init() throws IOException {
if (tmpDir == null) {
tmpDir = System.getProperty("java.io.tmpdir");
} else {
System.setProperty("java.io.tmpdir", tmpDir);
}
ContigAligner.init(swScoring);
// rwx for owner, rx for group on the dir holding extracted native libs.
Set<PosixFilePermission> perms = new HashSet<PosixFilePermission>();
perms.add(PosixFilePermission.OWNER_READ);
perms.add(PosixFilePermission.OWNER_WRITE);
perms.add(PosixFilePermission.OWNER_EXECUTE);
perms.add(PosixFilePermission.GROUP_READ);
perms.add(PosixFilePermission.GROUP_EXECUTE);
Path tempDir = Files.createTempDirectory("abra2_" + UUID.randomUUID(), PosixFilePermissions.asFileAttribute(perms));
// Cleaned up at JVM exit unless temp retention was requested.
deleteOnExit(tempDir.toFile());
Logger.info("Using temp directory: " + tempDir.toString());
new NativeLibraryLoader().load(tempDir.toString(), NativeLibraryLoader.ABRA, false);
// new NativeLibraryLoader().load(tempDir.toString(), NativeLibraryLoader.SSW, false);
// new NativeLibraryLoader().load(tempDir.toString(), NativeLibraryLoader.SSW_JNI, false);
new NativeLibraryLoader().load(tempDir.toString(), NativeLibraryLoader.DEFLATOR, true);
threadManager = new ThreadManager(numThreads);
return tempDir.toString();
}
/** Set the path of the reference FASTA. */
public void setReference(String reference) {
this.reference = reference;
}
/** Set the assembler tuning parameters. */
public void setAssemblerSettings(AssemblerSettings settings) {
this.assemblerSettings = settings;
}
/** Set the worker thread count. */
public void setNumThreads(int numThreads) {
this.numThreads = numThreads;
}
/** @return the reference-comparison helper used for edit-distance calculations. */
public CompareToReference2 getC2r() {
return this.c2r;
}
/** @return minimum mapping quality for a read to be considered for remapping. */
public int getMinMappingQuality() {
return this.minMappingQuality;
}
/** @return maximum observed insert length (plus fudge) for paired-end data. */
public int getMaxInsertLength() {
return this.maxInsertLength;
}
/** @return minimum observed insert length (minus fudge) for paired-end data. */
public int getMinInsertLength() {
return this.minInsertLength;
}
/** Override the maximum insert length. */
public void setMaxInsertLength(int maxInsertLen) {
this.maxInsertLength = maxInsertLen;
}
/** Override the minimum insert length. */
public void setMinInsertLength(int minInsertLen) {
this.minInsertLength = minInsertLen;
}
/** Delegate read filtering to SAMRecordUtils, taking paired-end mode into account. */
boolean isFiltered(SAMRecord read) {
return SAMRecordUtils.isFiltered(isPairedEnd, read);
}
/**
 * Read the build version from the Maven-generated pom.properties packaged in
 * the jar.  Returns "unknown" when the resource is missing (e.g. running
 * from unpacked classes in an IDE), unreadable, or lacks a version property.
 */
private static String getVersion() {
    String version = "unknown";
    String metaFile = "/META-INF/maven/abra2/abra2/pom.properties";
    Properties prop = new Properties();
    try {
        URL url = NativeLibraryLoader.class.getResource(metaFile);
        // Guard against a missing resource (previously caused an NPE swallowed
        // by the catch block below).
        if (url != null) {
            // try-with-resources closes the stream on all paths; the original
            // code leaked it when prop.load() threw.
            try (InputStream input = url.openStream()) {
                prop.load(input);
            }
            String v = prop.getProperty("version");
            if (v != null) {
                version = v;
            }
        }
    } catch (Exception e) {
        // Version is informational; log and fall through to "unknown".
        Logger.error("Error reading version from pom.properties: " + e.getMessage());
    }
    return version;
}
/**
 * Reconstruct an approximation of the invoking command line: the jar path
 * (when determinable from the code source) followed by the raw arguments,
 * space separated.
 *
 * @param args program arguments
 * @return reconstructed command line string
 */
private static String getCommandLine(String[] args) {
    String jar = "";
    CodeSource cs = Abra.class.getProtectionDomain().getCodeSource();
    if (cs != null) {
        jar = cs.getLocation().toString();
        if (jar.startsWith("file:")) {
            jar = jar.replaceFirst("file:", "");
        }
    }
    // StringBuilder: single-threaded local use, no synchronization needed.
    StringBuilder cl = new StringBuilder(jar);
    for (String arg : args) {
        cl.append(' ').append(arg);
    }
    return cl.toString();
}
/**
 * Parse command line options, configure a ReAligner accordingly and run the
 * realignment end to end.  Exits the JVM with status -1 on invalid options.
 *
 * @param args raw command line arguments
 */
public static void run(String[] args) throws Exception {
String version = getVersion();
Logger.info("Abra version: " + version);
String cl = getCommandLine(args);
Logger.info("Abra params: [" + cl + "]");
ReAlignerOptions options = new ReAlignerOptions();
options.parseOptions(args);
if (options.isValid()) {
Logger.setLevel(options.getLoggerLevel());
// Assembler tuning, taken verbatim from the parsed options.
AssemblerSettings assemblerSettings = new AssemblerSettings();
assemblerSettings.setKmerSize(options.getKmerSizes());
assemblerSettings.setMinContigLength(options.getMinContigLength());
assemblerSettings.setMinNodeFrequncy(options.getMinNodeFrequency());
assemblerSettings.setMinBaseQuality(options.getMinBaseQuality());
assemblerSettings.setMinReadCandidateFraction(options.getMinReadCandidateFraction());
assemblerSettings.setMaxAverageDepth(options.getMaxAverageRegionDepth());
assemblerSettings.setMinEdgeRatio(options.getMinEdgeRatio());
assemblerSettings.setMaxNodes(options.getMaxNodes());
// Realigner configuration.
ReAligner realigner = new ReAligner();
realigner.setReference(options.getReference());
realigner.setRegionsBed(options.getTargetRegionFile());
realigner.setAssemblerSettings(assemblerSettings);
realigner.setNumThreads(options.getNumThreads());
realigner.isPairedEnd = options.isPairedEnd();
realigner.minMappingQuality = options.getMinimumMappingQuality();
realigner.maxMismatchRate = options.getMaxMismatchRate();
realigner.maxReadsInRegion = options.getMaxReadsInRegion();
realigner.hasPresetKmers = options.hasPresetKmers();
realigner.isSkipAssembly = options.isSkipAssembly();
realigner.useObservedIndels = options.useObservedIndels();
realigner.useConsensusSeq = options.useConsensusSequence();
realigner.isKeepTmp = options.isKeepTmp();
realigner.tmpDir = options.getTmpDir();
realigner.useSoftClippedReads = options.useSoftClippedReads();
realigner.junctionFile = options.getJunctionFile();
realigner.gtfJunctionFile = options.getGtfJunctionFile();
realigner.contigFile = options.getContigFile();
realigner.swScoring = options.getSmithWatermanScoring();
realigner.softClipParams = options.getSoftClipParams();
realigner.maxCachedReads = options.getMaxCachedReads();
realigner.finalCompressionLevel = options.getCompressionLevel();
realigner.minAnchorLen = options.getContigAnchor()[0];
realigner.maxAnchorMismatches = options.getContigAnchor()[1];
// Global windowing parameters (static fields shared across instances).
MAX_REGION_LENGTH = options.getWindowSize();
MIN_REGION_REMAINDER = options.getWindowOverlap();
REGION_OVERLAP = options.getWindowOverlap();
realigner.cl = cl.toString();
realigner.version = version;
long s = System.currentTimeMillis();
realigner.reAlign(options.getInputFiles(), options.getOutputFiles());
long e = System.currentTimeMillis();
Logger.info("Elapsed seconds: " + (e - s) / 1000);
} else {
System.exit(-1);
}
}
/**
 * Ad-hoc developer entry point with hard-coded local test paths; production
 * use goes through the Abra launcher, which ultimately calls run(String[]).
 * Note: the supplied {@code args} are ignored.
 */
public static void main(String[] args) throws Exception {
// String inp = "--in /home/lmose/dev/ayc/opt/mem/test_tumor.bam --kmer 43 --mc-mapq 25 --mcl 101 --mcr -1.0 --mnf 2 --umnf 2 --mpc 50000 --out /home/lmose/dev/ayc/opt/mem/test_tumor.abra.bam --ref /home/lmose/reference/test/test.fa --targets /home/lmose/dev/ayc/opt/mem/test.gtf --threads 2 --working /home/lmose/dev/ayc/opt/mem/work1 --mur 50000000 --no-unalign --mbq 20 --rcf .02";
String inp = "--in /home/lmose/dev/ayc/opt/mem/test_tumor.bam --kmer 43 --out /home/lmose/dev/ayc/opt/mem/test_tumor.abra3.bam --ref /home/lmose/reference/test/test.fa --targets /home/lmose/dev/ayc/opt/mem/test2.bed --threads 2 --working /home/lmose/dev/ayc/opt/mem/work3";
run(inp.split("\\s+"));
}
}
|
package apoc.load;
import apoc.Description;
import apoc.result.RowResult;
import apoc.ApocConfiguration;
import org.neo4j.logging.Log;
import org.neo4j.procedure.Context;
import org.neo4j.procedure.Name;
import org.neo4j.procedure.Procedure;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.sql.*;
import java.util.*;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
/**
* @author mh
* @since 26.02.16
*/
public class Jdbc {

    static {
        // Eagerly register every JDBC driver configured under apoc.jdbc.<key>.driver.
        ApocConfiguration.get("jdbc").forEach((k, v) -> {
            if (k.endsWith("driver")) loadDriver(v.toString());
        });
    }

    @Context
    public Log log;

    @Procedure
    @Description("apoc.load.driver('org.apache.derby.jdbc.EmbeddedDriver') register JDBC driver of source database")
    public void driver(@Name("driverClass") String driverClass) {
        loadDriver(driverClass);
    }

    /**
     * Loads the named JDBC driver class, converting the checked
     * ClassNotFoundException into an unchecked failure.
     */
    private static void loadDriver(@Name("driverClass") String driverClass) {
        try {
            Class.forName(driverClass);
        } catch (ClassNotFoundException e) {
            throw new RuntimeException("Could not load driver class "+driverClass+" "+e.getMessage());
        }
    }

    @Procedure
    @Description("apoc.load.jdbc('key or url','table or statement') YIELD row - load from relational database, from a full table or a sql statement")
    public Stream<RowResult> jdbc(@Name("jdbc") String urlOrKey, @Name("tableOrSql") String tableOrSelect) {
        return executeQuery(urlOrKey, tableOrSelect);
    }

    @Procedure
    @Description("apoc.load.jdbcParams('key or url','statement',[params]) YIELD row - load from relational database, from a sql statement with parameters")
    public Stream<RowResult> jdbcParams(@Name("jdbc") String urlOrKey, @Name("sql") String select, @Name("params") List<Object> params) {
        return executeQuery(urlOrKey, select,params.toArray(new Object[params.size()]));
    }

    /**
     * Opens a connection, runs the query and streams the rows lazily.
     * The statement and connection are released via Stream.onClose, so callers
     * (or the procedure framework) must close the returned stream.
     */
    private Stream<RowResult> executeQuery(@Name("jdbc") String urlOrKey, @Name("tableOrSql") String tableOrSelect, Object...params) {
        // A value containing ':' is treated as a full JDBC url, otherwise as a config key.
        String url = urlOrKey.contains(":") ? urlOrKey : getJdbcUrl(urlOrKey);
        // A single word is treated as a table name, anything with a space as a full statement.
        String query = tableOrSelect.indexOf(' ') == -1 ? "SELECT * FROM " + tableOrSelect : tableOrSelect;
        Connection connection = null;
        PreparedStatement stmt = null;
        try {
            connection = DriverManager.getConnection(url);
            stmt = connection.prepareStatement(query);
            for (int i = 0; i < params.length; i++) stmt.setObject(i+1, params[i]);
            ResultSet rs = stmt.executeQuery();
            Iterator<Map<String, Object>> supplier = new ResultSetIterator(rs);
            Spliterator<Map<String, Object>> spliterator = Spliterators.spliteratorUnknownSize(supplier, Spliterator.ORDERED);
            // Effectively-final aliases for the onClose lambda.
            final PreparedStatement ps = stmt;
            final Connection conn = connection;
            return StreamSupport.stream(spliterator, false).map(RowResult::new).onClose(() -> closeIt(ps, conn));
        } catch (SQLException e) {
            // Fix: release any already-acquired resources instead of leaking them
            // when prepareStatement/executeQuery fails after the connection opened.
            closeIt(stmt, connection);
            log.error(String.format("Cannot execute SQL statement `%s`.%nError:%n%s", query, e.getMessage()),e);
            throw new RuntimeException(String.format("Cannot execute SQL statement `%s`.%nError:%n%s", query, e.getMessage()), e);
        }
    }

    /** Best-effort close of the given resources; nulls and close failures are ignored. */
    static void closeIt(AutoCloseable...closeables) {
        for (AutoCloseable c : closeables) {
            if (c == null) continue; // tolerate resources that were never acquired
            try {
                c.close();
            } catch (Exception ignored) {
                // deliberately ignored: closing is best effort
            }
        }
    }

    /** Resolves a config key to its apoc.jdbc.<key>.url value. */
    private static String getJdbcUrl(String key) {
        Object value = ApocConfiguration.get("jdbc").get(key + ".url");
        if (value == null) throw new RuntimeException("No apoc.jdbc."+key+".url jdbc url specified");
        return value.toString();
    }

    /**
     * Iterator that lazily pulls rows from a ResultSet, exposing each row as an
     * insertion-ordered column-name -> value map. The underlying statement is
     * closed once the result set is exhausted.
     */
    private static class ResultSetIterator implements Iterator<Map<String, Object>> {
        private final ResultSet rs;
        // Column names, 1-based to mirror JDBC indexing (index 0 is unused).
        private final String[] columns;
        // Next row to hand out; null once the result set is exhausted.
        private Map<String, Object> map;

        public ResultSetIterator(ResultSet rs) throws SQLException {
            this.rs = rs;
            this.columns = getMetaData(rs);
            this.map = get();
        }

        private String[] getMetaData(ResultSet rs) throws SQLException {
            ResultSetMetaData meta = rs.getMetaData();
            int cols = meta.getColumnCount();
            String[] columns = new String[cols + 1];
            for (int col = 1; col <= cols; col++) {
                columns[col] = meta.getColumnName(col);
            }
            return columns;
        }

        @Override
        public boolean hasNext() {
            return this.map != null;
        }

        @Override
        public Map<String, Object> next() {
            // Fix: honour the Iterator contract instead of silently returning null.
            if (this.map == null) throw new NoSuchElementException();
            Map<String, Object> current = this.map;
            this.map = get();
            return current;
        }

        /** Fetches the next row, or null at end of results. */
        public Map<String, Object> get() {
            try {
                if (handleEndOfResults()) return null;
                Map<String, Object> row = new LinkedHashMap<>(columns.length);
                for (int col = 1; col < columns.length; col++) {
                    row.put(columns[col], convert(rs.getObject(col)));
                }
                return row;
            } catch (SQLException e) {
                throw new RuntimeException("Cannot execute read result-set.", e);
            }
        }

        // Renders UUID/BigInteger/BigDecimal values as strings; other values pass through.
        private Object convert(Object value) {
            if (value instanceof UUID || value instanceof BigInteger || value instanceof BigDecimal) {
                return value.toString();
            }
            return value;
        }

        private boolean handleEndOfResults() throws SQLException {
            if (rs.isClosed()) {
                return true;
            }
            if (!rs.next()) {
                if (!rs.isClosed()) {
                    // Closing the statement also closes its result set.
                    rs.getStatement().close();
                }
                return true;
            }
            return false;
        }
    }
}
|
package dominio;
/**
 * Human race: one of the playable character types, inheriting the
 * attributes and behaviour of {@link Personaje}.
 */
public class Humano extends Personaje {

    /**
     * Creates a new human character; humans receive +5 maximum health and
     * +5 maximum energy, and start at full health/energy.
     */
    public Humano(String nombre, Casta casta, int id) {
        super(nombre, casta, id);
        saludTope += 5;
        energiaTope += 5;
        salud = saludTope;
        energia = energiaTope;
        nombreRaza = "Humano";
        // Fix: initialise the race skill names here as well, mirroring the other
        // constructor; previously this path left habilidadesRaza null.
        habilidadesRaza = new String[2];
        habilidadesRaza[0] = "Incentivar";
        habilidadesRaza[1] = "Golpe Fatal";
    }

    /**
     * Restores a human character from persisted stats.
     */
    public Humano(String nombre, int salud, int energia, int fuerza, int destreza, int inteligencia, Casta casta, int experiencia, int nivel, int idPersonaje) {
        super(nombre, salud, energia, fuerza, destreza, inteligencia, casta, experiencia, nivel, idPersonaje);
        nombreRaza = "Humano";
        habilidadesRaza = new String[2];
        habilidadesRaza[0] = "Incentivar";
        habilidadesRaza[1] = "Golpe Fatal";
    }

    // Incentivar: spends 10 energy to raise the target's attack by this
    // character's magic. Returns whether the skill could be used.
    @Override
    public boolean habilidadRaza1(Peleable atacado) {
        if (this.getEnergia() > 10) {
            this.setEnergia(this.getEnergia() - 10);
            atacado.setAtaque(atacado.getAtaque() + this.getMagia());
            return true;
        }
        return false;
    }

    // Golpe Fatal: attacks for half the target's health; on a successful hit
    // the attacker's energy is halved, on a failed hit it drops by 10.
    @Override
    public boolean habilidadRaza2(Peleable atacado) {
        if (this.getEnergia() > 10) {
            if (atacado.serAtacado(atacado.getSalud() / 2) > 0) {
                this.setEnergia(this.getEnergia() / 2);
                return true;
            }
            // Fix: the 10-point cost of a failed attempt is only paid when the
            // skill could actually be attempted; previously energy was drained
            // even at 10 or below, which could drive it negative.
            this.setEnergia(this.getEnergia() - 10);
        }
        return false;
    }
}
|
package items;
import java.util.HashMap;
/**
 * Common base class for every concrete item type.
 * Holds the identity shared by all items: a numeric id and a display name.
 */
public abstract class BaseItem {

    // Identity fields are intentionally public, matching existing callers.
    public int id;
    public String name;

    /**
     * Builds an item from its identity.
     *
     * @param id   numeric identifier of the item
     * @param name human-readable name of the item
     */
    public BaseItem(int id, String name) {
        this.id = id;
        this.name = name;
    }

    /**
     * Serialises this item's specification into key/value arguments
     * suitable for sending.
     *
     * @return the item rendered as argument pairs
     */
    public abstract HashMap<String, String> getSpecAsArguments();

    /**
     * @return a short human-readable description of the item
     */
    @Override
    public String toString() {
        return String.format("Name : %s", name);
    }
}
|
package org.g_node;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.g_node.crawler.Controller;
import org.g_node.crawler.LKTLogbook.LKTLogController;
import org.g_node.crawler.LKTLogbook.LKTLogParser;
/**
* Main application class used to parse command line input and pass
* information to the appropriate modules.
*
* This application is a prototype, don't hate me if stuff is partially suboptimal or outright sucks.
*
* @author Michael Sonntag (sonntag@bio.lmu.de)
*/
public class App {
    /**
     * Registry containing all crawlers implemented
     * and available to this application.
     */
    private final Map<String, Controller> crawlers = new HashMap<>();

    /**
     * Constructor.
     */
    App() {
    }

    /**
     * Main method of the crawler-to-rdf framework. Registers all so far available crawlers and
     * selects and runs the appropriate crawler dependent on commandline input.
     * @param args User provided commandline arguments.
     */
    public static void main(final String[] args) {
        final App application = new App();
        application.register();
        application.run(args);
    }

    /**
     * Method to register all implemented crawlers with their short hand.
     * The short hand is required to select and run the intended crawler.
     */
    public final void register() {
        this.crawlers.put("lkt", new LKTLogController(new LKTLogParser()));
    }

    /**
     * Method to parse the commandline arguments, provide
     * appropriate error messages if required and run the selected
     * crawler. The first command line argument has to be the
     * shorthand of the required crawler.
     * @param args User provided commandline arguments.
     */
    public final void run(final String[] args) {
        // Guard clause: no crawler selected at all.
        if (args.length < 1) {
            System.err.println(
                String.join(
                    "", "No crawler selected!",
                    "\n Please use syntax: 'java -jar crawler-to-rdf.jar [crawler] [crawler options]'",
                    "\n e.g. 'java -jar crawler-to-rdf.jar lkt -i labbook.ods -o out.ttl'",
                    "\n Currently available crawlers: ", this.crawlers.keySet().toString()
                )
            );
            return;
        }
        // Guard clause: unknown crawler shorthand.
        if (!this.crawlers.containsKey(args[0])) {
            System.err.println(
                String.join(
                    "", "Oh no, selected crawler '", args[0], "' does not exist!",
                    "\n Please use syntax: 'java crawler-to-rdf.jar [crawler] [crawler options]'",
                    "\n e.g. 'java crawler-to-rdf.jar lkt -i labbook.ods -o out.ttl'",
                    "\n Currently available crawlers: ", this.crawlers.keySet().toString()
                )
            );
            return;
        }
        final HelpFormatter helpPrinter = new HelpFormatter();
        final CommandLineParser cliParser = new DefaultParser();
        final Controller selectedController = this.crawlers.get(args[0]);
        final Options cliOptions = selectedController.options(this.crawlers.keySet());
        try {
            final CommandLine cmd = cliParser.parse(cliOptions, args, false);
            if (cmd.hasOption("h")) {
                helpPrinter.printHelp("Help", cliOptions);
                return;
            }
            selectedController.run(cmd);
        } catch (final ParseException exp) {
            helpPrinter.printHelp("Help", cliOptions);
            System.err.println(
                String.join("", "\n[Error] ", exp.getMessage(), "\n")
            );
        }
    }
}
|
package org.zeromq;
import java.io.Closeable;
import java.nio.ByteBuffer;
import java.nio.channels.SelectableChannel;
import java.nio.channels.Selector;
import java.nio.charset.Charset;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.ArrayList;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import org.zeromq.proto.ZPicture;
import zmq.Ctx;
import zmq.Options;
import zmq.SocketBase;
import zmq.ZError;
import zmq.ZError.CtxTerminatedException;
import zmq.io.coder.IDecoder;
import zmq.io.coder.IEncoder;
import zmq.io.mechanism.Mechanisms;
import zmq.io.net.SelectorProviderChooser;
import zmq.msg.MsgAllocator;
import zmq.util.Draft;
import zmq.util.Z85;
public class ZMQ
{
/**
* Socket flag to indicate that more message parts are coming.
*/
public static final int SNDMORE = zmq.ZMQ.ZMQ_SNDMORE;
// Values for flags in Socket's send and recv functions.
/**
* Socket flag to indicate a nonblocking send or recv mode.
*/
public static final int DONTWAIT = zmq.ZMQ.ZMQ_DONTWAIT;
public static final int NOBLOCK = zmq.ZMQ.ZMQ_DONTWAIT;
// Socket types, used when creating a Socket. Note that all of the int types here are
// deprecated; use SocketType instead.
@Deprecated
public static final int PAIR = zmq.ZMQ.ZMQ_PAIR;
@Deprecated
public static final int PUB = zmq.ZMQ.ZMQ_PUB;
@Deprecated
public static final int SUB = zmq.ZMQ.ZMQ_SUB;
@Deprecated
public static final int REQ = zmq.ZMQ.ZMQ_REQ;
@Deprecated
public static final int REP = zmq.ZMQ.ZMQ_REP;
@Deprecated
public static final int DEALER = zmq.ZMQ.ZMQ_DEALER;
/**
* Old alias for DEALER flag.
* Flag to specify a XREQ socket, receiving side must be a XREP.
*
* @deprecated As of release 3.0 of zeromq, replaced by {@link #DEALER}
*/
@Deprecated
public static final int XREQ = DEALER;
@Deprecated
public static final int ROUTER = zmq.ZMQ.ZMQ_ROUTER;
/**
* Old alias for ROUTER flag.
* Flag to specify the receiving part of a XREQ socket.
*
* @deprecated As of release 3.0 of zeromq, replaced by {@link #ROUTER}
*/
@Deprecated
public static final int XREP = ROUTER;
@Deprecated
public static final int PULL = zmq.ZMQ.ZMQ_PULL;
@Deprecated
public static final int PUSH = zmq.ZMQ.ZMQ_PUSH;
@Deprecated
public static final int XPUB = zmq.ZMQ.ZMQ_XPUB;
@Deprecated
public static final int XSUB = zmq.ZMQ.ZMQ_XSUB;
@Deprecated
public static final int STREAM = zmq.ZMQ.ZMQ_STREAM;
/**
* Flag to specify a STREAMER device.
*/
@Deprecated
public static final int STREAMER = zmq.ZMQ.ZMQ_STREAMER;
/**
* Flag to specify a FORWARDER device.
*/
@Deprecated
public static final int FORWARDER = zmq.ZMQ.ZMQ_FORWARDER;
/**
* Flag to specify a QUEUE device.
*/
@Deprecated
public static final int QUEUE = zmq.ZMQ.ZMQ_QUEUE;
/**
* @see org.zeromq.ZMQ#PULL
*/
@Deprecated
public static final int UPSTREAM = PULL;
/**
* @see org.zeromq.ZMQ#PUSH
*/
@Deprecated
public static final int DOWNSTREAM = PUSH;
/**
* EVENT_CONNECTED: connection established.
* The EVENT_CONNECTED event triggers when a connection has been
* established to a remote peer. This can happen either synchronous
* or asynchronous. Value is the FD of the newly connected socket.
*/
public static final int EVENT_CONNECTED = zmq.ZMQ.ZMQ_EVENT_CONNECTED;
/**
* EVENT_CONNECT_DELAYED: synchronous connect failed, it's being polled.
* The EVENT_CONNECT_DELAYED event triggers when an immediate connection
* attempt is delayed and its completion is being polled for. Value has
* no meaning.
*/
public static final int EVENT_CONNECT_DELAYED = zmq.ZMQ.ZMQ_EVENT_CONNECT_DELAYED;
/**
* @see org.zeromq.ZMQ#EVENT_CONNECT_DELAYED
*/
@Deprecated
public static final int EVENT_DELAYED = EVENT_CONNECT_DELAYED;
/**
* EVENT_CONNECT_RETRIED: asynchronous connect / reconnection attempt.
* The EVENT_CONNECT_RETRIED event triggers when a connection attempt is
* being handled by reconnect timer. The reconnect interval's recomputed
* for each attempt. Value is the reconnect interval.
*/
public static final int EVENT_CONNECT_RETRIED = zmq.ZMQ.ZMQ_EVENT_CONNECT_RETRIED;
/**
* @see org.zeromq.ZMQ#EVENT_CONNECT_RETRIED
*/
@Deprecated
public static final int EVENT_RETRIED = EVENT_CONNECT_RETRIED;
/**
* EVENT_LISTENING: socket bound to an address, ready to accept connections.
* The EVENT_LISTENING event triggers when a socket is successfully bound to
* an interface. Value is the FD of the newly bound socket.
*/
public static final int EVENT_LISTENING = zmq.ZMQ.ZMQ_EVENT_LISTENING;
/**
* EVENT_BIND_FAILED: socket could not bind to an address.
* The EVENT_BIND_FAILED event triggers when a socket could not bind to a
* given interface. Value is the errno generated by the bind call.
*/
public static final int EVENT_BIND_FAILED = zmq.ZMQ.ZMQ_EVENT_BIND_FAILED;
/**
* EVENT_ACCEPTED: connection accepted to bound interface.
* The EVENT_ACCEPTED event triggers when a connection from a remote peer
* has been established with a socket's listen address. Value is the FD of
* the accepted socket.
*/
public static final int EVENT_ACCEPTED = zmq.ZMQ.ZMQ_EVENT_ACCEPTED;
/**
* EVENT_ACCEPT_FAILED: could not accept client connection.
* The EVENT_ACCEPT_FAILED event triggers when a connection attempt to a
* socket's bound address fails. Value is the errno generated by accept.
*/
public static final int EVENT_ACCEPT_FAILED = zmq.ZMQ.ZMQ_EVENT_ACCEPT_FAILED;
/**
* EVENT_CLOSED: connection closed.
* The EVENT_CLOSED event triggers when a connection's underlying
* descriptor has been closed. Value is the former FD of the
* closed socket. The FD has been closed already!
*/
public static final int EVENT_CLOSED = zmq.ZMQ.ZMQ_EVENT_CLOSED;
/**
* EVENT_CLOSE_FAILED: connection couldn't be closed.
* The EVENT_CLOSE_FAILED event triggers when a descriptor could not be
* released back to the OS. Implementation note: ONLY FOR IPC SOCKETS.
* Value is the errno generated by unlink.
*/
public static final int EVENT_CLOSE_FAILED = zmq.ZMQ.ZMQ_EVENT_CLOSE_FAILED;
/**
* EVENT_DISCONNECTED: broken session.
* The EVENT_DISCONNECTED event triggers when the stream engine (tcp and
* ipc specific) detects a corrupted / broken session. Value is the FD of
* the socket.
*/
public static final int EVENT_DISCONNECTED = zmq.ZMQ.ZMQ_EVENT_DISCONNECTED;
/**
* EVENT_MONITOR_STOPPED: monitor has been stopped.
* The EVENT_MONITOR_STOPPED event triggers when the monitor for a socket is
* stopped.
*/
public static final int EVENT_MONITOR_STOPPED = zmq.ZMQ.ZMQ_EVENT_MONITOR_STOPPED;
/**
* EVENT_HANDSHAKE_PROTOCOL: protocol has been successfully negotiated.
* The EVENT_HANDSHAKE_PROTOCOL event triggers when the stream engine (tcp and ipc)
* successfully negotiated a protocol version with the peer. Value is the version number
* (0 for unversioned, 3 for V3).
*/
public static final int EVENT_HANDSHAKE_PROTOCOL = zmq.ZMQ.ZMQ_EVENT_HANDSHAKE_PROTOCOL;
/**
* EVENT_ALL: all events known.
* The EVENT_ALL constant can be used to set up a monitor for all known events.
*/
public static final int EVENT_ALL = zmq.ZMQ.ZMQ_EVENT_ALL;
public static final byte[] MESSAGE_SEPARATOR = zmq.ZMQ.MESSAGE_SEPARATOR;
public static final byte[] SUBSCRIPTION_ALL = zmq.ZMQ.SUBSCRIPTION_ALL;
public static final byte[] PROXY_PAUSE = zmq.ZMQ.PROXY_PAUSE;
public static final byte[] PROXY_RESUME = zmq.ZMQ.PROXY_RESUME;
public static final byte[] PROXY_TERMINATE = zmq.ZMQ.PROXY_TERMINATE;
public static final Charset CHARSET = zmq.ZMQ.CHARSET;
// Namespace class only: the private constructor prevents instantiation.
private ZMQ()
{
}
/**
 * Create a new Context.
 *
 * @param ioThreads Number of I/O threads to use; usually 1 is sufficient for most use cases.
 * @return the newly created Context
 */
public static Context context(int ioThreads)
{
    return new Context(ioThreads);
}
/**
 * Starts a built-in 0MQ device in the current thread by delegating to the proxy.
 *
 * @param type device type; note it is ignored by this implementation - presumably
 *             kept only for API compatibility (TODO confirm)
 * @param frontend frontend socket of the device
 * @param backend backend socket of the device
 * @return the result of the underlying proxy call
 * @deprecated use {@link #proxy(Socket, Socket, Socket)} instead
 */
@Deprecated
public static boolean device(int type, Socket frontend, Socket backend)
{
    return zmq.ZMQ.proxy(frontend.base, backend.base, null);
}
/**
 * Starts the built-in 0MQ proxy in the current application thread.
 * The proxy connects a frontend socket to a backend socket; conceptually, data flows
 * from frontend to backend and, depending on the socket types, replies may flow in
 * the opposite direction. The direction is conceptual only; the proxy is fully
 * symmetric and there is no technical difference between frontend and backend.
 * <p>
 * Before calling ZMQ.proxy() you must set any socket options, and connect or bind
 * both frontend and backend sockets.
 * <p>
 * ZMQ.proxy() runs in the current thread and returns only if/when the current
 * context is closed.
 *
 * @param frontend ZMQ.Socket
 * @param backend ZMQ.Socket
 * @param capture If the capture socket is not NULL, the proxy shall send all messages,
 *                received on both frontend and backend, to the capture socket. The
 *                capture socket should be a ZMQ_PUB, ZMQ_DEALER, ZMQ_PUSH, or
 *                ZMQ_PAIR socket.
 */
public static boolean proxy(Socket frontend, Socket backend, Socket capture)
{
    SocketBase captureBase = capture == null ? null : capture.base;
    return zmq.ZMQ.proxy(frontend.base, backend.base, captureBase);
}
/**
 * Starts the built-in 0MQ proxy, like {@link #proxy(Socket, Socket, Socket)},
 * with an additional control socket.
 *
 * @param frontend frontend socket of the proxy
 * @param backend backend socket of the proxy
 * @param capture capture socket, or null for no capture
 * @param control control socket, or null - presumably used to steer the proxy
 *                (pause/resume/terminate); TODO confirm against zmq_proxy_steerable docs
 * @return the result of the underlying proxy call
 */
public static boolean proxy(Socket frontend, Socket backend, Socket capture, Socket control)
{
    return zmq.ZMQ.proxy(
        frontend.base,
        backend.base,
        capture == null ? null : capture.base,
        control == null ? null : control.base);
}
/**
 * Polls all given items, using {@code items.length} as the item count.
 *
 * @param selector selector used for polling
 * @param items poll items to wait on
 * @param timeout poll timeout - presumably milliseconds, with a negative value
 *                blocking indefinitely; TODO confirm against zmq.ZMQ.poll
 * @return the number of items with signalled events
 */
public static int poll(Selector selector, PollItem[] items, long timeout)
{
    return poll(selector, items, items.length, timeout);
}
/**
 * Polls the first {@code count} items.
 *
 * @param selector selector used for polling
 * @param items poll items to wait on
 * @param count number of leading items to poll
 * @param timeout poll timeout, forwarded unchanged to the low-level poll
 * @return the number of items with signalled events
 */
public static int poll(Selector selector, PollItem[] items, int count, long timeout)
{
    // Unwrap the high-level items into their low-level counterparts.
    zmq.poll.PollItem[] rawItems = new zmq.poll.PollItem[count];
    for (int index = 0; index < count; index++) {
        rawItems[index] = items[index].base;
    }
    return zmq.ZMQ.poll(selector, rawItems, count, timeout);
}
/**
 * @return Major version number of the ZMQ library (ZMQ_VERSION_MAJOR).
 */
public static int getMajorVersion()
{
    return zmq.ZMQ.ZMQ_VERSION_MAJOR;
}
/**
 * @return Minor version number of the ZMQ library (ZMQ_VERSION_MINOR).
 */
public static int getMinorVersion()
{
    return zmq.ZMQ.ZMQ_VERSION_MINOR;
}
/**
 * @return Patch version number of the ZMQ library (ZMQ_VERSION_PATCH).
 */
public static int getPatchVersion()
{
    return zmq.ZMQ.ZMQ_VERSION_PATCH;
}
/**
 * @return Full version number of the ZMQ library used for comparing versions.
 * @see #makeVersion(int, int, int)
 */
public static int getFullVersion()
{
    return zmq.ZMQ.makeVersion(zmq.ZMQ.ZMQ_VERSION_MAJOR, zmq.ZMQ.ZMQ_VERSION_MINOR, zmq.ZMQ.ZMQ_VERSION_PATCH);
}
/**
 * @param major Version major component.
 * @param minor Version minor component.
 * @param patch Version patch component.
 * @return Comparable single int version number.
 */
public static int makeVersion(final int major, final int minor, final int patch)
{
    return zmq.ZMQ.makeVersion(major, minor, patch);
}
/**
 * @return String version number in the form major.minor.patch.
 */
public static String getVersionString()
{
    return String.format("%d.%d.%d",
                         zmq.ZMQ.ZMQ_VERSION_MAJOR,
                         zmq.ZMQ.ZMQ_VERSION_MINOR,
                         zmq.ZMQ.ZMQ_VERSION_PATCH);
}
/**
 * Sleeps for the given number of milliseconds, delegating to zmq.ZMQ.msleep.
 *
 * @param millis sleep duration in milliseconds
 */
public static void msleep(long millis)
{
    zmq.ZMQ.msleep(millis);
}
/**
 * Sleeps for the given number of seconds, delegating to zmq.ZMQ.sleep.
 *
 * @param seconds sleep duration in seconds
 */
public static void sleep(long seconds)
{
    zmq.ZMQ.sleep(seconds);
}
/**
 * Sleeps for the given duration, delegating to zmq.ZMQ.sleep.
 *
 * @param amount sleep duration in the given unit
 * @param unit time unit of the amount
 */
public static void sleep(long amount, TimeUnit unit)
{
    zmq.ZMQ.sleep(amount, unit);
}
/**
 * High-level mirror of the low-level ZError codes. Constants that override
 * {@link #getMessage()} provide a human-readable description; the rest fall
 * back to "errno <code>".
 */
public enum Error
{
    ENOTSUP(ZError.ENOTSUP) {
        @Override
        public String getMessage()
        {
            return "Not supported";
        }
    },
    EPROTONOSUPPORT(ZError.EPROTONOSUPPORT) {
        @Override
        public String getMessage()
        {
            return "Protocol not supported";
        }
    },
    ENOBUFS(ZError.ENOBUFS) {
        @Override
        public String getMessage()
        {
            return "No buffer space available";
        }
    },
    ENETDOWN(ZError.ENETDOWN) {
        @Override
        public String getMessage()
        {
            return "Network is down";
        }
    },
    EADDRINUSE(ZError.EADDRINUSE) {
        @Override
        public String getMessage()
        {
            return "Address already in use";
        }
    },
    EADDRNOTAVAIL(ZError.EADDRNOTAVAIL) {
        @Override
        public String getMessage()
        {
            return "Address not available";
        }
    },
    ECONNREFUSED(ZError.ECONNREFUSED) {
        @Override
        public String getMessage()
        {
            return "Connection refused";
        }
    },
    EINPROGRESS(ZError.EINPROGRESS) {
        @Override
        public String getMessage()
        {
            return "Operation in progress";
        }
    },
    EHOSTUNREACH(ZError.EHOSTUNREACH) {
        @Override
        public String getMessage()
        {
            return "Host unreachable";
        }
    },
    EMTHREAD(ZError.EMTHREAD) {
        @Override
        public String getMessage()
        {
            return "No thread available";
        }
    },
    EFSM(ZError.EFSM) {
        @Override
        public String getMessage()
        {
            return "Operation cannot be accomplished in current state";
        }
    },
    ENOCOMPATPROTO(ZError.ENOCOMPATPROTO) {
        @Override
        public String getMessage()
        {
            return "The protocol is not compatible with the socket type";
        }
    },
    ETERM(ZError.ETERM) {
        @Override
        public String getMessage()
        {
            return "Context was terminated";
        }
    },
    ENOTSOCK(ZError.ENOTSOCK),
    EAGAIN(ZError.EAGAIN),
    ENOENT(ZError.ENOENT),
    EINTR(ZError.EINTR),
    EACCESS(ZError.EACCESS),
    EFAULT(ZError.EFAULT),
    EINVAL(ZError.EINVAL),
    EISCONN(ZError.EISCONN),
    ENOTCONN(ZError.ENOTCONN),
    EMSGSIZE(ZError.EMSGSIZE),
    EAFNOSUPPORT(ZError.EAFNOSUPPORT),
    ENETUNREACH(ZError.ENETUNREACH),
    ECONNABORTED(ZError.ECONNABORTED),
    ECONNRESET(ZError.ECONNRESET),
    ETIMEDOUT(ZError.ETIMEDOUT),
    ENETRESET(ZError.ENETRESET),
    EIOEXC(ZError.EIOEXC),
    ESOCKET(ZError.ESOCKET),
    EMFILE(ZError.EMFILE),
    EPROTO(ZError.EPROTO);

    // Lookup table from numeric code to enum constant, built once at class load.
    private static final Map<Integer, Error> map = new HashMap<>(Error.values().length);
    static {
        for (Error e : Error.values()) {
            map.put(e.code, e);
        }
    }

    private final int code;

    Error(int code)
    {
        this.code = code;
    }

    /**
     * Looks up the enum constant for a numeric error code.
     *
     * @param code numeric error code
     * @return the matching Error constant
     * @throws IllegalArgumentException if the code is unknown
     */
    public static Error findByCode(int code)
    {
        // Single map lookup instead of the previous containsKey + get pair.
        Error error = map.get(code);
        if (error == null) {
            throw new IllegalArgumentException("Unknown " + Error.class.getName() + " enum code: " + code);
        }
        return error;
    }

    /** @return the numeric error code of this constant. */
    public int getCode()
    {
        return code;
    }

    /** @return a human-readable message; defaults to "errno <code>". */
    public String getMessage()
    {
        return "errno " + Integer.toString(code);
    }
}
/**
* Container for all sockets in a single process,
* acting as the transport for inproc sockets,
* which are the fastest way to connect threads in one process.
*/
public static class Context implements Closeable
{
    // Set exactly once by term()/close(); guards ctx.terminate() from running twice.
    private final AtomicBoolean closed = new AtomicBoolean(false);
    private final Ctx ctx;

    /**
     * Class constructor.
     *
     * @param ioThreads size of the threads pool to handle I/O operations.
     */
    protected Context(int ioThreads)
    {
        ctx = zmq.ZMQ.init(ioThreads);
    }

    /**
     * Returns true if terminate() has been called on ctx.
     */
    public boolean isTerminated()
    {
        return !ctx.isActive();
    }

    /**
     * The size of the 0MQ thread pool to handle I/O operations.
     */
    public int getIOThreads()
    {
        return ctx.get(zmq.ZMQ.ZMQ_IO_THREADS);
    }

    /**
     * Set the size of the 0MQ thread pool to handle I/O operations.
     */
    public boolean setIOThreads(int ioThreads)
    {
        return ctx.set(zmq.ZMQ.ZMQ_IO_THREADS, ioThreads);
    }

    /**
     * The maximum number of sockets allowed on the context
     */
    public int getMaxSockets()
    {
        return ctx.get(zmq.ZMQ.ZMQ_MAX_SOCKETS);
    }

    /**
     * Sets the maximum number of sockets allowed on the context
     */
    public boolean setMaxSockets(int maxSockets)
    {
        return ctx.set(zmq.ZMQ.ZMQ_MAX_SOCKETS, maxSockets);
    }

    /**
     * @deprecated use {@link #isBlocky()} instead
     */
    @Deprecated
    public boolean getBlocky()
    {
        return isBlocky();
    }

    /**
     * Whether the ZMQ_BLOCKY option is set on this context - presumably
    * controls blocking behaviour on termination; confirm against libzmq docs.
     */
    public boolean isBlocky()
    {
        return ctx.get(zmq.ZMQ.ZMQ_BLOCKY) != 0;
    }

    /** Sets the ZMQ_BLOCKY option on this context. */
    public boolean setBlocky(boolean block)
    {
        return ctx.set(zmq.ZMQ.ZMQ_BLOCKY, block ? 1 : 0);
    }

    /** Whether the ZMQ_IPV6 option is enabled on this context. */
    public boolean isIPv6()
    {
        return ctx.get(zmq.ZMQ.ZMQ_IPV6) != 0;
    }

    /** Alias of {@link #isIPv6()}. */
    public boolean getIPv6()
    {
        return isIPv6();
    }

    /** Enables or disables the ZMQ_IPV6 option on this context. */
    public boolean setIPv6(boolean ipv6)
    {
        return ctx.set(zmq.ZMQ.ZMQ_IPV6, ipv6 ? 1 : 0);
    }

    /**
     * This is an explicit "destructor". It can be called to ensure the corresponding 0MQ
     * Context has been disposed of.
     */
    public void term()
    {
        // compareAndSet guarantees terminate() runs at most once.
        if (closed.compareAndSet(false, true)) {
            ctx.terminate();
        }
    }

    /** Returns true once {@link #term()} (or {@link #close()}) has been called. */
    public boolean isClosed()
    {
        return closed.get();
    }

    /** Creates a new Socket of the given type within this context. */
    public Socket socket(SocketType type)
    {
        return new Socket(this, type);
    }

    /**
     * @deprecated use {@link #socket(SocketType)} instead
     */
    @Deprecated
    public Socket socket(int type)
    {
        return socket(SocketType.type(type));
    }

    /**
     * Create a new Selector within this context.
     *
     * @return the newly created Selector.
     */
    public Selector selector()
    {
        return ctx.createSelector();
    }

    /** Closes a Selector previously created by {@link #selector()}. */
    public boolean close(Selector selector)
    {
        return ctx.closeSelector(selector);
    }

    /**
     * Create a new Poller within this context, with a default size.
     * DO NOT FORGET TO CLOSE THE POLLER AFTER USE with {@link Poller#close()}
     *
     * @return the newly created Poller.
     */
    public Poller poller()
    {
        return new Poller(this);
    }

    /**
     * Create a new Poller within this context, with a specified initial size.
     * DO NOT FORGET TO CLOSE THE POLLER AFTER USE with {@link Poller#close()}
     *
     * @param size the poller initial size.
     * @return the newly created Poller.
     */
    public Poller poller(int size)
    {
        return new Poller(this, size);
    }

    /** Closes the context by delegating to {@link #term()}. */
    @Override
    public void close()
    {
        term();
    }
}
public static class Socket implements Closeable
{
// This port range is defined by IANA for dynamic or private ports
// We use this when choosing a port for dynamic binding.
private static final int DYNFROM = 0xc000;
private static final int DYNTO = 0xffff;
private final ZContext zctx;
private final SocketBase base;
private final AtomicBoolean isClosed = new AtomicBoolean(false);
/**
* Class constructor.
*
* @param context a 0MQ context previously created.
* @param type the socket type.
*/
protected Socket(Context context, SocketType type)
{
this(context, null, type.type);
}
/**
* Class constructor.
*
* @param context a 0MQ context previously created.
* @param type the socket type.
*/
protected Socket(ZContext context, SocketType type)
{
this(context.getContext(), context, type.type);
}
/**
* Class constructor.
*
* @param context a 0MQ context previously created.
* @param type the socket type.
* @deprecated use {@link Socket#Socket(Context, SocketType)}
*/
@Deprecated
protected Socket(Context context, int type)
{
this(context, null, type);
}
private Socket(Context context, ZContext zctx, int type)
{
this.zctx = zctx;
base = context.ctx.createSocket(type);
}
protected Socket(SocketBase base)
{
zctx = null;
this.base = base;
}
/**
* DO NOT USE if you're trying to build a special proxy
*
* @return raw zmq.SocketBase
*/
public SocketBase base()
{
return base;
}
/**
* This is an explicit "destructor". It can be called to ensure the corresponding 0MQ Socket
* has been disposed of. If the socket was created from a org.zeromq.ZContext, it will remove
* the reference to this socket from it.
*/
@SuppressWarnings("deprecation")
@Override
public void close()
{
if (zctx != null) {
zctx.destroySocket(this);
}
else {
internalClose();
}
}
void internalClose()
{
if (isClosed.compareAndSet(false, true)) {
base.close();
}
}
/**
* The 'ZMQ_TYPE option shall retrieve the socket type for the specified
* 'socket'. The socket type is specified at socket creation time and
* cannot be modified afterwards.
*
* @return the socket type.
*/
public int getType()
{
return base.getSocketOpt(zmq.ZMQ.ZMQ_TYPE);
}
/**
* The 'ZMQ_TYPE option shall retrieve the socket type for the specified
* 'socket'. The socket type is specified at socket creation time and
* cannot be modified afterwards.
*
* @return the socket type as an enum.
*/
public SocketType getSocketType()
{
return SocketType.type(getType());
}
/**
* The 'ZMQ_LINGER' option shall retrieve the period for pending outbound
* messages to linger in memory after closing the socket. Value of -1 means
* infinite. Pending messages will be kept until they are fully transferred to
* the peer. Value of 0 means that all the pending messages are dropped immediately
* when socket is closed. Positive value means number of milliseconds to keep
* trying to send the pending messages before discarding them.
*
* @return the linger period.
* @see #setLinger(int)
*/
public int getLinger()
{
return base.getSocketOpt(zmq.ZMQ.ZMQ_LINGER);
}
private boolean setSocketOpt(int option, Object value)
{
try {
boolean set = base.setSocketOpt(option, value);
set &= base.errno() != ZError.EINVAL;
return set;
}
catch (CtxTerminatedException e) {
return false;
}
}
/**
* The ZMQ_LINGER option shall set the linger period for the specified socket.
* The linger period determines how long pending messages which have yet to be sent to a peer
* shall linger in memory after a socket is disconnected with disconnect or closed with close,
* and further affects the termination of the socket's context with Ctx#term.
* The following outlines the different behaviours: A value of -1 specifies an infinite linger period.
* Pending messages shall not be discarded after a call to disconnect() or close();
* attempting to terminate the socket's context with Ctx#term() shall block until all pending messages have been sent to a peer.
* The value of 0 specifies no linger period. Pending messages shall be discarded immediately after a call to disconnect() or close().
* Positive values specify an upper bound for the linger period in milliseconds.
* Pending messages shall not be discarded after a call to disconnect() or close();
* attempting to terminate the socket's context with Ctx#term() shall block until either all pending messages have been sent to a peer,
* or the linger period expires, after which any pending messages shall be discarded.
*
* @param value the linger period in milliseconds.
* @return true if the option was set, otherwise false
* @see #getLinger()
* @deprecated the linger option has only integer range, use {@link #setLinger(int)} instead
*/
@Deprecated
public boolean setLinger(long value)
{
    // Narrowing cast is equivalent to Long.valueOf(value).intValue() without the boxing.
    return setLinger((int) value);
}
/**
 * The ZMQ_LINGER option shall set the linger period for the specified socket.
 * The linger period determines how long pending messages which have yet to be sent to a peer
 * shall linger in memory after a socket is disconnected with disconnect or closed with close,
 * and further affects the termination of the socket's context with Ctx#term.
 * The following outlines the different behaviours: A value of -1 specifies an infinite linger period.
 * Pending messages shall not be discarded after a call to disconnect() or close();
 * attempting to terminate the socket's context with Ctx#term() shall block until all pending messages have been sent to a peer.
 * The value of 0 specifies no linger period. Pending messages shall be discarded immediately after a call to disconnect() or close().
 * Positive values specify an upper bound for the linger period in milliseconds.
 * Pending messages shall not be discarded after a call to disconnect() or close();
 * attempting to terminate the socket's context with Ctx#term() shall block until either all pending messages have been sent to a peer,
 * or the linger period expires, after which any pending messages shall be discarded.
 *
 * @param value the linger period in milliseconds.
 * @return true if the option was set, otherwise false
 * @see #getLinger()
 */
public boolean setLinger(int value)
{
    return base.setSocketOpt(zmq.ZMQ.ZMQ_LINGER, value);
}
/**
 * The ZMQ_RECONNECT_IVL option shall retrieve the reconnection interval for the specified socket,
 * i.e. the period 0MQ waits between attempts to reconnect disconnected peers
 * when using connection-oriented transports.
 *
 * @return the reconnection interval, in milliseconds.
 * @see #setReconnectIVL(int)
 */
public int getReconnectIVL()
{
    return base.getSocketOpt(zmq.ZMQ.ZMQ_RECONNECT_IVL);
}
/**
 * The ZMQ_RECONNECT_IVL option shall set the reconnection interval for the specified socket.
 *
 * @param value the reconnection interval, in milliseconds.
 * @return true if the option was set, otherwise false.
 * @see #getReconnectIVL()
 * @deprecated this option uses integer range, use {@link #setReconnectIVL(int)} instead
 */
@Deprecated
public boolean setReconnectIVL(long value)
{
    // Narrowing cast is equivalent to Long.valueOf(value).intValue() without the boxing.
    return setReconnectIVL((int) value);
}
/**
 * The ZMQ_RECONNECT_IVL option shall set the reconnection interval for the specified socket.
 *
 * @param value the reconnection interval, in milliseconds.
 * @return true if the option was set, otherwise false.
 * @see #getReconnectIVL()
 */
public boolean setReconnectIVL(int value)
{
    return base.setSocketOpt(zmq.ZMQ.ZMQ_RECONNECT_IVL, value);
}
/**
 * The ZMQ_BACKLOG option shall retrieve the maximum length of the queue
 * of outstanding peer connections for the specified socket;
 * this only applies to connection-oriented transports.
 * For details refer to your operating system documentation for the listen function.
 *
 * @return the maximum length of the queue of outstanding peer connections.
 * @see #setBacklog(int)
 */
public int getBacklog()
{
    return base.getSocketOpt(zmq.ZMQ.ZMQ_BACKLOG);
}
/**
 * The ZMQ_BACKLOG option shall set the maximum length
 * of the queue of outstanding peer connections for the specified socket;
 * this only applies to connection-oriented transports.
 * For details refer to your operating system documentation for the listen function.
 *
 * @param value the maximum length of the queue of outstanding peer connections.
 * @return true if the option was set, otherwise false.
 * @see #getBacklog()
 * @deprecated this option uses integer range, use {@link #setBacklog(int)} instead.
 */
@Deprecated
public boolean setBacklog(long value)
{
    // Narrowing cast is equivalent to Long.valueOf(value).intValue() without the boxing.
    return setBacklog((int) value);
}
/**
 * The ZMQ_BACKLOG option shall set the maximum length
 * of the queue of outstanding peer connections for the specified socket;
 * this only applies to connection-oriented transports.
 * For details refer to your operating system documentation for the listen function.
 *
 * @param value the maximum length of the queue of outstanding peer connections.
 * @return true if the option was set, otherwise false.
 * @see #getBacklog()
 */
public boolean setBacklog(int value)
{
    return setSocketOpt(zmq.ZMQ.ZMQ_BACKLOG, value);
}
/**
 * The ZMQ_HANDSHAKE_IVL option shall retrieve the maximum handshake interval
 * for the specified socket.
 * Handshaking is the exchange of socket configuration information
 * (socket type, identity, security) that occurs when a connection is first opened,
 * only for connection-oriented transports.
 * If handshaking does not complete within the configured time,
 * the connection shall be closed. The value 0 means no handshake time limit.
 *
 * @return the maximum handshake interval, in milliseconds.
 * @see #setHandshakeIvl(int)
 */
public int getHandshakeIvl()
{
    return base.getSocketOpt(zmq.ZMQ.ZMQ_HANDSHAKE_IVL);
}
/**
 * The ZMQ_HEARTBEAT_IVL option shall retrieve the interval
 * between sending ZMTP heartbeats for the specified socket.
 * If this option is set and is greater than 0,
 * then a PING ZMTP command will be sent every ZMQ_HEARTBEAT_IVL milliseconds.
 *
 * @return heartbeat interval in milliseconds
 * @see #setHeartbeatIvl(int)
 */
public int getHeartbeatIvl()
{
    return base.getSocketOpt(zmq.ZMQ.ZMQ_HEARTBEAT_IVL);
}
/**
 * The ZMQ_HEARTBEAT_TIMEOUT option shall retrieve
 * how long to wait before timing-out a connection
 * after sending a PING ZMTP command and not receiving any traffic.
 * This option is only valid if ZMQ_HEARTBEAT_IVL is also set,
 * and is greater than 0. The connection will time out
 * if there is no traffic received after sending the PING command,
 * but the received traffic does not have to be a PONG command
 * - any received traffic will cancel the timeout.
 *
 * @return heartbeat timeout in milliseconds
 * @see #setHeartbeatTimeout(int)
 */
public int getHeartbeatTimeout()
{
    return base.getSocketOpt(zmq.ZMQ.ZMQ_HEARTBEAT_TIMEOUT);
}
/**
 * The ZMQ_HEARTBEAT_TTL option shall retrieve the timeout
 * on the remote peer for ZMTP heartbeats.
 * If this option is greater than 0,
 * the remote side shall time out the connection
 * if it does not receive any more traffic within the TTL period.
 * This option does not have any effect if ZMQ_HEARTBEAT_IVL is not set or is 0.
 * Internally, this value is rounded down to the nearest decisecond,
 * any value less than 100 will have no effect.
 *
 * @return heartbeat time-to-live in milliseconds
 * @see #setHeartbeatTtl(int)
 */
public int getHeartbeatTtl()
{
    return base.getSocketOpt(zmq.ZMQ.ZMQ_HEARTBEAT_TTL);
}
/**
 * The ZMQ_HEARTBEAT_CONTEXT option shall retrieve the ping context
 * of the peer for ZMTP heartbeats.
 * <p>
 * This API is in DRAFT state and is subject to change at ANY time until declared stable.
 * <p>
 * If this option is set, every ping message sent for heartbeat will contain this context.
 *
 * @return the context to be sent with ping messages. Empty array by default.
 * @see #setHeartbeatContext(byte[])
 */
@Draft
public byte[] getHeartbeatContext()
{
    return (byte[]) base.getSocketOptx(zmq.ZMQ.ZMQ_HEARTBEAT_CONTEXT);
}
/**
 * The ZMQ_HANDSHAKE_IVL option shall set the maximum handshake interval for the specified socket.
 * Handshaking is the exchange of socket configuration information (socket type, identity, security)
 * that occurs when a connection is first opened, only for connection-oriented transports.
 * If handshaking does not complete within the configured time, the connection shall be closed.
 * The value 0 means no handshake time limit.
 *
 * @param maxHandshakeIvl the maximum handshake interval
 * @return true if the option was set, otherwise false
 * @see #getHandshakeIvl()
 */
public boolean setHandshakeIvl(int maxHandshakeIvl)
{
    return setSocketOpt(zmq.ZMQ.ZMQ_HANDSHAKE_IVL, maxHandshakeIvl);
}
/**
 * The ZMQ_HEARTBEAT_IVL option shall set the interval
 * between sending ZMTP heartbeats for the specified socket.
 * If this option is set and is greater than 0,
 * then a PING ZMTP command will be sent every ZMQ_HEARTBEAT_IVL milliseconds.
 *
 * @param heartbeatIvl heartbeat interval in milliseconds
 * @return true if the option was set, otherwise false
 * @see #getHeartbeatIvl()
 */
public boolean setHeartbeatIvl(int heartbeatIvl)
{
    return setSocketOpt(zmq.ZMQ.ZMQ_HEARTBEAT_IVL, heartbeatIvl);
}
/**
 * The ZMQ_HEARTBEAT_TIMEOUT option shall set
 * how long to wait before timing-out a connection
 * after sending a PING ZMTP command and not receiving any traffic.
 * This option is only valid if ZMQ_HEARTBEAT_IVL is also set,
 * and is greater than 0. The connection will time out
 * if there is no traffic received after sending the PING command,
 * but the received traffic does not have to be a PONG command
 * - any received traffic will cancel the timeout.
 *
 * @param heartbeatTimeout heartbeat timeout in milliseconds
 * @return true if the option was set, otherwise false
 * @see #getHeartbeatTimeout()
 */
public boolean setHeartbeatTimeout(int heartbeatTimeout)
{
    return setSocketOpt(zmq.ZMQ.ZMQ_HEARTBEAT_TIMEOUT, heartbeatTimeout);
}
/**
 * The ZMQ_HEARTBEAT_TTL option shall set the timeout
 * on the remote peer for ZMTP heartbeats.
 * If this option is greater than 0,
 * the remote side shall time out the connection
 * if it does not receive any more traffic within the TTL period.
 * This option does not have any effect if ZMQ_HEARTBEAT_IVL is not set or is 0.
 * Internally, this value is rounded down to the nearest decisecond,
 * any value less than 100 will have no effect.
 *
 * @param heartbeatTtl heartbeat time-to-live in milliseconds
 * @return true if the option was set, otherwise false
 * @see #getHeartbeatTtl()
 */
public boolean setHeartbeatTtl(int heartbeatTtl)
{
    return setSocketOpt(zmq.ZMQ.ZMQ_HEARTBEAT_TTL, heartbeatTtl);
}
/**
 * The ZMQ_HEARTBEAT_CONTEXT option shall set the ping context
 * of the peer for ZMTP heartbeats.
 * <p>
 * This API is in DRAFT state and is subject to change at ANY time until declared stable.
 * <p>
 * If this option is set, every ping message sent for heartbeat will contain this context.
 *
 * @param pingContext the context to be sent with ping messages.
 * @return true if the option was set, otherwise false
 * @see #getHeartbeatContext()
 */
@Draft
public boolean setHeartbeatContext(byte[] pingContext)
{
    return setSocketOpt(zmq.ZMQ.ZMQ_HEARTBEAT_CONTEXT, pingContext);
}
/**
 * Retrieve the IP_TOS option for the socket.
 *
 * @return the value of the Type-Of-Service set for the socket.
 * @see #setTos(int)
 */
public int getTos()
{
    return base.getSocketOpt(zmq.ZMQ.ZMQ_TOS);
}
/**
 * Sets the ToS fields (Differentiated services (DS)
 * and Explicit Congestion Notification (ECN) field of the IP header.
 * The ToS field is typically used to specify a packet's priority.
 * The availability of this option is dependent on intermediate network equipment
 * that inspect the ToS field and provide a path for low-delay, high-throughput, highly-reliable service, etc.
 *
 * @param value the Type-Of-Service to set for the socket.
 * @return true if the option was set, otherwise false.
 * @see #getTos()
 */
public boolean setTos(int value)
{
    return setSocketOpt(zmq.ZMQ.ZMQ_TOS, value);
}
/**
 * The ZMQ_RECONNECT_IVL_MAX option shall retrieve the maximum reconnection interval
 * for the specified socket.
 *
 * @return the maximum reconnection interval, in milliseconds.
 * @see #setReconnectIVLMax(int)
 */
public int getReconnectIVLMax()
{
    return base.getSocketOpt(zmq.ZMQ.ZMQ_RECONNECT_IVL_MAX);
}
/**
 * The ZMQ_RECONNECT_IVL_MAX option shall set the maximum reconnection interval
 * for the specified socket.
 *
 * @param value the maximum reconnection interval, in milliseconds.
 * @return true if the option was set, otherwise false.
 * @see #getReconnectIVLMax()
 * @deprecated this option uses integer range, use {@link #setReconnectIVLMax(int)} instead
 */
@Deprecated
public boolean setReconnectIVLMax(long value)
{
    // Narrowing cast is equivalent to Long.valueOf(value).intValue() without the boxing.
    return setReconnectIVLMax((int) value);
}
/**
 * The ZMQ_RECONNECT_IVL_MAX option shall set the maximum reconnection interval
 * for the specified socket.
 *
 * @param value the maximum reconnection interval, in milliseconds.
 * @return true if the option was set, otherwise false.
 * @see #getReconnectIVLMax()
 */
public boolean setReconnectIVLMax(int value)
{
    return setSocketOpt(zmq.ZMQ.ZMQ_RECONNECT_IVL_MAX, value);
}
/**
 * The option shall retrieve limit for the inbound messages.
 * If a peer sends a message larger than ZMQ_MAXMSGSIZE it is disconnected.
 * Value of -1 means no limit.
 *
 * @return the maxMsgSize.
 * @see #setMaxMsgSize(long)
 */
public long getMaxMsgSize()
{
    return (Long) base.getSocketOptx(zmq.ZMQ.ZMQ_MAXMSGSIZE);
}
/**
 * Limits the size of the inbound message.
 * If a peer sends a message larger than ZMQ_MAXMSGSIZE it is disconnected.
 * Value of -1 means no limit.
 *
 * @param value the maximum size of an inbound message, in bytes. -1 means no limit.
 * @return true if the option was set, otherwise false
 * @see #getMaxMsgSize()
 */
public boolean setMaxMsgSize(long value)
{
    return setSocketOpt(zmq.ZMQ.ZMQ_MAXMSGSIZE, value);
}
/**
 * The ZMQ_SNDHWM option shall retrieve the high water mark for outbound messages
 * on the specified socket.
 *
 * @return the send high water mark, in number of messages.
 * @see #setSndHWM(int)
 */
public int getSndHWM()
{
    return base.getSocketOpt(zmq.ZMQ.ZMQ_SNDHWM);
}
/**
 * The ZMQ_SNDHWM option shall set the high water mark for outbound messages
 * on the specified socket.
 *
 * @param value the send high water mark, in number of messages.
 * @return true if the option was set, otherwise false.
 * @see #getSndHWM()
 * @deprecated this option uses integer range, use {@link #setSndHWM(int)} instead
 */
@Deprecated
public boolean setSndHWM(long value)
{
    // Narrowing cast is equivalent to Long.valueOf(value).intValue() without the boxing.
    return setSndHWM((int) value);
}
/**
 * The ZMQ_SNDHWM option shall set the high water mark for outbound messages
 * on the specified socket.
 *
 * @param value the send high water mark, in number of messages.
 * @return true if the option was set, otherwise false.
 * @see #getSndHWM()
 */
public boolean setSndHWM(int value)
{
    return setSocketOpt(zmq.ZMQ.ZMQ_SNDHWM, value);
}
/**
 * The ZMQ_RCVHWM option shall retrieve the high water mark for inbound messages
 * on the specified socket.
 *
 * @return the receive high water mark, in number of messages.
 * @see #setRcvHWM(int)
 */
public int getRcvHWM()
{
    return base.getSocketOpt(zmq.ZMQ.ZMQ_RCVHWM);
}
/**
 * The ZMQ_RCVHWM option shall set the high water mark for inbound messages
 * on the specified socket.
 *
 * @param value the receive high water mark, in number of messages.
 * @return true if the option was set, otherwise false.
 * @see #getRcvHWM()
 * @deprecated this option uses integer range, use {@link #setRcvHWM(int)} instead
 */
@Deprecated
public boolean setRcvHWM(long value)
{
    // Narrowing cast is equivalent to Long.valueOf(value).intValue() without the boxing.
    return setRcvHWM((int) value);
}
/**
 * The ZMQ_RCVHWM option shall set the high water mark for inbound messages
 * on the specified socket.
 *
 * @param value the receive high water mark, in number of messages.
 * @return true if the option was set, otherwise false.
 * @see #getRcvHWM()
 */
public boolean setRcvHWM(int value)
{
    return setSocketOpt(zmq.ZMQ.ZMQ_RCVHWM, value);
}
/**
 * @return the High Water Mark. Always -1: a single combined HWM is no longer supported;
 *         use {@link #getSndHWM()} and {@link #getRcvHWM()} instead.
 * @see #setHWM(int)
 */
@Deprecated
public int getHWM()
{
    return -1;
}
/**
 * The 'ZMQ_HWM' option shall set the high water mark for the specified 'socket'. The high
 * water mark is a hard limit on the maximum number of outstanding messages 0MQ shall queue
 * in memory for any single peer that the specified 'socket' is communicating with.
 * <p>
 * If this limit has been reached the socket shall enter an exceptional state and depending
 * on the socket type, 0MQ shall take appropriate action such as blocking or dropping sent
 * messages. Refer to the individual socket descriptions in the man page of zmq_socket[3] for
 * details on the exact action taken for each socket type.
 *
 * @param hwm the number of messages to queue.
 * @return true if the option was set, otherwise false.
 * @deprecated this option uses integer range, use {@link #setHWM(int)} instead
 */
@Deprecated
public boolean setHWM(long hwm)
{
    // Delegate to the int overload, matching the other deprecated long setters.
    // The previous implementation initialized its result accumulator to true and
    // OR-ed the outcomes into it, so it always reported success regardless of
    // whether either option was actually set.
    return setHWM((int) hwm);
}
/**
 * The 'ZMQ_HWM' option shall set the high water mark for the specified 'socket'. The high
 * water mark is a hard limit on the maximum number of outstanding messages 0MQ shall queue
 * in memory for any single peer that the specified 'socket' is communicating with.
 * <p>
 * If this limit has been reached the socket shall enter an exceptional state and depending
 * on the socket type, 0MQ shall take appropriate action such as blocking or dropping sent
 * messages. Refer to the individual socket descriptions in the man page of zmq_socket[3] for
 * details on the exact action taken for each socket type.
 *
 * @param hwm the number of messages to queue.
 * @return true if the option was set, otherwise false
 */
public boolean setHWM(int hwm)
{
    // Apply the mark to both directions; report success if either option was set.
    boolean sndSet = setSndHWM(hwm);
    boolean rcvSet = setRcvHWM(hwm);
    return sndSet || rcvSet;
}
/**
 * @return the number of messages to swap at most. Always -1: swap is not supported
 *         since ZeroMQ 3; this accessor is kept only for API compatibility.
 * @see #setSwap(long)
 */
@Deprecated
public long getSwap()
{
    // Swap is not supported since ZeroMQ 3.
    return -1L;
}
/**
 * If set, a socket shall keep only one message in its inbound/outbound queue,
 * this message being the last message received/the last message to be sent.
 * Ignores ZMQ_RCVHWM and ZMQ_SNDHWM options.
 * Does not support multi-part messages, in particular,
 * only one part of it is kept in the socket internal queue.
 *
 * @param conflate true to keep only one message, false for standard behaviour.
 * @return true if the option was set, otherwise false.
 * @see #isConflate()
 */
public boolean setConflate(boolean conflate)
{
    return setSocketOpt(zmq.ZMQ.ZMQ_CONFLATE, conflate);
}
/**
 * If in conflate mode, a socket shall keep only one message in its inbound/outbound queue,
 * this message being the last message received/the last message to be sent.
 * Ignores ZMQ_RCVHWM and ZMQ_SNDHWM options.
 * Does not support multi-part messages, in particular,
 * only one part of it is kept in the socket internal queue.
 *
 * @return true to keep only one message, false for standard behaviour.
 * @see #setConflate(boolean)
 */
public boolean isConflate()
{
    return base.getSocketOpt(zmq.ZMQ.ZMQ_CONFLATE) != 0;
}
/**
 * If in conflate mode, a socket shall keep only one message in its inbound/outbound queue,
 * this message being the last message received/the last message to be sent.
 * Ignores ZMQ_RCVHWM and ZMQ_SNDHWM options.
 * Does not support multi-part messages, in particular,
 * only one part of it is kept in the socket internal queue.
 * <p>
 * Equivalent to {@link #isConflate()}.
 *
 * @return true to keep only one message, false for standard behaviour.
 * @see #setConflate(boolean)
 */
public boolean getConflate()
{
    return isConflate();
}
/**
 * Set the Swap. The 'ZMQ_SWAP' option shall set the disk offload (swap) size for the
 * specified 'socket'. A socket which has 'ZMQ_SWAP' set to a non-zero value may exceed its
 * high water mark; in this case outstanding messages shall be offloaded to storage on disk
 * rather than held in memory.
 *
 * @param value The value of 'ZMQ_SWAP' defines the maximum size of the swap space in bytes.
 * @throws UnsupportedOperationException always: swap is not supported since ZeroMQ 3.
 */
@Deprecated
public boolean setSwap(long value)
{
    throw new UnsupportedOperationException();
}
/**
 * @return the affinity.
 * @see #setAffinity(long)
 */
public long getAffinity()
{
    return (Long) base.getSocketOptx(zmq.ZMQ.ZMQ_AFFINITY);
}
/**
 * Set the Affinity. The 'ZMQ_AFFINITY' option shall set the I/O thread affinity for newly
 * created connections on the specified 'socket'.
 * <p>
 * Affinity determines which threads from the 0MQ I/O thread pool associated with the
 * socket's _context_ shall handle newly created connections. A value of zero specifies no
 * affinity, meaning that work shall be distributed fairly among all 0MQ I/O threads in the
 * thread pool. For non-zero values, the lowest bit corresponds to thread 1, second lowest
 * bit to thread 2 and so on. For example, a value of 3 specifies that subsequent
 * connections on 'socket' shall be handled exclusively by I/O threads 1 and 2.
 * <p>
 * See also in the man page of init[3] for details on allocating the number of I/O threads for a
 * specific _context_.
 *
 * @param value the io_thread affinity.
 * @return true if the option was set, otherwise false
 */
public boolean setAffinity(long value)
{
    return setSocketOpt(zmq.ZMQ.ZMQ_AFFINITY, value);
}
/**
 * @return the Identity.
 * @see #setIdentity(byte[])
 */
public byte[] getIdentity()
{
    return (byte[]) base.getSocketOptx(zmq.ZMQ.ZMQ_IDENTITY);
}
/**
 * The 'ZMQ_IDENTITY' option shall set the identity of the specified 'socket'. Socket
 * identity determines if existing 0MQ infrastructure (_message queues_, _forwarding
 * devices_) shall be identified with a specific application and persist across multiple
 * runs of the application.
 * <p>
 * If the socket has no identity, each run of an application is completely separate from
 * other runs. However, with identity set the socket shall re-use any existing 0MQ
 * infrastructure configured by the previous run(s). Thus the application may receive
 * messages that were sent in the meantime, _message queue_ limits shall be shared with
 * previous run(s) and so on.
 * <p>
 * Identity should be at least one byte and at most 255 bytes long. Identities starting with
 * binary zero are reserved for use by 0MQ infrastructure.
 *
 * @param identity the identity to assign to the socket.
 * @return true if the option was set, otherwise false
 */
public boolean setIdentity(byte[] identity)
{
    return setSocketOpt(zmq.ZMQ.ZMQ_IDENTITY, identity);
}
/**
 * @return the Rate.
 * @see #setRate(long)
 */
public long getRate()
{
    return base.getSocketOpt(zmq.ZMQ.ZMQ_RATE);
}
/**
 * The 'ZMQ_RATE' option shall set the maximum send or receive data rate for multicast
 * transports such as in the man page of zmq_pgm[7] using the specified 'socket'.
 *
 * @param value maximum send or receive data rate for multicast, default 100
 * @return true if the option was set, otherwise false
 * @throws UnsupportedOperationException always: setting this option is not supported
 *         by this implementation.
 */
public boolean setRate(long value)
{
    throw new UnsupportedOperationException();
}
/**
 * The ZMQ_RECOVERY_IVL option shall retrieve the recovery interval for multicast transports
 * using the specified socket. The recovery interval determines the maximum time in milliseconds
 * that a receiver can be absent from a multicast group before unrecoverable data loss will occur.
 *
 * @return the RecoveryIntervall.
 * @see #setRecoveryInterval(long)
 */
public long getRecoveryInterval()
{
    return base.getSocketOpt(zmq.ZMQ.ZMQ_RECOVERY_IVL);
}
/**
 * The 'ZMQ_RECOVERY_IVL' option shall set the recovery interval for multicast transports
 * using the specified 'socket'. The recovery interval determines the maximum time in
 * milliseconds that a receiver can be absent from a multicast group before unrecoverable data
 * loss will occur.
 * <p>
 * CAUTION: Exercise care when setting large recovery intervals as the data needed for
 * recovery will be held in memory. For example, a 1 minute recovery interval at a data rate
 * of 1Gbps requires a 7GB in-memory buffer.
 *
 * @param value recovery interval for multicast in milliseconds, default 10000
 * @return true if the option was set, otherwise false.
 * @throws UnsupportedOperationException always: setting this option is not supported
 *         by this implementation.
 * @see #getRecoveryInterval()
 */
public boolean setRecoveryInterval(long value)
{
    throw new UnsupportedOperationException();
}
/**
 * Sets the ZMQ_REQ_CORRELATE option for a REQ socket.
 * NOTE(review): per the libzmq documentation this makes the socket match replies with
 * requests via an extra request-id frame — confirm against the base implementation.
 *
 * @param correlate true to enable request correlation, false for standard behaviour.
 * @return true if the option was set, otherwise false
 */
public boolean setReqCorrelate(boolean correlate)
{
    return setSocketOpt(zmq.ZMQ.ZMQ_REQ_CORRELATE, correlate);
}
/**
 * @return the state of the ZMQ_REQ_CORRELATE option.
 * @throws UnsupportedOperationException always: reading this option is not supported.
 * @see #setReqCorrelate(boolean)
 */
@Deprecated
public boolean getReqCorrelate()
{
    throw new UnsupportedOperationException();
}
/**
 * By default, a REQ socket does not allow initiating a new request with zmq_send(3)
 * until the reply to the previous one has been received.
 * When set to true, sending another message is allowed and has the effect of disconnecting
 * the underlying connection to the peer from which the reply was expected,
 * triggering a reconnection attempt on transports that support it.
 * The request-reply state machine is reset and a new request is sent to the next available peer.
 * If set to true, also enable ZMQ_REQ_CORRELATE to ensure correct matching of requests and replies.
 * Otherwise a late reply to an aborted request can be reported as the reply to the superseding request.
 *
 * @param relaxed true to allow a new request before the previous reply arrives.
 * @return true if the option was set, otherwise false
 * @see #getReqRelaxed()
 */
public boolean setReqRelaxed(boolean relaxed)
{
    return setSocketOpt(zmq.ZMQ.ZMQ_REQ_RELAXED, relaxed);
}
/**
 * By default, a REQ socket does not allow initiating a new request with zmq_send(3)
 * until the reply to the previous one has been received.
 * When set to true, sending another message is allowed and has the effect of disconnecting
 * the underlying connection to the peer from which the reply was expected,
 * triggering a reconnection attempt on transports that support it.
 * The request-reply state machine is reset and a new request is sent to the next available peer.
 * If set to true, also enable ZMQ_REQ_CORRELATE to ensure correct matching of requests and replies.
 * Otherwise a late reply to an aborted request can be reported as the reply to the superseding request.
 *
 * @return state of the ZMQ_REQ_RELAXED option.
 * @throws UnsupportedOperationException always: reading this option is not supported.
 * @see #setReqRelaxed(boolean)
 */
@Deprecated
public boolean getReqRelaxed()
{
    throw new UnsupportedOperationException();
}
/**
 * @return the Multicast Loop. Always false: multicast loopback is not supported
 *         by this implementation.
 * @see #setMulticastLoop(boolean)
 */
@Deprecated
public boolean hasMulticastLoop()
{
    return false;
}
/**
 * The 'ZMQ_MCAST_LOOP' option shall control whether data sent via multicast transports
 * using the specified 'socket' can also be received by the sending host via loopback. A
 * value of zero disables the loopback functionality, while the default value of 1 enables
 * the loopback functionality. Leaving multicast loopback enabled when it is not required
 * can have a negative impact on performance. Where possible, disable 'ZMQ_MCAST_LOOP' in
 * production environments.
 *
 * @param multicastLoop true to enable loopback of multicast data, false to disable it.
 * @throws UnsupportedOperationException always: this option is not supported
 *         by this implementation.
 */
@Deprecated
public boolean setMulticastLoop(boolean multicastLoop)
{
    throw new UnsupportedOperationException();
}
/**
 * @return the Multicast Hops.
 * @see #setMulticastHops(long)
 */
public long getMulticastHops()
{
    return base.getSocketOpt(zmq.ZMQ.ZMQ_MULTICAST_HOPS);
}
/**
 * Sets the time-to-live field in every multicast packet sent from this socket.
 * The default is 1 which means that the multicast packets don't leave the local
 * network.
 *
 * @param value time-to-live field in every multicast packet, default 1
 * @return true if the option was set, otherwise false.
 * @throws UnsupportedOperationException always: setting this option is not supported
 *         by this implementation.
 */
public boolean setMulticastHops(long value)
{
    throw new UnsupportedOperationException();
}
/**
 * Retrieve the timeout for receive operation on the socket.
 * If the value is 0, receive will return immediately,
 * with null if there is no message to receive.
 * If the value is -1, it will block until a message is available.
 * For all other values, it will wait for a message for that amount of time
 * before returning with a null and an EAGAIN error.
 *
 * @return the Receive Timeout in milliseconds.
 * @see #setReceiveTimeOut(int)
 */
public int getReceiveTimeOut()
{
    return base.getSocketOpt(zmq.ZMQ.ZMQ_RCVTIMEO);
}
/**
 * Sets the timeout for receive operation on the socket. If the value is 0, receive
 * will return immediately, with null if there is no message to receive.
 * If the value is -1, it will block until a message is available. For all other
 * values, it will wait for a message for that amount of time before returning with
 * a null and an EAGAIN error.
 *
 * @param value Timeout for receive operation in milliseconds. Default -1 (infinite)
 * @return true if the option was set, otherwise false.
 * @see #getReceiveTimeOut()
 */
public boolean setReceiveTimeOut(int value)
{
    return setSocketOpt(zmq.ZMQ.ZMQ_RCVTIMEO, value);
}
/**
 * Retrieve the timeout for send operation on the socket.
 * If the value is 0, send will return immediately, with a false and an EAGAIN error if the message cannot be sent.
 * If the value is -1, it will block until the message is sent.
 * For all other values, it will try to send the message for that amount of time before returning with false and an EAGAIN error.
 *
 * @return the Send Timeout in milliseconds.
 * @see #setSendTimeOut(int)
 */
public int getSendTimeOut()
{
    return base.getSocketOpt(zmq.ZMQ.ZMQ_SNDTIMEO);
}
/**
 * Sets the timeout for send operation on the socket. If the value is 0, send
 * will return immediately, with a false if the message cannot be sent.
 * If the value is -1, it will block until the message is sent. For all other
 * values, it will try to send the message for that amount of time before
 * returning with false and an EAGAIN error.
 *
 * @param value Timeout for send operation in milliseconds. Default -1 (infinite)
 * @return true if the option was set, otherwise false.
 * @see #getSendTimeOut()
 */
public boolean setSendTimeOut(int value)
{
    return setSocketOpt(zmq.ZMQ.ZMQ_SNDTIMEO, value);
}
/**
 * Override SO_KEEPALIVE socket option (where supported by OS) to enable keep-alive packets for a socket
 * connection. Possible values are -1, 0, 1. The default value -1 will skip all overrides and do the OS default.
 *
 * @param value The value of 'ZMQ_TCP_KEEPALIVE' to turn TCP keepalives on (1) or off (0).
 * @return true if the option was set, otherwise false.
 * @deprecated this option uses integer range, use {@link #setTCPKeepAlive(int)} instead
 */
@Deprecated
public boolean setTCPKeepAlive(long value)
{
    // Narrowing cast is equivalent to Long.valueOf(value).intValue() without the boxing.
    return setTCPKeepAlive((int) value);
}
/**
 * @return the keep alive setting.
 * @see #setTCPKeepAlive(long)
 */
@Deprecated
public long getTCPKeepAliveSetting()
{
    return getTCPKeepAlive();
}
/**
 * Override TCP_KEEPCNT socket option (where supported by OS). The default value -1 will skip all overrides and
 * do the OS default.
 *
 * @param value The value of 'ZMQ_TCP_KEEPALIVE_CNT' defines the number of keepalives before death.
 * @return true if the option was set, otherwise false.
 */
public boolean setTCPKeepAliveCount(long value)
{
    // Narrowing cast is equivalent to Long.valueOf(value).intValue() without the boxing.
    return setSocketOpt(zmq.ZMQ.ZMQ_TCP_KEEPALIVE_CNT, (int) value);
}
/**
 * @return the keep alive count.
 * @see #setTCPKeepAliveCount(long)
 */
public long getTCPKeepAliveCount()
{
    return base.getSocketOpt(zmq.ZMQ.ZMQ_TCP_KEEPALIVE_CNT);
}
/**
 * Override TCP_KEEPINTVL socket option (where supported by OS). The default value -1 will skip all overrides
 * and do the OS default.
 *
 * @param value The value of 'ZMQ_TCP_KEEPALIVE_INTVL' defines the interval between keepalives. Unit is OS
 *            dependent.
 * @return true if the option was set, otherwise false.
 */
public boolean setTCPKeepAliveInterval(long value)
{
    // Narrowing cast is equivalent to Long.valueOf(value).intValue() without the boxing.
    return setSocketOpt(zmq.ZMQ.ZMQ_TCP_KEEPALIVE_INTVL, (int) value);
}
/**
 * @return the keep alive interval.
 * @see #setTCPKeepAliveInterval(long)
 */
public long getTCPKeepAliveInterval()
{
    return base.getSocketOpt(zmq.ZMQ.ZMQ_TCP_KEEPALIVE_INTVL);
}
/**
 * Override TCP_KEEPCNT (or TCP_KEEPALIVE on some OS) socket option (where supported by OS). The default value
 * -1 will skip all overrides and do the OS default.
 *
 * @param value The value of 'ZMQ_TCP_KEEPALIVE_IDLE' defines the interval between the last data packet sent
 *            over the socket and the first keepalive probe. Unit is OS dependent.
 * @return true if the option was set, otherwise false
 */
public boolean setTCPKeepAliveIdle(long value)
{
    // Narrowing cast is equivalent to Long.valueOf(value).intValue() without the boxing.
    return setSocketOpt(zmq.ZMQ.ZMQ_TCP_KEEPALIVE_IDLE, (int) value);
}
/**
 * @return the keep alive idle value.
 * @see #setTCPKeepAliveIdle(long)
 */
public long getTCPKeepAliveIdle()
{
    return base.getSocketOpt(zmq.ZMQ.ZMQ_TCP_KEEPALIVE_IDLE);
}
/**
 * The ZMQ_SNDBUF option shall retrieve the underlying kernel transmit buffer size for the specified socket.
 * For details refer to your operating system documentation for the SO_SNDBUF socket option.
 *
 * @return the kernel send buffer size.
 * @see #setSendBufferSize(int)
 */
public int getSendBufferSize()
{
    return base.getSocketOpt(zmq.ZMQ.ZMQ_SNDBUF);
}
/**
 * The 'ZMQ_SNDBUF' option shall set the underlying kernel transmit buffer size for the
 * 'socket' to the specified size in bytes. A value of zero means leave the OS default
 * unchanged. For details please refer to your operating system documentation for the
 * 'SO_SNDBUF' socket option.
 *
 * @param value underlying kernel transmit buffer size for the 'socket' in bytes
 *              A value of zero means leave the OS default unchanged.
 * @return true if the option was set, otherwise false
 * @see #getSendBufferSize()
 * @deprecated this option uses integer range, use {@link #setSendBufferSize(int)} instead
 */
@Deprecated
public boolean setSendBufferSize(long value)
{
    // Narrowing cast is equivalent to Long.valueOf(value).intValue() without the boxing.
    return setSendBufferSize((int) value);
}
/**
 * The 'ZMQ_SNDBUF' option shall set the underlying kernel transmit buffer size for the
 * 'socket' to the specified size in bytes. A value of zero means leave the OS default
 * unchanged. For details please refer to your operating system documentation for the
 * 'SO_SNDBUF' socket option.
 *
 * @param value underlying kernel transmit buffer size for the 'socket' in bytes
 *              A value of zero means leave the OS default unchanged.
 * @return true if the option was set, otherwise false
 * @see #getSendBufferSize()
 */
public boolean setSendBufferSize(int value)
{
    return setSocketOpt(zmq.ZMQ.ZMQ_SNDBUF, value);
}
/**
 * The ZMQ_RCVBUF option shall retrieve the underlying kernel receive buffer size for the specified socket.
 * For details refer to your operating system documentation for the SO_RCVBUF socket option.
 *
 * @return the kernel receive buffer size.
 * @see #setReceiveBufferSize(int)
 */
public int getReceiveBufferSize()
{
    return base.getSocketOpt(zmq.ZMQ.ZMQ_RCVBUF);
}
/**
 * The 'ZMQ_RCVBUF' option shall set the underlying kernel receive buffer size for the
 * 'socket' to the specified size in bytes.
 * For details refer to your operating system documentation for the 'SO_RCVBUF'
 * socket option.
 *
 * @param value Underlying kernel receive buffer size for the 'socket' in bytes.
 *              A value of zero means leave the OS default unchanged.
 * @return true if the option was set, otherwise false
 * @see #getReceiveBufferSize()
 * @deprecated this option uses integer range, use {@link #setReceiveBufferSize(int)} instead
 */
@Deprecated
public boolean setReceiveBufferSize(long value)
{
    // Narrowing cast is equivalent to Long.valueOf(value).intValue() without the boxing.
    return setReceiveBufferSize((int) value);
}
/**
 * The 'ZMQ_RCVBUF' option shall set the underlying kernel receive buffer size for the
 * 'socket' to the specified size in bytes.
 * For details refer to your operating system documentation for the 'SO_RCVBUF'
 * socket option.
 *
 * @param value Underlying kernel receive buffer size for the 'socket' in bytes.
 *              A value of zero means leave the OS default unchanged.
 * @return true if the option was set, otherwise false
 * @see #getReceiveBufferSize()
 */
public boolean setReceiveBufferSize(int value)
{
    return setSocketOpt(zmq.ZMQ.ZMQ_RCVBUF, value);
}
/**
 * The 'ZMQ_RCVMORE' option shall return a boolean value indicating if the multi-part
 * message currently being read from the specified 'socket' has more message parts to
 * follow. If there are no message parts to follow or if the message currently being read is
 * not a multi-part message a value of zero shall be returned. Otherwise, a value of 1 shall
 * be returned.
 *
 * @return true if there are more message parts to receive.
 */
public boolean hasReceiveMore()
{
    return base.getSocketOpt(zmq.ZMQ.ZMQ_RCVMORE) == 1;
}
/**
 * The 'ZMQ_FD' option shall retrieve file descriptor associated with the 0MQ
 * socket. The descriptor can be used to integrate 0MQ socket into an existing
 * event loop. It should never be used for anything else than polling -- such as
 * reading or writing. The descriptor signals edge-triggered IN event when
 * something has happened within the 0MQ socket. It does not necessarily mean that
 * the messages can be read or written. Check ZMQ_EVENTS option to find out whether
 * the 0MQ socket is readable or writable.
 *
 * @return the underlying file descriptor.
 * @see #getEvents()
 */
public SelectableChannel getFD()
{
    return (SelectableChannel) base.getSocketOptx(zmq.ZMQ.ZMQ_FD);
}
/**
 * The 'ZMQ_EVENTS' option shall retrieve event flags for the specified socket.
 * If a message can be read from the socket ZMQ_POLLIN flag is set. If message can
 * be written to the socket ZMQ_POLLOUT flag is set.
 *
 * @return the mask of outstanding events.
 */
public int getEvents()
{
    return base.getSocketOpt(zmq.ZMQ.ZMQ_EVENTS);
}
/**
 * The 'ZMQ_SUBSCRIBE' option shall establish a new message filter on a 'ZMQ_SUB' socket.
 * Newly created 'ZMQ_SUB' sockets shall filter out all incoming messages, therefore you
 * should call this option to establish an initial message filter.
 * <p>
 * An empty 'option_value' of length zero shall subscribe to all incoming messages. A
 * non-empty 'option_value' shall subscribe to all messages beginning with the specified
 * prefix. Multiple filters may be attached to a single 'ZMQ_SUB' socket, in which case a
 * message shall be accepted if it matches at least one filter.
 *
 * @param topic the message prefix to subscribe to; an empty array subscribes to everything.
 * @return true if the option was set, otherwise false
 */
public boolean subscribe(byte[] topic)
{
    return setSocketOpt(zmq.ZMQ.ZMQ_SUBSCRIBE, topic);
}
/**
 * The 'ZMQ_SUBSCRIBE' option shall establish a new message filter on a 'ZMQ_SUB' socket.
 * Newly created 'ZMQ_SUB' sockets shall filter out all incoming messages, therefore you
 * should call this option to establish an initial message filter.
 * <p>
 * An empty 'option_value' of length zero shall subscribe to all incoming messages. A
 * non-empty 'option_value' shall subscribe to all messages beginning with the specified
 * prefix. Multiple filters may be attached to a single 'ZMQ_SUB' socket, in which case a
 * message shall be accepted if it matches at least one filter.
 *
 * @param topic the message prefix to subscribe to; an empty string subscribes to everything.
 * @return true if the option was set, otherwise false
 */
public boolean subscribe(String topic)
{
    return setSocketOpt(zmq.ZMQ.ZMQ_SUBSCRIBE, topic);
}
/**
 * The 'ZMQ_UNSUBSCRIBE' option shall remove an existing message filter on a 'ZMQ_SUB'
 * socket. The filter specified must match an existing filter previously established with
 * the 'ZMQ_SUBSCRIBE' option. If the socket has several instances of the same filter
 * attached the 'ZMQ_UNSUBSCRIBE' option shall remove only one instance, leaving the rest in
 * place and functional.
 *
 * @param topic the message prefix whose filter should be removed.
 * @return true if the option was set, otherwise false
 */
public boolean unsubscribe(byte[] topic)
{
    return setSocketOpt(zmq.ZMQ.ZMQ_UNSUBSCRIBE, topic);
}
/**
 * The 'ZMQ_UNSUBSCRIBE' option shall remove an existing message filter on a 'ZMQ_SUB'
 * socket. The filter specified must match an existing filter previously established with
 * the 'ZMQ_SUBSCRIBE' option. If the socket has several instances of the same filter
 * attached the 'ZMQ_UNSUBSCRIBE' option shall remove only one instance, leaving the rest in
 * place and functional.
 *
 * @param topic the message prefix whose filter should be removed.
 * @return true if the option was set, otherwise false
 */
public boolean unsubscribe(String topic)
{
    return setSocketOpt(zmq.ZMQ.ZMQ_UNSUBSCRIBE, topic);
}
/**
 * Set a custom Encoder.
 *
 * @param cls the encoder class to install on the socket.
 * @return true if the option was set, otherwise false
 * @deprecated custom encoders are deprecated.
 */
@Deprecated
public boolean setEncoder(Class<? extends IEncoder> cls)
{
    final boolean ok = setSocketOpt(zmq.ZMQ.ZMQ_ENCODER, cls);
    return ok;
}

/**
 * Set a custom Decoder.
 *
 * @param cls the decoder class to install on the socket.
 * @return true if the option was set, otherwise false
 * @deprecated custom decoders are deprecated.
 */
@Deprecated
public boolean setDecoder(Class<? extends IDecoder> cls)
{
    final boolean ok = setSocketOpt(zmq.ZMQ.ZMQ_DECODER, cls);
    return ok;
}
/**
 * Sets the size limit over which messages will be allocated using a Direct ByteBuffer.
 * Messages above the threshold pay a slight creation cost, but subsequent operations
 * on them are faster. Set to 0 or negative to disable the threshold mechanism.
 *
 * @param threshold the size threshold; 0 or negative disables this system.
 * @return true if the option was set, otherwise false.
 */
public boolean setMsgAllocationHeapThreshold(int threshold)
{
    final boolean ok = setSocketOpt(zmq.ZMQ.ZMQ_MSG_ALLOCATION_HEAP_THRESHOLD, threshold);
    return ok;
}

/**
 * Gets the size limit over which messages will be allocated using a Direct ByteBuffer.
 * Messages above the threshold pay a slight creation cost, but subsequent operations
 * on them are faster.
 *
 * @return the threshold
 */
public int getMsgAllocationHeapThreshold()
{
    final int threshold = base.getSocketOpt(zmq.ZMQ.ZMQ_MSG_ALLOCATION_HEAP_THRESHOLD);
    return threshold;
}

/**
 * Sets a custom message allocator.
 *
 * @param allocator the custom allocator.
 * @return true if the option was set, otherwise false.
 */
public boolean setMsgAllocator(MsgAllocator allocator)
{
    final boolean ok = setSocketOpt(zmq.ZMQ.ZMQ_MSG_ALLOCATOR, allocator);
    return ok;
}
/**
 * Set a custom {@link java.nio.channels.spi.SelectorProvider} chooser.
 *
 * @param chooser the custom chooser.
 * @return true if the option was set, otherwise false.
 */
public boolean setSelectorChooser(SelectorProviderChooser chooser)
{
    // Note: goes straight to base, bypassing the mayRaise-wrapping helper.
    final boolean ok = base.setSocketOpt(zmq.ZMQ.ZMQ_SELECTOR_PROVIDERCHOOSER, chooser);
    return ok;
}

/**
 * Return the custom {@link java.nio.channels.spi.SelectorProvider} chooser.
 *
 * @return the {@link java.nio.channels.spi.SelectorProvider} chooser.
 */
public SelectorProviderChooser getSelectorProviderChooser()
{
    final Object chooser = base.getSocketOptx(zmq.ZMQ.ZMQ_SELECTOR_PROVIDERCHOOSER);
    return (SelectorProviderChooser) chooser;
}
/**
 * The ZMQ_CONNECT_RID option sets the peer id of the next host connected via the connect() call,
 * and immediately readies that connection for data transfer with the named id.
 * This option applies only to the first subsequent call to connect();
 * calls thereafter use default connection behavior.
 * Typical use is to set this socket option ahead of each connect() attempt to a new host.
 * Each connection MUST be assigned a unique name; assigning a name that is already in use is not allowed.
 * Useful when connecting ROUTER to ROUTER, or STREAM to STREAM, as it allows for immediate sending to peers.
 * Outbound id framing requirements for ROUTER and STREAM sockets apply.
 * The peer id should be from 1 to 255 bytes long and MAY NOT start with binary zero.
 *
 * @param rid the peer id of the next host.
 * @return true if the option was set, otherwise false.
 */
public boolean setConnectRid(String rid)
{
    final boolean ok = setSocketOpt(zmq.ZMQ.ZMQ_CONNECT_RID, rid);
    return ok;
}

/**
 * The ZMQ_CONNECT_RID option sets the peer id of the next host connected via the connect() call,
 * and immediately readies that connection for data transfer with the named id.
 * This option applies only to the first subsequent call to connect();
 * calls thereafter use default connection behavior.
 * Typical use is to set this socket option ahead of each connect() attempt to a new host.
 * Each connection MUST be assigned a unique name; assigning a name that is already in use is not allowed.
 * Useful when connecting ROUTER to ROUTER, or STREAM to STREAM, as it allows for immediate sending to peers.
 * Outbound id framing requirements for ROUTER and STREAM sockets apply.
 * The peer id should be from 1 to 255 bytes long and MAY NOT start with binary zero.
 *
 * @param rid the peer id of the next host.
 * @return true if the option was set, otherwise false.
 */
public boolean setConnectRid(byte[] rid)
{
    final boolean ok = setSocketOpt(zmq.ZMQ.ZMQ_CONNECT_RID, rid);
    return ok;
}

/**
 * Sets the ZMQ_ROUTER_RAW option on the socket.
 *
 * @param raw the value of the ZMQ_ROUTER_RAW option.
 * @return true if the option was set, otherwise false.
 */
public boolean setRouterRaw(boolean raw)
{
    final boolean ok = setSocketOpt(zmq.ZMQ.ZMQ_ROUTER_RAW, raw);
    return ok;
}
/**
 * When set to true, the socket will automatically send
 * an empty message when a new connection is made or accepted.
 * You may set this on REQ, DEALER, or ROUTER sockets connected to a ROUTER socket.
 * The application must filter such empty messages.
 * The ZMQ_PROBE_ROUTER option in effect provides the ROUTER application with an event signaling the arrival of a new peer.
 *
 * @param probe true to automatically send an empty message when a new connection is made or accepted.
 * @return true if the option was set, otherwise false.
 */
public boolean setProbeRouter(boolean probe)
{
    final boolean ok = setSocketOpt(zmq.ZMQ.ZMQ_PROBE_ROUTER, probe);
    return ok;
}

/**
 * Sets the ROUTER socket behavior when an unroutable message is encountered.
 * A value of false is the default and discards the message silently
 * when it cannot be routed or the peer's SNDHWM is reached.
 * A value of true returns an EHOSTUNREACH error code if the message cannot be routed,
 * or an EAGAIN error code if the SNDHWM is reached and ZMQ_DONTWAIT was used.
 * Without ZMQ_DONTWAIT it will block until the SNDTIMEO is reached or a spot in the send queue opens up.
 *
 * @param mandatory false (default) discards the message silently when it cannot be routed;
 *                  true returns an EHOSTUNREACH error code if the message cannot be routed.
 * @return true if the option was set, otherwise false.
 */
public boolean setRouterMandatory(boolean mandatory)
{
    final boolean ok = setSocketOpt(zmq.ZMQ.ZMQ_ROUTER_MANDATORY, mandatory);
    return ok;
}

/**
 * If two clients use the same identity when connecting to a ROUTER,
 * the results shall depend on the ZMQ_ROUTER_HANDOVER option setting.
 * If that is not set (or set to the default of false),
 * the ROUTER socket shall reject clients trying to connect with an already-used identity.
 * If that option is set to true, the ROUTER socket shall hand over the connection to the new client and disconnect the existing one.
 *
 * @param handover false (default): the ROUTER socket rejects clients trying to connect with an already-used identity;
 *                 true: the ROUTER socket hands over the connection to the new client and disconnects the existing one.
 * @return true if the option was set, otherwise false.
 */
public boolean setRouterHandover(boolean handover)
{
    final boolean ok = setSocketOpt(zmq.ZMQ.ZMQ_ROUTER_HANDOVER, handover);
    return ok;
}

/**
 * Sets the XPUB socket behavior on new subscriptions and unsubscriptions.
 *
 * @param verbose false (default) passes only new subscription messages upstream;
 *                true passes all subscription messages upstream.
 * @return true if the option was set, otherwise false.
 */
public boolean setXpubVerbose(boolean verbose)
{
    final boolean ok = setSocketOpt(zmq.ZMQ.ZMQ_XPUB_VERBOSE, verbose);
    return ok;
}

/**
 * Sets the XPUB socket behaviour to return error EAGAIN if SNDHWM is reached and the message could not be sent.
 * A value of false is the default and drops the message silently when the peer's SNDHWM is reached.
 * A value of true returns an EAGAIN error code if the SNDHWM is reached and ZMQ_DONTWAIT was used.
 *
 * @param noDrop true to report EAGAIN instead of silently dropping on SNDHWM.
 * @return true if the option was set, otherwise false.
 */
public boolean setXpubNoDrop(boolean noDrop)
{
    final boolean ok = setSocketOpt(zmq.ZMQ.ZMQ_XPUB_NODROP, noDrop);
    return ok;
}

/**
 * Sets the ZMQ_XPUB_MANUAL option on the socket.
 *
 * @param manual the value of the ZMQ_XPUB_MANUAL option.
 * @return true if the option was set, otherwise false.
 */
public boolean setXpubManual(boolean manual)
{
    final boolean ok = setSocketOpt(zmq.ZMQ.ZMQ_XPUB_MANUAL, manual);
    return ok;
}

/**
 * Sets the ZMQ_XPUB_VERBOSER option on the socket.
 *
 * @param verboser the value of the ZMQ_XPUB_VERBOSER option.
 * @return true if the option was set, otherwise false.
 */
public boolean setXpubVerboser(boolean verboser)
{
    final boolean ok = setSocketOpt(zmq.ZMQ.ZMQ_XPUB_VERBOSER, verboser);
    return ok;
}
/**
 * @return the IPV4ONLY setting.
 * @see #setIPv4Only (boolean)
 * @deprecated use {@link #isIPv6()} instead (inverted logic: ipv4 = true <==> ipv6 = false)
 */
@Deprecated
public boolean getIPv4Only()
{
    final boolean v6 = isIPv6();
    return !v6;
}

/**
 * Retrieve the IPv6 option for the socket.
 * A value of true means IPv6 is enabled on the socket,
 * while false means the socket will use only IPv4.
 * When IPv6 is enabled the socket will connect to,
 * or accept connections from, both IPv4 and IPv6 hosts.
 *
 * @return the IPV6 configuration.
 * @see #setIPv6 (boolean)
 */
public boolean isIPv6()
{
    final Boolean v6 = (Boolean) base.getSocketOptx(zmq.ZMQ.ZMQ_IPV6);
    return v6;
}

/**
 * Retrieve the IPv6 option for the socket.
 * A value of true means IPv6 is enabled on the socket,
 * while false means the socket will use only IPv4.
 * When IPv6 is enabled the socket will connect to,
 * or accept connections from, both IPv4 and IPv6 hosts.
 *
 * @return the IPV6 configuration.
 * @see #setIPv6 (boolean)
 */
public boolean getIPv6()
{
    return isIPv6();
}

/**
 * The 'ZMQ_IPV4ONLY' option shall set the underlying native socket type.
 * An IPv6 socket lets applications connect to and accept connections from both IPv4 and IPv6 hosts.
 *
 * @param v4only true to use IPv4 sockets, false to use IPv6 sockets
 * @return true if the option was set, otherwise false
 * @deprecated use {@link #setIPv6(boolean)} instead (inverted logic: ipv4 = true <==> ipv6 = false)
 */
@Deprecated
public boolean setIPv4Only(boolean v4only)
{
    final boolean v6 = !v4only;
    return setIPv6(v6);
}

/**
 * <p>Set the IPv6 option for the socket.</p>
 * <p>A value of true means IPv6 is enabled on the socket, while false means the socket will use only IPv4.
 * When IPv6 is enabled the socket will connect to, or accept connections from, both IPv4 and IPv6 hosts.</p>
 * <p>The default value is false, unless the following system properties are set:</p>
 * <ul>
 * <li>java.net.preferIPv4Stack=false</li>
 * <li>java.net.preferIPv6Addresses=true</li>
 * </ul>
 *
 * @param v6 true to use IPv6 sockets, false to use IPv4 sockets only
 * @return true if the option was set, otherwise false
 * @see #isIPv6()
 */
public boolean setIPv6(boolean v6)
{
    final boolean ok = setSocketOpt(zmq.ZMQ.ZMQ_IPV6, v6);
    return ok;
}
/**
 * @return the keep alive setting.
 * @see #setTCPKeepAlive(int)
 */
public int getTCPKeepAlive()
{
    final int keepAlive = base.getSocketOpt(zmq.ZMQ.ZMQ_TCP_KEEPALIVE);
    return keepAlive;
}

/**
 * Override SO_KEEPALIVE socket option (where supported by OS) to enable keep-alive packets for a socket
 * connection. Possible values are -1, 0, 1. The default value -1 will skip all overrides and do the OS default.
 *
 * @param optVal the value of 'ZMQ_TCP_KEEPALIVE': 1 to turn TCP keepalives on, 0 to turn them off.
 * @return true if the option was set, otherwise false
 */
public boolean setTCPKeepAlive(int optVal)
{
    final boolean ok = setSocketOpt(zmq.ZMQ.ZMQ_TCP_KEEPALIVE, optVal);
    return ok;
}
/**
 * @return the inverse of {@link #isImmediate()}.
 * @see #setDelayAttachOnConnect(boolean)
 * @deprecated use {@link #isImmediate()} instead (inverted logic: immediate = true <==> delay attach on connect = false)
 */
@Deprecated
public boolean getDelayAttachOnConnect()
{
    final boolean immediate = isImmediate();
    return !immediate;
}

/**
 * Accept messages only when connections are made.
 * <p>
 * If set to true, will delay the attachment of a pipe on connect until the underlying connection
 * has completed. This will cause the socket to block if there are no other connections, but will
 * prevent queues from filling on pipes awaiting connection.
 *
 * @param value the value of 'ZMQ_DELAY_ATTACH_ON_CONNECT'. Default false.
 * @return true if the option was set
 * @deprecated use {@link #setImmediate(boolean)} instead (warning, the boolean is inverted)
 */
@Deprecated
public boolean setDelayAttachOnConnect(boolean value)
{
    final boolean immediate = !value;
    return setImmediate(immediate);
}

/**
 * Retrieve the state of the attach on connect value.
 * If false, will delay the attachment of a pipe on connect until the underlying connection has completed.
 * This will cause the socket to block if there are no other connections, but will prevent queues from filling on pipes awaiting connection.
 *
 * @return the value of 'ZMQ_IMMEDIATE'.
 * @see #setImmediate(boolean)
 */
public boolean isImmediate()
{
    final boolean immediate = (boolean) base.getSocketOptx(zmq.ZMQ.ZMQ_IMMEDIATE);
    return immediate;
}

/**
 * Retrieve the state of the attach on connect value.
 * If false, will delay the attachment of a pipe on connect until the underlying connection has completed.
 * This will cause the socket to block if there are no other connections, but will prevent queues from filling on pipes awaiting connection.
 *
 * @return the value of 'ZMQ_IMMEDIATE'.
 * @see #setImmediate(boolean)
 */
public boolean getImmediate()
{
    return isImmediate();
}

/**
 * Accept messages immediately or only when connections are made.
 * <p>
 * By default queues will fill on outgoing connections even if the connection has not completed.
 * This can lead to "lost" messages on sockets with round-robin routing (REQ, PUSH, DEALER).
 * If this option is set to false, messages shall be queued only to completed connections.
 * This will cause the socket to block if there are no other connections,
 * but will prevent queues from filling on pipes awaiting connection.
 *
 * @param value the value of 'ZMQ_IMMEDIATE'. Default true.
 * @return true if the option was set, otherwise false.
 * @see #isImmediate()
 */
public boolean setImmediate(boolean value)
{
    final boolean ok = setSocketOpt(zmq.ZMQ.ZMQ_IMMEDIATE, value);
    return ok;
}
/**
 * Sets the SOCKS5 proxy address that shall be used by the socket for the TCP connection(s).
 * Does not support SOCKS5 authentication.
 * If the endpoints are domain names instead of addresses they shall not be resolved,
 * and they shall be forwarded unchanged to the SOCKS proxy service
 * in the client connection request message (address type 0x03 domain name).
 *
 * @param proxy the SOCKS5 proxy address, as a string.
 * @return true if the option was set, otherwise false.
 * @see #getSocksProxy()
 */
public boolean setSocksProxy(String proxy)
{
    final boolean ok = setSocketOpt(zmq.ZMQ.ZMQ_SOCKS_PROXY, proxy);
    return ok;
}

/**
 * Sets the SOCKS5 proxy address that shall be used by the socket for the TCP connection(s).
 * Does not support SOCKS5 authentication.
 * If the endpoints are domain names instead of addresses they shall not be resolved,
 * and they shall be forwarded unchanged to the SOCKS proxy service
 * in the client connection request message (address type 0x03 domain name).
 *
 * @param proxy the SOCKS5 proxy address, as raw bytes.
 * @return true if the option was set, otherwise false.
 * @see #getSocksProxy()
 */
public boolean setSocksProxy(byte[] proxy)
{
    final boolean ok = setSocketOpt(zmq.ZMQ.ZMQ_SOCKS_PROXY, proxy);
    return ok;
}

/**
 * The ZMQ_SOCKS_PROXY option shall retrieve the SOCKS5 proxy address in string format.
 * The returned value MAY be empty.
 *
 * @return the SOCKS5 proxy address in string format
 * @see #setSocksProxy(byte[])
 */
public String getSocksProxy()
{
    final Object proxy = base.getSocketOptx(zmq.ZMQ.ZMQ_SOCKS_PROXY);
    return (String) proxy;
}

/**
 * The ZMQ_LAST_ENDPOINT option shall retrieve the last endpoint bound for TCP and IPC transports.
 * The returned value will be a string in the form of a ZMQ DSN.
 * Note that if the TCP host is INADDR_ANY, indicated by a *, then the returned address will be 0.0.0.0 (for IPv4).
 *
 * @return the last endpoint bound, as a ZMQ DSN string.
 */
public String getLastEndpoint()
{
    final Object endpoint = base.getSocketOptx(zmq.ZMQ.ZMQ_LAST_ENDPOINT);
    return (String) endpoint;
}
/**
 * Sets the domain for ZAP (ZMQ RFC 27) authentication.
 *
 * @param domain the ZAP domain, as a string.
 * @return true if the option was set, otherwise false.
 * @see #getZapDomain()
 */
public boolean setZapDomain(String domain)
{
    final boolean ok = setSocketOpt(zmq.ZMQ.ZMQ_ZAP_DOMAIN, domain);
    return ok;
}

/**
 * Sets the domain for ZAP (ZMQ RFC 27) authentication.
 *
 * @param domain the ZAP domain, as raw bytes.
 * @return true if the option was set, otherwise false.
 * @see #getZapDomain()
 */
public boolean setZapDomain(byte[] domain)
{
    final boolean ok = setSocketOpt(zmq.ZMQ.ZMQ_ZAP_DOMAIN, domain);
    return ok;
}

/**
 * The ZMQ_ZAP_DOMAIN option shall retrieve the last ZAP domain set for the socket.
 * The returned value MAY be empty.
 *
 * @return the domain of ZAP authentication
 * @see #setZapDomain(String)
 */
public String getZapDomain()
{
    final Object domain = base.getSocketOptx(zmq.ZMQ.ZMQ_ZAP_DOMAIN);
    return (String) domain;
}

/**
 * Alias of {@link #setZapDomain(String)}.
 *
 * @param domain the ZAP domain, as a string.
 * @return true if the option was set, otherwise false.
 */
public boolean setZAPDomain(String domain)
{
    return setZapDomain(domain);
}

/**
 * Alias of {@link #setZapDomain(byte[])}.
 *
 * @param domain the ZAP domain, as raw bytes.
 * @return true if the option was set, otherwise false.
 */
public boolean setZAPDomain(byte[] domain)
{
    return setZapDomain(domain);
}

/**
 * The ZMQ_ZAP_DOMAIN option shall retrieve the last ZAP domain set for the socket.
 * The returned value MAY be empty.
 *
 * @return the domain of ZAP authentication
 * @see #setZapDomain(String)
 */
public String getZAPDomain()
{
    return getZapDomain();
}
/**
 * Defines whether the socket will act as server for PLAIN security, see zmq_plain(7).
 * A value of true means the socket will act as PLAIN server.
 * A value of false means the socket will not act as PLAIN server,
 * and its security role then depends on other option settings.
 * Setting this to false shall reset the socket security to NULL.
 *
 * @param server true if the role of the socket should be server for PLAIN security.
 * @return true if the option was set, otherwise false.
 * @see #isAsServerPlain()
 * @deprecated the naming is inconsistent with jzmq, please use {@link #setPlainServer(boolean)} instead
 */
@Deprecated
public boolean setAsServerPlain(boolean server)
{
    return setPlainServer(server);
}

/**
 * Defines whether the socket will act as server for PLAIN security, see zmq_plain(7).
 * A value of true means the socket will act as PLAIN server.
 * A value of false means the socket will not act as PLAIN server,
 * and its security role then depends on other option settings.
 * Setting this to false shall reset the socket security to NULL.
 *
 * @param server true if the role of the socket should be server for PLAIN security.
 * @return true if the option was set, otherwise false.
 * @see #isAsServerPlain()
 */
public boolean setPlainServer(boolean server)
{
    final boolean ok = setSocketOpt(zmq.ZMQ.ZMQ_PLAIN_SERVER, server);
    return ok;
}

/**
 * Returns the ZMQ_PLAIN_SERVER option, if any, previously set on the socket.
 *
 * @return true if the role of the socket should be server for the PLAIN mechanism.
 * @see #setAsServerPlain(boolean)
 * @deprecated the naming is inconsistent with jzmq, please use {@link #getPlainServer()} instead
 */
@Deprecated
public boolean isAsServerPlain()
{
    return getPlainServer();
}

/**
 * Returns the ZMQ_PLAIN_SERVER option, if any, previously set on the socket.
 *
 * @return true if the role of the socket should be server for the PLAIN mechanism.
 * @see #setAsServerPlain(boolean)
 * @deprecated the naming is inconsistent with jzmq, please use {@link #getPlainServer()} instead
 */
@Deprecated
public boolean getAsServerPlain()
{
    return getPlainServer();
}

/**
 * Returns the ZMQ_PLAIN_SERVER option, if any, previously set on the socket.
 *
 * @return true if the role of the socket should be server for the PLAIN mechanism.
 * @see #setAsServerPlain(boolean)
 */
public boolean getPlainServer()
{
    final Boolean server = (Boolean) base.getSocketOptx(zmq.ZMQ.ZMQ_PLAIN_SERVER);
    return server;
}
/**
 * Sets the username for outgoing connections over TCP or IPC.
 * If you set this to a non-null value, the security mechanism used for connections shall be PLAIN, see zmq_plain(7).
 * If you set this to a null value, the security mechanism used for connections shall be NULL, see zmq_null(3).
 *
 * @param username the username to set.
 * @return true if the option was set, otherwise false.
 */
public boolean setPlainUsername(String username)
{
    // Goes straight to base, as in the other PLAIN credential setters.
    final boolean ok = base.setSocketOpt(zmq.ZMQ.ZMQ_PLAIN_USERNAME, username);
    return ok;
}

/**
 * Sets the password for outgoing connections over TCP or IPC.
 * If you set this to a non-null value, the security mechanism used for connections
 * shall be PLAIN, see zmq_plain(7).
 * If you set this to a null value, the security mechanism used for connections shall be NULL, see zmq_null(3).
 *
 * @param password the password to set.
 * @return true if the option was set, otherwise false.
 */
public boolean setPlainPassword(String password)
{
    final boolean ok = base.setSocketOpt(zmq.ZMQ.ZMQ_PLAIN_PASSWORD, password);
    return ok;
}

/**
 * Sets the username for outgoing connections over TCP or IPC.
 * If you set this to a non-null value, the security mechanism used for connections shall be PLAIN, see zmq_plain(7).
 * If you set this to a null value, the security mechanism used for connections shall be NULL, see zmq_null(3).
 *
 * @param username the username to set, as raw bytes.
 * @return true if the option was set, otherwise false.
 */
public boolean setPlainUsername(byte[] username)
{
    final boolean ok = base.setSocketOpt(zmq.ZMQ.ZMQ_PLAIN_USERNAME, username);
    return ok;
}

/**
 * Sets the password for outgoing connections over TCP or IPC.
 * If you set this to a non-null value, the security mechanism used for connections
 * shall be PLAIN, see zmq_plain(7).
 * If you set this to a null value, the security mechanism used for connections shall be NULL, see zmq_null(3).
 *
 * @param password the password to set, as raw bytes.
 * @return true if the option was set, otherwise false.
 */
public boolean setPlainPassword(byte[] password)
{
    final boolean ok = base.setSocketOpt(zmq.ZMQ.ZMQ_PLAIN_PASSWORD, password);
    return ok;
}

/**
 * The ZMQ_PLAIN_USERNAME option shall retrieve the last username
 * set for the PLAIN security mechanism.
 *
 * @return the plain username.
 */
public String getPlainUsername()
{
    final Object username = base.getSocketOptx(zmq.ZMQ.ZMQ_PLAIN_USERNAME);
    return (String) username;
}

/**
 * The ZMQ_PLAIN_PASSWORD option shall retrieve the last password
 * set for the PLAIN security mechanism.
 * The returned value MAY be empty.
 *
 * @return the plain password.
 */
public String getPlainPassword()
{
    final Object password = base.getSocketOptx(zmq.ZMQ.ZMQ_PLAIN_PASSWORD);
    return (String) password;
}
/**
 * Defines whether the socket will act as server for CURVE security, see zmq_curve(7).
 * A value of true means the socket will act as CURVE server.
 * A value of false means the socket will not act as CURVE server,
 * and its security role then depends on other option settings.
 * Setting this to false shall reset the socket security to NULL.
 * When you set this you must also set the server's secret key using the ZMQ_CURVE_SECRETKEY option.
 * A server socket does not need to know its own public key.
 *
 * @param server true if the role of the socket should be server for CURVE mechanism
 * @return true if the option was set
 * @see #isAsServerCurve()
 * @deprecated the naming is inconsistent with jzmq, please use {@link #setCurveServer(boolean)} instead
 */
@Deprecated
public boolean setAsServerCurve(boolean server)
{
    return setCurveServer(server);
}

/**
 * Defines whether the socket will act as server for CURVE security, see zmq_curve(7).
 * A value of true means the socket will act as CURVE server.
 * A value of false means the socket will not act as CURVE server,
 * and its security role then depends on other option settings.
 * Setting this to false shall reset the socket security to NULL.
 * When you set this you must also set the server's secret key using the ZMQ_CURVE_SECRETKEY option.
 * A server socket does not need to know its own public key.
 *
 * @param server true if the role of the socket should be server for CURVE mechanism
 * @return true if the option was set
 * @see #isAsServerCurve()
 */
public boolean setCurveServer(boolean server)
{
    final boolean ok = setSocketOpt(zmq.ZMQ.ZMQ_CURVE_SERVER, server);
    return ok;
}

/**
 * Tells if the socket will act as server for CURVE security.
 *
 * @return true if the role of the socket should be server for CURVE mechanism.
 * @see #setAsServerCurve(boolean)
 * @deprecated the naming is inconsistent with jzmq, please use {@link #getCurveServer()} instead
 */
@Deprecated
public boolean isAsServerCurve()
{
    return getCurveServer();
}

/**
 * Tells if the socket will act as server for CURVE security.
 *
 * @return true if the role of the socket should be server for CURVE mechanism.
 * @see #setAsServerCurve(boolean)
 */
public boolean getCurveServer()
{
    final boolean server = (boolean) base.getSocketOptx(zmq.ZMQ.ZMQ_CURVE_SERVER);
    return server;
}

/**
 * Tells if the socket will act as server for CURVE security.
 *
 * @return true if the role of the socket should be server for CURVE mechanism.
 * @see #setAsServerCurve(boolean)
 * @deprecated the naming is inconsistent with jzmq, please use {@link #getCurveServer()} instead
 */
@Deprecated
public boolean getAsServerCurve()
{
    return getCurveServer();
}
/**
 * Sets the socket's long term public key.
 * You must set this on CURVE client sockets, see zmq_curve(7).
 * You can provide the key as 32 binary bytes, or as a 40-character string
 * encoded in the Z85 encoding format.
 * The public key must always be used with the matching secret key.
 * To generate a public/secret key pair,
 * use {@link zmq.io.mechanism.curve.Curve#keypair()} or {@link zmq.io.mechanism.curve.Curve#keypairZ85()}.
 *
 * @param key the curve public key
 * @return true if the option was set, otherwise false
 * @see #getCurvePublicKey()
 */
public boolean setCurvePublicKey(byte[] key)
{
    final boolean ok = setSocketOpt(zmq.ZMQ.ZMQ_CURVE_PUBLICKEY, key);
    return ok;
}

/**
 * Sets the socket's long term server key.
 * You must set this on CURVE client sockets, see zmq_curve(7).
 * You can provide the key as 32 binary bytes, or as a 40-character string
 * encoded in the Z85 encoding format.
 * This key must have been generated together with the server's secret key.
 * To generate a public/secret key pair,
 * use {@link zmq.io.mechanism.curve.Curve#keypair()} or {@link zmq.io.mechanism.curve.Curve#keypairZ85()}.
 *
 * @param key the curve server key
 * @return true if the option was set, otherwise false
 * @see #getCurveServerKey()
 */
public boolean setCurveServerKey(byte[] key)
{
    final boolean ok = setSocketOpt(zmq.ZMQ.ZMQ_CURVE_SERVERKEY, key);
    return ok;
}

/**
 * Sets the socket's long term secret key.
 * You must set this on both CURVE client and server sockets, see zmq_curve(7).
 * You can provide the key as 32 binary bytes, or as a 40-character string
 * encoded in the Z85 encoding format.
 * To generate a public/secret key pair,
 * use {@link zmq.io.mechanism.curve.Curve#keypair()} or {@link zmq.io.mechanism.curve.Curve#keypairZ85()}.
 *
 * @param key the curve secret key
 * @return true if the option was set, otherwise false
 * @see #getCurveSecretKey()
 */
public boolean setCurveSecretKey(byte[] key)
{
    final boolean ok = setSocketOpt(zmq.ZMQ.ZMQ_CURVE_SECRETKEY, key);
    return ok;
}

/**
 * Retrieves the current long term public key for the socket in binary format of 32 bytes.
 *
 * @return the curve public key
 * @see #setCurvePublicKey(byte[])
 */
public byte[] getCurvePublicKey()
{
    final Object key = base.getSocketOptx(zmq.ZMQ.ZMQ_CURVE_PUBLICKEY);
    return (byte[]) key;
}

/**
 * Retrieves the current server key for the socket in binary format of 32 bytes.
 *
 * @return the curve server key
 * @see #setCurveServerKey(byte[])
 */
public byte[] getCurveServerKey()
{
    final Object key = base.getSocketOptx(zmq.ZMQ.ZMQ_CURVE_SERVERKEY);
    return (byte[]) key;
}

/**
 * Retrieves the current long term secret key for the socket in binary format of 32 bytes.
 *
 * @return the curve secret key
 * @see #setCurveSecretKey(byte[])
 */
public byte[] getCurveSecretKey()
{
    final Object key = base.getSocketOptx(zmq.ZMQ.ZMQ_CURVE_SECRETKEY);
    return (byte[]) key;
}

/**
 * The ZMQ_MECHANISM option shall retrieve the current security mechanism for the socket.
 *
 * @return the current mechanism.
 */
public Mechanism getMechanism()
{
    final Mechanisms mechanism = (Mechanisms) base.getSocketOptx(zmq.ZMQ.ZMQ_MECHANISM);
    return Mechanism.find(mechanism);
}
/**
 * When set, the socket will automatically send a hello message when a new connection is made or accepted.
 * You may set this on DEALER or ROUTER sockets.
 * The combination with ZMQ_HEARTBEAT_IVL is powerful and simplifies protocols,
 * as heartbeat and sending the hello message can be left out of protocols and be handled by zeromq.
 *
 * @param helloMsg the message to send on each new connection.
 * @return true if the option was set, otherwise false
 */
public boolean setHelloMsg(byte[] helloMsg)
{
    final boolean ok = setSocketOpt(zmq.ZMQ.ZMQ_HELLO_MSG, helloMsg);
    return ok;
}

/**
 * Bind to network interface. Start listening for new connections.
 *
 * @param addr the endpoint to bind to.
 * @return true if the socket was bound, otherwise false.
 */
public boolean bind(String addr)
{
    final boolean bound = base.bind(addr);
    mayRaise();
    return bound;
}
/**
 * Bind to network interface to a random port within the default dynamic range.
 * Start listening for new connections.
 *
 * @param addr the endpoint to bind to, without the port suffix.
 * @return the port number the socket was bound to.
 */
public int bindToRandomPort(String addr)
{
    final int port = bindToRandomPort(addr, DYNFROM, DYNTO);
    return port;
}
/**
 * Bind to network interface to a random port within the given range.
 * Start listening for new connections.
 *
 * @param addr the endpoint to bind to, without the port suffix.
 * @param min The minimum port in the range of ports to try.
 * @param max The maximum port in the range of ports to try.
 * @return the port number the socket was bound to.
 * @throws IllegalArgumentException if min is greater than max.
 * @throws ZMQException with EADDRINUSE if no free port was found.
 */
public int bindToRandomPort(String addr, int min, int max)
{
    // Fail fast with a clear message instead of the opaque "bound must be
    // positive" that Random.nextInt would raise on a negative bound.
    if (min > max) {
        throw new IllegalArgumentException("min port " + min + " is greater than max port " + max);
    }
    // Try up to 100 random ports in [min, max] before giving up.
    // ThreadLocalRandom avoids allocating a fresh Random on every call.
    for (int i = 0; i < 100; i++) {
        int port = java.util.concurrent.ThreadLocalRandom.current().nextInt(max - min + 1) + min;
        if (base.bind(String.format("%s:%s", addr, port))) {
            base.errno.set(0);
            return port;
        }
    }
    throw new ZMQException("Could not bind socket to random port.", ZError.EADDRINUSE);
}
/**
 * Connect to a remote application.
 *
 * @param addr the endpoint to connect to.
 * @return true if the socket was connected, otherwise false.
 */
public boolean connect(String addr)
{
    final boolean connected = base.connect(addr);
    mayRaise();
    return connected;
}

/**
 * Disconnect from remote application.
 *
 * @param addr the endpoint to disconnect from.
 * @return true if successful.
 */
public boolean disconnect(String addr)
{
    return base.termEndpoint(addr);
}

/**
 * Stop accepting connections on a socket.
 *
 * @param addr the endpoint to unbind from.
 * @return true if successful.
 */
public boolean unbind(String addr)
{
    return base.termEndpoint(addr);
}
/**
 * Sends a string (encoded with the ZMQ charset) as a single-part message.
 *
 * @param data the payload to send.
 * @return true if the message was queued, otherwise false.
 */
public boolean send(String data)
{
    final byte[] payload = data.getBytes(CHARSET);
    return send(payload, 0);
}

/**
 * Sends a string (encoded with the ZMQ charset) with the SNDMORE flag,
 * i.e. as a non-final part of a multi-part message.
 *
 * @param data the payload to send.
 * @return true if the message was queued, otherwise false.
 */
public boolean sendMore(String data)
{
    final byte[] payload = data.getBytes(CHARSET);
    return send(payload, zmq.ZMQ.ZMQ_SNDMORE);
}

/**
 * Sends a string (encoded with the ZMQ charset) with the given send flags.
 *
 * @param data  the payload to send.
 * @param flags the send flags.
 * @return true if the message was queued, otherwise false.
 */
public boolean send(String data, int flags)
{
    final byte[] payload = data.getBytes(CHARSET);
    return send(payload, flags);
}

/**
 * Sends a byte array as a single-part message.
 *
 * @param data the payload to send.
 * @return true if the message was queued, otherwise false.
 */
public boolean send(byte[] data)
{
    return send(data, 0);
}

/**
 * Sends a byte array with the SNDMORE flag,
 * i.e. as a non-final part of a multi-part message.
 *
 * @param data the payload to send.
 * @return true if the message was queued, otherwise false.
 */
public boolean sendMore(byte[] data)
{
    return send(data, zmq.ZMQ.ZMQ_SNDMORE);
}
/**
 * Sends a byte array with the given send flags.
 *
 * @param data  the payload to send.
 * @param flags the send flags.
 * @return true if the message was queued, otherwise false.
 */
public boolean send(byte[] data, int flags)
{
    final zmq.Msg msg = new zmq.Msg(data);
    final boolean sent = base.send(msg, flags);
    if (!sent) {
        mayRaise();
    }
    return sent;
}

/**
 * Sends a slice of a byte array with the given send flags.
 * The slice is copied, so the caller's array is not retained.
 *
 * @param data   the array containing the payload.
 * @param off    the offset of the first byte to send.
 * @param length the number of bytes to send.
 * @param flags  the send flags.
 * @return true if the message was queued, otherwise false.
 */
public boolean send(byte[] data, int off, int length, int flags)
{
    final byte[] slice = new byte[length];
    System.arraycopy(data, off, slice, 0, length);
    final zmq.Msg msg = new zmq.Msg(slice);
    final boolean sent = base.send(msg, flags);
    if (!sent) {
        mayRaise();
    }
    return sent;
}
/**
 * Queues a message created from data, so it can be sent.
 *
 * @param data ByteBuffer payload
 * @param flags a combination (with + or |) of:
 * <ul>
 * <li>{@link org.zeromq.ZMQ#DONTWAIT DONTWAIT}:
 * For socket types ({@link org.zeromq.ZMQ#DEALER DEALER}, {@link org.zeromq.ZMQ#PUSH PUSH})
 * that block when there are no available peers (or all peers have full high-water mark),
 * perform the operation in non-blocking mode; if the message cannot be queued
 * the method fails with errno set to EAGAIN.</li>
 * <li>{@link org.zeromq.ZMQ#SNDMORE SNDMORE}:
 * the message being sent is part of a multi-part message and further parts follow.</li>
 * <li>0 : blocking send of a single-part message or the last of a multi-part message</li>
 * </ul>
 * @return the number of bytes queued, -1 on error
 */
public int sendByteBuffer(ByteBuffer data, int flags)
{
    final zmq.Msg msg = new zmq.Msg(data);
    if (!base.send(msg, flags)) {
        mayRaise();
        return -1;
    }
    return msg.size();
}
/**
 * Queues a 'picture' message to the socket (or actor), so it can be sent.
 * <p>
 * The picture is a format string that defines the type of each frame, making
 * it easy to send a complex multiframe message in one call.  Each character
 * consumes zero or one of the supplied arguments:
 *
 * <table>
 * <caption> </caption>
 * <tr><td>i = int (stores signed integer)</td></tr>
 * <tr><td>1 = byte (stores 8-bit unsigned integer)</td></tr>
 * <tr><td>2 = int (stores 16-bit unsigned integer)</td></tr>
 * <tr><td>4 = long (stores 32-bit unsigned integer)</td></tr>
 * <tr><td>8 = long (stores 64-bit unsigned integer)</td></tr>
 * <tr><td>s = String</td></tr>
 * <tr><td>b = byte[]</td></tr>
 * <tr><td>f = ZFrame</td></tr>
 * <tr><td>m = ZMsg (sends all frames in the ZMsg)</td></tr>
 * <tr><td>z = sends zero-sized frame (0 arguments)</td></tr>
 * </table>
 * Note that s, b, f and m are encoded the same way; the choice is offered as
 * a convenience to the sender, which may or may not already have data in a
 * ZFrame or ZMsg.  Arguments are neither changed nor owned.
 * <p>
 * Also see {@link #recvPicture(String)} for receiving a multiframe picture.
 *
 * @param picture the frame-type format string
 * @param args arguments according to the picture
 * @return true if successful, false if sending failed for any reason
 */
@Draft
public boolean sendPicture(String picture, Object... args)
{
    final ZPicture codec = new ZPicture();
    return codec.sendPicture(this, picture, args);
}
/**
 * Queues a binary-encoded 'picture' message (zproto-compatible framing).
 * See {@link #sendPicture(String, Object...)} for the picture syntax.
 *
 * @param picture the frame-type format string
 * @param args arguments according to the picture
 * @return true if successful, false if sending failed for any reason
 */
@Draft
public boolean sendBinaryPicture(String picture, Object... args)
{
    final ZPicture codec = new ZPicture();
    return codec.sendBinaryPicture(this, picture, args);
}
/**
 * Receives a message, blocking.
 *
 * @return the message received, as an array of bytes; null on error.
 */
public byte[] recv()
{
    final int noFlags = 0;
    return recv(noFlags);
}
/**
 * Receives a message.
 *
 * @param flags either:
 * <ul>
 * <li>{@link org.zeromq.ZMQ#DONTWAIT DONTWAIT}:
 * perform the operation in non-blocking mode; when no message is available
 * the method fails with errno set to EAGAIN and returns null.</li>
 * <li>0 : block until a message arrives, or until the timeout set by
 * {@link #setReceiveTimeOut(int)} expires.</li>
 * </ul>
 * @return the message received, as an array of bytes; null on error.
 */
public byte[] recv(int flags)
{
    final zmq.Msg msg = base.recv(flags);
    if (msg == null) {
        mayRaise(); // throws on hard errors; EAGAIN/timeout falls through
        return null;
    }
    return msg.data();
}
/**
 * Receives a message in to a specified buffer.
 *
 * @param buffer byte[] to copy zmq message payload in to.
 * @param offset offset in buffer to write data
 * @param len max bytes to write to buffer.
 *            If len is smaller than the incoming message size,
 *            the message will be truncated.
 * @param flags either:
 * <ul>
 * <li>{@link org.zeromq.ZMQ#DONTWAIT DONTWAIT}:
 * Specifies that the operation should be performed in non-blocking mode.
 * If there are no messages available on the specified socket,
 * the method shall fail with errno set to EAGAIN and return -1.</li>
 * <li>0 : receive operation blocks until one message is successfully retrieved,
 * or stops when timeout set by {@link #setReceiveTimeOut(int)} expires.</li>
 * </ul>
 * @return the number of bytes read, -1 on error
 */
public int recv(byte[] buffer, int offset, int len, int flags)
{
    zmq.Msg msg = base.recv(flags);
    if (msg != null) {
        return msg.getBytes(0, buffer, offset, len);
    }
    // Consistency fix: every other send/recv variant routes failures through
    // mayRaise() (which throws ZMQException on hard errors and lets EAGAIN
    // fall through); this overload previously swallowed all errors as -1.
    mayRaise();
    return -1;
}
/**
 * Receives a message into the specified ByteBuffer.
 *
 * @param buffer the buffer to copy the zmq message payload into
 * @param flags either:
 * <ul>
 * <li>{@link org.zeromq.ZMQ#DONTWAIT DONTWAIT}:
 * perform the operation in non-blocking mode; when no message is available
 * the method fails with errno set to EAGAIN.</li>
 * <li>0 : block until a message arrives, or until the timeout set by
 * {@link #setReceiveTimeOut(int)} expires.</li>
 * </ul>
 * @return the number of bytes read, -1 on error
 */
public int recvByteBuffer(ByteBuffer buffer, int flags)
{
    final zmq.Msg msg = base.recv(flags);
    if (msg == null) {
        mayRaise();
        return -1;
    }
    buffer.put(msg.buf());
    return msg.size();
}
/**
 * Receives a message as a string, blocking.
 *
 * @return the message received, as a String object; null on no message.
 */
public String recvStr()
{
    final int noFlags = 0;
    return recvStr(noFlags);
}
/**
 * Receives a message as a string.
 *
 * @param flags either:
 * <ul>
 * <li>{@link org.zeromq.ZMQ#DONTWAIT DONTWAIT}:
 * perform the operation in non-blocking mode; when no message is available
 * the method fails with errno set to EAGAIN and returns null.</li>
 * <li>0 : block until a message arrives, or until the timeout set by
 * {@link #setReceiveTimeOut(int)} expires.</li>
 * </ul>
 * @return the message received, as a String object; null on no message.
 */
public String recvStr(int flags)
{
    // recv(flags) already raises on hard errors; null here means "no message".
    final byte[] raw = recv(flags);
    return raw == null ? null : new String(raw, CHARSET);
}
/**
 * Receive a 'picture' message from the socket (or actor).
 * <p>
 * The picture is a format string that defines the type of each frame, making
 * it easy to receive a complex multiframe message in one call.  Each
 * character produces zero or one elements in the result:
 *
 * <table>
 * <caption> </caption>
 * <tr><td>i = int (stores signed integer)</td></tr>
 * <tr><td>1 = int (stores 8-bit unsigned integer)</td></tr>
 * <tr><td>2 = int (stores 16-bit unsigned integer)</td></tr>
 * <tr><td>4 = long (stores 32-bit unsigned integer)</td></tr>
 * <tr><td>8 = long (stores 64-bit unsigned integer)</td></tr>
 * <tr><td>s = String</td></tr>
 * <tr><td>b = byte[]</td></tr>
 * <tr><td>f = ZFrame (creates zframe)</td></tr>
 * <tr><td>m = ZMsg (creates a zmsg with the remaining frames)</td></tr>
 * <tr><td>z = null, asserts empty frame (0 arguments)</td></tr>
 * </table>
 * <p>
 * Also see {@link #sendPicture(String, Object...)} for sending a
 * multiframe picture.
 *
 * @param picture the frame-type format string
 * @return the picture elements as object array
 */
@Draft
public Object[] recvPicture(String picture)
{
    final ZPicture codec = new ZPicture();
    return codec.recvPicture(this, picture);
}
/**
 * Receive a binary encoded 'picture' message from the socket (or actor).
 * Similar to {@link #recv()}, except the arguments are encoded in a binary
 * format compatible with zproto, designed to reduce memory allocations.
 *
 * @param picture a string defining the type of each argument; see
 *                {@link #sendBinaryPicture(String, Object...)} for the
 *                supported argument types.
 * @return the picture elements as object array
 **/
@Draft
public Object[] recvBinaryPicture(final String picture)
{
    final ZPicture codec = new ZPicture();
    return codec.recvBinaryPicture(this, picture);
}
/**
 * Start a monitoring socket where events can be received.
 * <p>
 * Lets an application thread track socket events (like connects) on a ZeroMQ socket.
 * Each call creates a {@link ZMQ#PAIR} socket bound to the given inproc://
 * endpoint; to collect the events, create your own PAIR socket and connect it
 * to that endpoint.
 * <br>
 * Supports only connection-oriented transports, that is, TCP, IPC.
 *
 * @param addr the endpoint to receive events from (must be inproc transport)
 * @param events a bitmask of the socket events to monitor; use
 *               {@link ZMQ#EVENT_ALL} for everything.
 * @return true if monitor socket setup is successful
 * @throws ZMQException
 */
public boolean monitor(String addr, int events)
{
    final boolean ok = base.monitor(addr, events);
    return ok;
}
/**
 * Converts the socket's pending errno into a {@link ZMQException}, except for
 * "no error" and EAGAIN (the expected non-blocking/timeout outcome), which
 * are left for the caller to report via a false/null/-1 return.
 */
private void mayRaise()
{
    final int err = base.errno();
    if (err == 0 || err == zmq.ZError.EAGAIN) {
        return;
    }
    throw new ZMQException(err);
}
/** Returns the last errno recorded on the underlying socket. */
public int errno()
{
    final int err = base.errno();
    return err;
}
/** Delegates to the underlying socket's string representation. */
@Override
public String toString()
{
    final String repr = base.toString();
    return repr;
}
/** Public view of the security mechanisms supported by the low-level layer. */
public enum Mechanism
{
    NULL(Mechanisms.NULL),
    PLAIN(Mechanisms.PLAIN),
    CURVE(Mechanisms.CURVE);
    // TODO add GSSAPI once it is implemented

    // Low-level mechanism this constant wraps.
    private final Mechanisms mech;

    Mechanism(Mechanisms zmq)
    {
        this.mech = zmq;
    }

    /** Maps a low-level mechanism back to its public constant, or null. */
    private static Mechanism find(Mechanisms mech)
    {
        Mechanism found = null;
        for (Mechanism candidate : values()) {
            if (candidate.mech == mech) {
                found = candidate;
                break;
            }
        }
        return found;
    }
}
}
/**
 * Provides a mechanism for applications to multiplex input/output events in a level-triggered fashion over a set of sockets
 */
public static class Poller implements Closeable
{
    public static final int POLLIN = zmq.ZMQ.ZMQ_POLLIN;
    public static final int POLLOUT = zmq.ZMQ.ZMQ_POLLOUT;
    public static final int POLLERR = zmq.ZMQ.ZMQ_POLLERR;
    // Initial capacity hint and (historical) growth step for the item list.
    private static final int SIZE_DEFAULT = 32;
    private static final int SIZE_INCREMENT = 16;
    // Selector is owned by the Context; it is returned to it in close().
    private final Selector selector;
    private final Context context;
    // Registered items; an item's index is its registration order.
    private List<PollItem> items;
    // Legacy default timeout in ms; -1 means block indefinitely.
    private long timeout;
    /**
     * Class constructor.
     *
     * @param context a 0MQ context previously created.
     * @param size the number of Sockets this poller will contain.
     */
    protected Poller(Context context, int size)
    {
        assert (context != null);
        this.context = context;
        selector = context.selector();
        assert (selector != null);
        items = new ArrayList<>(size);
        timeout = -1L;
    }
    /**
     * Class constructor, sized with {@code SIZE_DEFAULT}.
     *
     * @param context a 0MQ context previously created.
     */
    protected Poller(Context context)
    {
        this(context, SIZE_DEFAULT);
    }
    /** Returns the selector to the owning context; the poller is unusable afterwards. */
    @Override
    public void close()
    {
        context.close(selector);
    }
    /**
     * Register a Socket for polling on all events.
     *
     * @param socket the Socket we are registering.
     * @return the index identifying this Socket in the poll set.
     */
    public int register(Socket socket)
    {
        return register(socket, POLLIN | POLLOUT | POLLERR);
    }
    /**
     * Register a Channel for polling on all events.
     *
     * @param channel the Channel we are registering.
     * @return the index identifying this Channel in the poll set.
     */
    public int register(SelectableChannel channel)
    {
        return register(channel, POLLIN | POLLOUT | POLLERR);
    }
    /**
     * Register a Socket for polling on the specified events.
     * <p>
     * Automatically grow the internal representation if needed.
     *
     * @param socket the Socket we are registering.
     * @param events a mask composed by OR-ing POLLIN, POLLOUT and POLLERR.
     * @return the index identifying this Socket in the poll set.
     */
    public int register(Socket socket, int events)
    {
        return registerInternal(new PollItem(socket, events));
    }
    /**
     * Register a Channel for polling on the specified events.
     * <p>
     * Automatically grow the internal representation if needed.
     *
     * @param channel the Channel we are registering.
     * @param events a mask composed by OR-ing POLLIN, POLLOUT and POLLERR.
     * @return the index identifying this Channel in the poll set.
     */
    public int register(SelectableChannel channel, int events)
    {
        return registerInternal(new PollItem(channel, events));
    }
    /**
     * Register a pre-built PollItem.
     * <p>
     * Automatically grow the internal representation if needed.
     *
     * @param item the PollItem we are registering.
     * @return the index identifying this item in the poll set.
     */
    public int register(PollItem item)
    {
        return registerInternal(item);
    }
    /**
     * Appends the item to the poll set.
     *
     * @param item the PollItem we are registering.
     * @return the index identifying this item in the poll set.
     */
    private int registerInternal(PollItem item)
    {
        items.add(item);
        return items.size() - 1;
    }
    /**
     * Unregister a Socket from polling.
     *
     * @param socket the Socket to be unregistered
     */
    public void unregister(Socket socket)
    {
        unregisterInternal(socket);
    }
    /**
     * Unregister a Channel from polling.
     *
     * @param channel the Channel to be unregistered
     */
    public void unregister(SelectableChannel channel)
    {
        unregisterInternal(channel);
    }
    /**
     * Removes every item wrapping the given socket or raw channel.
     * NOTE: removal shifts the indices of later items.
     *
     * @param socket the Socket or SelectableChannel to be unregistered
     */
    private void unregisterInternal(Object socket)
    {
        for (Iterator<PollItem> it = items.iterator(); it.hasNext(); ) {
            PollItem item = it.next();
            // Identity comparison: an item matches via either its wrapper
            // Socket or its raw channel.
            if (item.socket == socket || item.getRawSocket() == socket) {
                it.remove();
            }
        }
    }
    /**
     * Get the PollItem associated with an index.
     *
     * @param index the desired index.
     * @return the PollItem associated with that index (or null).
     */
    public PollItem getItem(int index)
    {
        if (index < 0 || index >= items.size()) {
            return null;
        }
        return this.items.get(index);
    }
    /**
     * Get the socket associated with an index.
     *
     * @param index the desired index.
     * @return the Socket associated with that index (or null; channel-based
     *         items have no Socket).
     */
    public Socket getSocket(int index)
    {
        if (index < 0 || index >= items.size()) {
            return null;
        }
        return items.get(index).socket;
    }
    /**
     * Get the current poll timeout.
     *
     * @return the current poll timeout in milliseconds.
     * @deprecated Timeout handling has been moved to the poll() methods.
     */
    @Deprecated
    public long getTimeout()
    {
        return this.timeout;
    }
    /**
     * Set the poll timeout.
     *
     * @param timeout the desired poll timeout in milliseconds; values below
     *                -1 are silently ignored.
     * @deprecated Timeout handling has been moved to the poll() methods.
     */
    @Deprecated
    public void setTimeout(long timeout)
    {
        if (timeout >= -1L) {
            this.timeout = timeout;
        }
    }
    /**
     * Get the current poll set size.
     *
     * @return the current poll set size.
     */
    public int getSize()
    {
        return items.size();
    }
    /**
     * Get the index for the next position in the poll set size.
     *
     * @deprecated use getSize instead
     * @return the index for the next position in the poll set size.
     */
    @Deprecated
    public int getNext()
    {
        return items.size();
    }
    /**
     * Issue a poll call. If the poller's internal timeout value
     * has been set, use that value as timeout; otherwise, block
     * indefinitely.
     *
     * @return how many objects where signaled by poll ().
     */
    public int poll()
    {
        long tout = -1L;
        if (this.timeout > -1L) {
            tout = this.timeout;
        }
        return poll(tout);
    }
    /**
     * Issue a poll call with the given timeout.
     *
     * @param tout timeout in milliseconds; -1 blocks indefinitely, values
     *             below -1 are rejected (returns 0).
     * @return how many objects were signaled, or 0 when there is nothing to
     *         poll or the context has been terminated.
     */
    public int poll(long tout)
    {
        if (tout < -1) {
            return 0;
        }
        if (items.isEmpty()) {
            return 0;
        }
        // Unwrap each registered item's low-level poll item.
        zmq.poll.PollItem[] pollItems = new zmq.poll.PollItem[items.size()];
        for (int i = 0, j = 0; i < items.size(); i++) {
            if (items.get(i) != null) {
                pollItems[j++] = items.get(i).base;
            }
        }
        try {
            return zmq.ZMQ.poll(selector, pollItems, items.size(), tout);
        }
        catch (ZError.IOException e) {
            // A terminated context tears down the selector; report "nothing
            // ready" instead of propagating the I/O failure.
            if (context.isTerminated()) {
                return 0;
            }
            else {
                throw (e);
            }
        }
    }
    /**
     * Check whether the specified element in the poll set was signaled for input.
     *
     * @param index of element
     * @return true if the element was signaled.
     */
    public boolean pollin(int index)
    {
        if (index < 0 || index >= items.size()) {
            return false;
        }
        return items.get(index).isReadable();
    }
    /**
     * Check whether the specified element in the poll set was signaled for output.
     *
     * @param index of element
     * @return true if the element was signaled.
     */
    public boolean pollout(int index)
    {
        if (index < 0 || index >= items.size()) {
            return false;
        }
        return items.get(index).isWritable();
    }
    /**
     * Check whether the specified element in the poll set was signaled for error.
     *
     * @param index of element
     * @return true if the element was signaled.
     */
    public boolean pollerr(int index)
    {
        if (index < 0 || index >= items.size()) {
            return false;
        }
        return items.get(index).isError();
    }
}
/**
 * Pairs a high-level {@link Socket} (or raw {@link SelectableChannel}) with
 * its low-level poll registration and exposes the readiness flags.
 */
public static class PollItem
{
    private final zmq.poll.PollItem base;
    private final Socket socket; // null when constructed from a raw channel

    public PollItem(Socket socket, int ops)
    {
        this.socket = socket;
        base = new zmq.poll.PollItem(socket.base, ops);
    }

    public PollItem(SelectableChannel channel, int ops)
    {
        base = new zmq.poll.PollItem(channel, ops);
        socket = null;
    }

    final zmq.poll.PollItem base()
    {
        return base;
    }

    public final SelectableChannel getRawSocket()
    {
        return base.getRawSocket();
    }

    public final Socket getSocket()
    {
        return socket;
    }

    public final boolean isReadable()
    {
        return base.isReadable();
    }

    public final boolean isWritable()
    {
        return base.isWritable();
    }

    public final boolean isError()
    {
        return base.isError();
    }

    public final int readyOps()
    {
        return base.readyOps();
    }

    @Override
    public int hashCode()
    {
        // Fix: hashCode previously delegated to base (a distinct object per
        // PollItem) while equals() compares by socket/channel identity, so two
        // items wrapping the same Socket were equal yet hashed differently —
        // violating the equals/hashCode contract for hash-based collections.
        // Hash the same identities that equals() inspects.
        if (socket != null) {
            return System.identityHashCode(socket);
        }
        return System.identityHashCode(getRawSocket());
    }

    @Override
    public boolean equals(Object obj)
    {
        if (!(obj instanceof PollItem)) {
            return false;
        }
        PollItem target = (PollItem) obj;
        if (socket != null && socket == target.socket) {
            return true;
        }
        if (getRawSocket() != null && getRawSocket() == target.getRawSocket()) {
            return true;
        }
        return false;
    }
}
/**
 * Inner class: Event.
 * Immutable monitor-socket event: an event code, an optional argument and the
 * endpoint address it relates to.
 */
public static class Event
{
    private final int event;
    private final Object value;
    private final String address;

    public Event(int event, Object value, String address)
    {
        this.event = event;
        this.value = value;
        this.address = address;
    }

    /**
     * Receive an event from a monitor socket.
     *
     * @param socket the socket
     * @param flags the flags to apply to the receive operation.
     * @return the received event or null if no message was received.
     * @throws ZMQException
     */
    public static Event recv(Socket socket, int flags)
    {
        final zmq.ZMQ.Event raw = zmq.ZMQ.Event.read(socket.base, flags);
        if (raw == null) {
            return null;
        }
        return new Event(raw.event, raw.arg, raw.addr);
    }

    /**
     * Receive an event from a monitor socket with a blocking recv.
     *
     * @param socket the socket
     * @return the received event.
     * @throws ZMQException
     */
    public static Event recv(Socket socket)
    {
        return Event.recv(socket, 0);
    }

    public int getEvent()
    {
        return event;
    }

    public Object getValue()
    {
        return value;
    }

    public String getAddress()
    {
        return address;
    }
}
/** Helpers for CURVE security: keypair generation and Z85 (de)coding. */
public static class Curve
{
    public static final int KEY_SIZE = Options.CURVE_KEYSIZE;
    public static final int KEY_SIZE_Z85 = Options.CURVE_KEYSIZE_Z85;

    /**
     * <p>Returns a newly generated random keypair consisting of a public key
     * and a secret key, both encoded with {@link #z85Encode}.</p>
     *
     * @return Randomly generated {@link KeyPair}
     */
    public static KeyPair generateKeyPair()
    {
        final String[] pair = new zmq.io.mechanism.curve.Curve().keypairZ85();
        return new KeyPair(pair[0], pair[1]);
    }

    /**
     * <p>Decodes a Z85-encoded key into a byte array, following the
     * ZMQ RFC 32 specification.  The string length must be divisible by 5.</p>
     *
     * @param key Key to be decoded
     * @return The resulting key as byte array
     */
    public static byte[] z85Decode(String key)
    {
        return Z85.decode(key);
    }

    /**
     * <p>Encodes a binary block as a Z85 string, following the ZMQ RFC 32
     * specification.  The block size must be divisible by 4; a 32-byte CURVE
     * key encodes as 40 ASCII characters plus a null terminator.</p>
     *
     * @param key Key to be encoded
     * @return The resulting key as String in Z85
     */
    public static String z85Encode(byte[] key)
    {
        return zmq.io.mechanism.curve.Curve.z85EncodePublic(key);
    }

    /**
     * A container for a public and a corresponding secret key.
     * Keys have to be encoded in Z85 format.
     */
    public static class KeyPair
    {
        /**
         * Z85-encoded public key.
         */
        public final String publicKey;
        /**
         * Z85-encoded secret key (may be null for public-only pairs).
         */
        public final String secretKey;

        public KeyPair(final String publicKey, final String secretKey)
        {
            // Validate lengths up front so malformed keys fail fast.
            Utils.checkArgument(publicKey != null, "Public key cannot be null");
            Utils.checkArgument(publicKey.length() == Curve.KEY_SIZE_Z85, "Public key has to be Z85 format");
            Utils.checkArgument(secretKey == null || secretKey.length() == Curve.KEY_SIZE_Z85, "Secret key has to be null or in Z85 format");
            this.publicKey = publicKey;
            this.secretKey = secretKey;
        }
    }
}
}
|
package mondrian.olap;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.*;
/**
 * MDX query.
 *
 * <p>Holds the parsed parts of an MDX SELECT statement: the WITH-clause
 * formulas, the axes, the cube, the slicer (WHERE clause) and the requested
 * cell properties, plus any parameter definitions.</p>
 **/
public class Query extends QueryPart implements NameResolver {
    // enum axisType — symbolic axis codes used throughout this class.
    public static final int noAxis = -2;
    public static final int slicerAxis = -1;
    public static final int xAxis = 0;
    public static final int yAxis = 1;
    public static final EnumeratedValues axisEnum = new EnumeratedValues(
        new String[] {"none", "slicer", "x", "y"},
        new int[] {-2, -1, 0, 1});
    // enum sortDirection
    public static final int ascDirection = 0; // ascending inside hierarchy
    public static final int descDirection = 1; // descending inside hierarchy
    public static final int bascDirection = 2; // ascending disregarding hierarchy
    public static final int bdescDirection = 3; // descending disregarding hierarchy
    public static final int noneDirection = -1;
    // NOTE(review): only three names for five direction constants — confirm
    // whether "basc"/"bdesc" names were intended here.
    public static final EnumeratedValues directionEnum = new EnumeratedValues(
        new String[] {"ascending", "descending", "none"});
    // hidden string: marker prefix spliced into unique names of hidden
    // (formatting-only) members; see stripHiddenName/getHiddenMemberUniqueName.
    public static final String hidden = "hidden_";
    public Formula formulas[];   // WITH-clause formulas; may be null
    public QueryAxis axes[];     // query axes (COLUMNS, ROWS, ...)
    public Cube mdxCube;         // cube the query runs against
    public Exp slicer;           // WHERE clause, normalized by setSlicer()
    public QueryPart cellProps[]; // requested cell properties
    private Parameter parameters[]; // stores definitions of parameters
    private Connection connection;
    /** Constructs an empty Query; fields must be populated before use. */
    public Query()
    {}
    /**
     * Constructs a Query, looking up the cube by name in the connection's
     * schema.  The parameter list starts out empty.
     */
    public Query(
        Connection connection, Formula[] formulas, QueryAxis[] axes,
        String cube, Exp slicer, QueryPart[] cellProps)
    {
        this(
            connection,
            connection.getSchema().lookupCube(cube, true),
            formulas, axes, slicer, cellProps, new Parameter[0]);
    }
    /**
     * Construct a Query; called from clone().
     * Normalizes the axes, installs the slicer, then resolves this query and
     * its children and computes parameter usage.
     */
    public Query(
        Connection connection, Cube mdxCube,
        Formula[] formulas, QueryAxis[] axes, Exp slicer,
        QueryPart[] cellProps, Parameter[] parameters) {
        this.connection = connection;
        this.mdxCube = mdxCube;
        this.formulas = formulas;
        this.axes = axes;
        // Structural fixups happen before resolution walks the tree.
        normalizeAxes();
        setSlicer(slicer);
        this.cellProps = cellProps;
        this.parameters = parameters;
        resolve(this); // resolve self and children
        resolveParameters(); //calculate parameter's usage in query
    }
    /**
     * Deep-copies formulas, axes, slicer and parameters into a new Query.
     * NOTE(review): cellProps is passed as null rather than copied, so the
     * clone silently drops cell properties — confirm this is intentional.
     */
    public Object clone() throws CloneNotSupportedException
    {
        return new Query(
            connection, mdxCube,
            Formula.cloneArray(formulas), QueryAxis.cloneArray(axes),
            slicer == null ? null : (Exp) slicer.clone(), null,
            Parameter.cloneArray(parameters));
    }
public Query safeClone()
{
try {
return (Query) clone();
} catch (CloneNotSupportedException e) {
throw Util.getRes().newInternal("Query.clone() failed", e);
}
}
    /** Returns the connection this query executes against. */
    public Connection getConnection() {
        return connection;
    }
    /** Canonical MDX axis names, indexed by axis ordinal. */
    public static final String[] axisNames = {
        "COLUMNS", "ROWS", "PAGES", "CHAPTERS", "SECTIONS"
    };
private void normalizeAxes()
{
for (int i = 0; i < axes.length; i++) {
String correctName = axisNames[i];
if (!axes[i].axisName.equalsIgnoreCase(correctName)) {
boolean found = false;
for (int j = i + 1; j < axes.length; j++) {
if (axes[j].axisName.equalsIgnoreCase(correctName)) {
// swap axes
QueryAxis temp = axes[i];
axes[i] = axes[j];
axes[j] = temp;
found = true;
break;
}
}
}
}
}
    /**
     * Resolves this query's formulas, axes, slicer and parameters against
     * query {@code q} (normally this query itself), replacing each child with
     * its resolved form.
     *
     * @param q the query providing name-resolution context
     * @return this query
     */
    public QueryPart resolve(Query q)
    {
        if (formulas != null) {
            //resolving of formulas should be done in two parts
            //because formulas might depend on each other, so all calculated
            //mdx elements have to be defined during resolve
            for (int i = 0; i < formulas.length; i++)
                formulas[i] = (Formula) formulas[i].createElement(q);
            for (int i = 0; i < formulas.length; i++)
                formulas[i] = (Formula) formulas[i].resolve(q);
        }
        if (axes != null)
            for (int i = 0; i < axes.length; i++)
                axes[i] = (QueryAxis) axes[i].resolve(q);
        if (slicer != null) {
            // setSlicer re-normalizes the resolved expression into a tuple.
            setSlicer((Exp) slicer.resolve(q));
        }
        // Now that our Parameters have been created (from FunCall's to
        // Parameter() and ParamRef()), resolve them.
        for (int i = 0; i < parameters.length; i++)
            parameters[i] = (Parameter) parameters[i].resolve(q);
        return this;
    }
public void unparse(PrintWriter pw, ElementCallback callback)
{
callback.disableHiddenNameLookup(true);
if (formulas != null) {
for (int i = 0; i < formulas.length; i++) {
if (i == 0) {
pw.print("with ");
} else {
pw.print(" ");
}
formulas[i].unparse(pw, callback);
pw.println();
}
}
callback.disableHiddenNameLookup(false);
pw.print("select ");
if (axes != null) {
for (int i = 0; i < axes.length; i++) {
axes[i].axisOrdinal = i;
axes[i].unparse(pw, callback);
if (i < axes.length - 1) {
pw.println(", ");
pw.print(" ");
} else {
pw.println();
}
}
}
if (mdxCube != null) {
String cubeName = null;
cubeName = callback.registerItself(mdxCube);
if (cubeName == null) {
cubeName = mdxCube.getName();
}
pw.println("from [" + cubeName + "]");
}
if (slicer != null) {
pw.print("where ");
slicer.unparse(pw, callback);
pw.println();
}
}
/** This class tells {@link #unparse} to expand parameters, because the
* query is intended for Plato and substitute hidden members with existing
* ones */
class PlatoCallBack extends ElementCallback {
/** Maps between existing and hidden members **/
HashMap hiddenNames = new HashMap();
boolean disableLookup = false;
public PlatoCallBack() {
}
/**creates PlatoCallBack object and initializes hiddenMembers mapping,
* using names of formulas. Later, this.hiddenMembers will be used to
* find hidden names for existing ones. We use this substitution to
* allow formatting of existing members. If do not need to use this
* feature call other constructor*/
public PlatoCallBack(Formula formulas[])
{
if (formulas == null) {
return;
}
for (int i = 0; i < formulas.length; i++) {
if (!formulas[i].isHidden())
continue;
String hiddenName = formulas[i].getUniqueName();
int offset = hiddenName.indexOf(Query.hidden);
String name = hiddenName.substring(0, offset) +
hiddenName.substring(offset + Query.hidden.length());
hiddenNames.put(name, hiddenName);
}
}
public boolean isPlatoMdx()
{return true;}
/** returns hiddenName for given uName if it exists. This feature is
* used for formatting existing measures*/
public String findHiddenName(String uName)
{
if (disableLookup) {
return null;
}
return (String) hiddenNames.get(uName);
}
/** disables or enables hidden name lookup*/
public void disableHiddenNameLookup(boolean disableLookup) {
this.disableLookup = disableLookup;
}
}
public String toPlatoMdx()
{
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
ElementCallback callback = new PlatoCallBack(formulas);
unparse(pw, callback);
return sw.toString();
}
public String toWebUIMdx()
{
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
ElementCallback callback = new ElementCallback();
unparse(pw, callback);
resetParametersPrintProperty();
return sw.toString();
}
/**
* Returns the axis which the result axis is based on, taking into account
* any axis re-ordering.
*
* <p>Suppose that they've written
* <pre>select {} on rows, {} on pages from Sales</pre>
*
* <p>Then we will execute
* <pre>select {} on columns, {} on rows from Sales</pre>
*
* getLogicalAxis(0) = 1, meaning that axis 0 of the Plato cellset matches
* the rows (1) axis of their query; likewise, getLogicalAxis(1) = 2.
*
* @param ordinal of axis in cellset
* @return axis label in original query (0 = columns, 1 = rows, etc.)
*/
public int getLogicalAxis(int iPhysicalAxis)
{
if (iPhysicalAxis == slicerAxis || iPhysicalAxis == axes.length) {
return slicerAxis; // slicer is never permuted
}
String axisName = axes[iPhysicalAxis].axisName;
for (int i = 0; i < axisNames.length; i++) {
if (axisName.equalsIgnoreCase(axisNames[i])) {
return i;
}
}
return noAxis;
}
/** The inverse of {@link #getLogicalAxis}. */
public int getPhysicalAxis(int iLogicalAxis)
{
if (iLogicalAxis < 0) {
return iLogicalAxis;
}
String axisName = axisNames[iLogicalAxis];
for (int i = 0; i < axes.length; i++) {
if (axes[i].axisName.equalsIgnoreCase(axisName)) {
return i;
}
}
return noAxis;
}
/** Convert an axis name, such as "x" or "ROWS" into an axis code. */
public static int getAxisCode(String axisName)
{
if (axisName.equalsIgnoreCase("slicer")) {
return slicerAxis;
} else if (axisName.equals("none")) {
return noAxis;
} else if (axisName.equals("x")) {
axisName = "COLUMNS";
} else if (axisName.equals("y")) {
axisName = "ROWS";
}
for (int i = 0; i < axisNames.length; i++) {
if (axisNames[i].equalsIgnoreCase(axisName)) {
return i;
}
}
return noAxis;
}
/** Inverse of {@link #getAxisCode} */
public static String getAxisName(int iAxis)
{
switch (iAxis) {
case noAxis:
return "NONE";
case slicerAxis:
return "SLICER";
default:
return axisNames[iAxis];
}
}
/** constructs hidden unique name based on given uName. It is used for
* formatting existing measures */
public static String getHiddenMemberUniqueName(String uName)
{
int i = uName.lastIndexOf("].[");
return uName.substring(0, i + 3) + Query.hidden + uName.substring(i+3);
}
/** checks for hidden string in name and strips it out. It looks only for
* first occurence */
public static String stripHiddenName(String name)
{
if (name.indexOf(Query.hidden) != -1)
return name.substring(0, name.indexOf(Query.hidden)) +
name.substring(name.indexOf(Query.hidden) +
Query.hidden.length());
return name;
}
public static String getHiddenMemberFormulaDefinition(String uName)
{return uName;}
/** @return query string as it was send from webUI or workBench
*/
public String toString()
{ return toWebUIMdx();}
public Object[] getChildren()
{
// Chidren are axes, slicer, and formulas (in that order, to be
// consistent with replaceChild).
ArrayList list = new ArrayList();
for (int i = 0; i < axes.length; i++) {
list.add(axes[i]);
}
if (slicer != null) {
list.add(slicer);
}
for (int i = 0; i < formulas.length; i++) {
list.add(formulas[i]);
}
return list.toArray();
}
    /**
     * Replaces (or, when {@code with} is null, removes) the i-th child of
     * this query.  Child ordinals follow {@link #getChildren}'s order: axes
     * first, then the slicer, then formulas.
     *
     * @param i ordinal of the child to replace
     * @param with the replacement, or null to delete the child
     */
    public void replaceChild(int i, QueryPart with)
    {
        int i0 = i; // remember the original ordinal for the error message
        if( i < axes.length ){
            if (with == null) {
                // We need to remove the axis. Copy the array, omitting
                // element i.
                QueryAxis[] oldAxes = axes;
                axes = new QueryAxis[oldAxes.length - 1];
                for (int j = 0; j < axes.length; j++) {
                    axes[j] = oldAxes[j < i ? j : j + 1];
                }
            } else {
                axes[i] = (QueryAxis) with;
            }
            return;
        }
        // Shift the ordinal past the axes; index 0 now means the slicer.
        i -= axes.length;
        if (i == 0) {
            setSlicer((Exp) with); // replace slicer
            return;
        }
        // Shift past the slicer slot; the remainder indexes the formulas.
        i -= 1;
        if (i < formulas.length) {
            if (with == null) {
                // We need to remove the formula. Copy the array, omitting
                // element i.
                Formula[] oldFormulas = formulas;
                formulas = new Formula[oldFormulas.length - 1];
                for (int j = 0; j < formulas.length; j++) {
                    formulas[j] = oldFormulas[j < i ? j : j + 1];
                }
            } else {
                formulas[i] = (Formula) with;
            }
            return;
        }
        throw Util.getRes().newInternal(
            "Query child ordinal " + i0 + " out of range (there are " +
            axes.length + " axes, " + formulas.length + " formula)");
    }
/** Normalize slicer into a tuple of members; for example, '[Time]' becomes
* '([Time].DefaultMember)'. todo: Make slicer an Axis, not an Exp, and
* put this code inside Axis. */
private void setSlicer(Exp exp)
{
slicer = exp;
if (slicer instanceof Level ||
slicer instanceof Hierarchy ||
slicer instanceof Dimension) {
slicer = new FunCall(
"DefaultMember", new Exp[] {slicer}, FunDef.TypeProperty);
}
if (slicer == null) {
;
} else if (slicer instanceof FunCall &&
((FunCall) slicer).isCallToTuple()) {
;
} else {
slicer = new FunCall(
"()", new Exp[] {slicer}, FunDef.TypeParentheses);
}
}
private boolean usesDimension(Dimension dimension)
{
for (int iAxis = 0; iAxis < axes.length; iAxis++) {
if (axes[iAxis].set.usesDimension(dimension)) {
return true;
}
}
if (slicer != null && slicer.usesDimension(dimension)) {
return true;
}
return false;
}
    /** Returns an enumeration, each item of which is an Ob containing a
     * dimension which does not appear in any Axis or in the slicer.
     * NOTE(review): the implementation currently returns ALL of the cube's
     * dimensions without consulting usesDimension(), contradicting this
     * contract — confirm whether filtering was intended. */
    public Iterator unusedDimensions() {
        Dimension[] mdxDimensions = mdxCube.getDimensions();
        return Arrays.asList(mdxDimensions).iterator();
    }
public void addLevelToAxis(int iAxis, Level level)
{
Util.assertTrue(iAxis < axes.length, "axis ordinal out of range");
QueryAxis axis = axes[iAxis];
axis.addLevel(level);
}
    /**
     * Walk over the tree looking for an expression of a particular hierarchy.
     * If there is one, return the walker pointing at it (from which we can
     * get its parent); otherwise, return null.
     *
     * @param hierarchy the hierarchy to search for
     * @return the positioned walker, or null if no matching expression exists
     **/
    private Walker findHierarchy(Hierarchy hierarchy)
    {
        Walker walker = new Walker(this);
        while (walker.hasMoreElements()) {
            Object o = walker.nextElement();
            if (o instanceof Formula) {
                walker.prune();
                continue; // ignore expressions in formula
            } else if (o instanceof Exp) {
                Exp e = (Exp) o;
                // expression must represent a set or be mdx element
                if (!e.isSet() && !e.isElement())
                    continue;
                // if object's parent is a function (except a tuple/parentheses
                // or CrossJoin), algorithm shall look only at the first child
                Object parent = walker.getParent();
                if (parent instanceof FunCall) {
                    FunCall funCall = (FunCall) parent;
                    if (!funCall.isCallToTuple() &&
                        !funCall.isCallToCrossJoin() &&
                        funCall.args[0] != o) {
                        walker.prune();
                        continue;
                    }
                }
                Hierarchy obExpHierarchy = e.getHierarchy();
                if (obExpHierarchy == null)
                    // set must have a dimension (e.g. disallow CrossJoin)
                    continue;
                if (obExpHierarchy.equals(hierarchy))
                    return walker; // success!
                else if( e instanceof FunCall && ((FunCall) e).isCallToFilter()){
                    // tell walker not to look at any more children of Filter
                    walker.prune();
                }
            }
        }
        return null; // no expression of that dimension found
    }
    /**
     * Returns the hierarchies in an expression
     *
     * <p>Same traversal discipline as {@link #findHierarchy}: for function
     * calls other than tuple/parentheses or CrossJoin only the first
     * argument is searched; expressions without a hierarchy (e.g. a
     * CrossJoin itself) are skipped.  Duplicates are removed via a set.</p>
     *
     * @param queryPart the subtree to scan (an axis, the slicer, ...)
     * @return the distinct hierarchies found, in no particular order
     * @see #findHierarchy
     */
    private Hierarchy[] collectHierarchies(QueryPart queryPart)
    {
        Walker walker = new Walker(queryPart);
        HashSet set = new HashSet();
        while (walker.hasMoreElements()) {
            Object o = walker.nextElement();
            if (o instanceof Exp) {
                Exp e = (Exp) o;
                if (!e.isSet() && !e.isMember())
                    continue; // expression must represent a set or be a member
                // if object's parent is a function (except tuple/parentheses
                // or CrossJoin), algorithm shall look only at the first child
                Object parent = walker.getParent();
                if (parent instanceof FunCall) {
                    FunCall funCall = (FunCall) parent;
                    if (!funCall.isCallToTuple() &&
                        !funCall.isCallToCrossJoin() &&
                        funCall.args[0] != o) {
                        walker.prune();
                        continue;
                    }
                }
                Hierarchy obExpHierarchy = e.getHierarchy();
                if (obExpHierarchy == null)
                    // set must have a dimension (e.g. disallow CrossJoin)
                    continue;
                set.add(obExpHierarchy);
            }
        }
        return (Hierarchy[]) set.toArray(new Hierarchy[0]);
    }
/** Place expression 'exp' at position 'iPositionOnAxis' on axis 'axis'. */
private void putInAxisPosition(Exp exp, int axis, int iPositionOnAxis)
{
switch (axis) {
case slicerAxis:
// slicer shall contain at most one tuple
if (slicer == null) {
setSlicer(exp);
} else {
slicer.addAtPosition(exp, iPositionOnAxis);
}
break;
default:
Util.assertTrue(axis >= 0);
if (axis >= axes.length) {
Util.assertTrue(axis == axes.length);
QueryAxis[] oldAxes = axes;
axes = new QueryAxis[oldAxes.length + 1];
for (int i = 0; i < oldAxes.length; i++) {
axes[i] = oldAxes[i];
}
axes[oldAxes.length] = new QueryAxis(
false, null, this.getAxisName(axis),
QueryAxis.subtotalsUndefined);
}
Exp axisExp = axes[axis].set;
if (axisExp == null || axisExp.isEmptySet()) {
// Axis is empty, so just put expression there.
axes[axis].set = exp;
} else {
if (iPositionOnAxis == 0) {
// 'exp' has to go first:
// axisExp
// becomes
// CrossJoin(exp, axisExp)
FunCall funCrossJoin = new FunCall(
"CrossJoin", new Exp[] {exp, axisExp});
axes[axis].set = funCrossJoin;
} else if (iPositionOnAxis < 0) {
// 'exp' has to go last:
// axisExp
// becomes
// CrossJoin(axisExp, exp)
FunCall funCrossJoin = new FunCall(
"CrossJoin", new Exp[] {axisExp, exp});
axes[axis].set = funCrossJoin;
} else {
int i = axes[axis].set.addAtPosition(exp, iPositionOnAxis);
if (i != -1) {
// The expression was not added, because the position
// equalled or exceded the number of hierarchies. Add
// it on the end.
FunCall funCrossJoin = new FunCall(
"CrossJoin", new Exp[] {axisExp, exp});
axes[axis].set = funCrossJoin;
}
}
}
break;
}
}
/**
* Toggle the drill state of each member of "dimension".
*/
public void drillDown(Level level)
{
Walker walker = findHierarchy(level.getHierarchy());
Util.assertTrue(
walker != null,
"could not find expression of dimension " +
level.getDimension());
Exp e = (Exp) walker.currentElement();
FunCall funDrillDownLevel = new FunCall(
"DrillDownLevel", new Exp[] {e});
QueryPart parent = (QueryPart) walker.getParent();
parent.replaceChild(walker.getOrdinal(), funDrillDownLevel);
}
    /**
     * Restrict the axis which contains "level" to only return members
     * between "startMember" and "endMember", inclusive.
     *
     * <p>Builds the cropping set, then delegates to
     * {@link #crop(Level, Exp)}.  A single member becomes a braces set;
     * a range becomes a Generate over the ':' range, pulling in all
     * descendants of each range member (because ':' alone only yields
     * members of one level).</p>
     */
    public void crop(
        Level level, Member startMember, Member endMember)
    {
        // Form the cropping expression. If we have a range, include all
        // descendents of the ends of the range, because ':' only includes
        // members at the same level.
        Hierarchy hierarchy = level.getHierarchy();
        Exp expCrop =
            startMember.equals(endMember)
            ?
            // e.g. {[Beverages]}
            new FunCall("{}", new Exp[] {startMember}, FunDef.TypeBraces)
            :
            // Generate([Beverages]:[Breakfast Foods],
            //          Descendants([Products].CurrentMember,
            //                      [Products].[(All)],
            //                      SELF_BEFORE_AFTER))
            new FunCall(
                "Generate",
                new Exp[] {
                    new FunCall(
                        ":",
                        new Exp[] {startMember, endMember},
                        FunDef.TypeInfix),
                    new FunCall(
                        "Descendants",
                        new Exp[] {
                            new FunCall(
                                "CurrentMember",
                                new Exp[] {hierarchy},
                                FunDef.TypeProperty),
                            hierarchy.lookupLevel("(All)"),
                            Literal.createSymbol("SELF_BEFORE_AFTER")
                        })
                });
        crop(level, expCrop);
    }
    /**
     * Restricts the first set expression of <code>level</code>'s dimension
     * by intersecting it with the cropping set <code>expCrop</code>.
     *
     * The technique is to find the expression which generates the set of
     * members for that dimension, then intersect it with the cropping set.
     *
     * For example,
     *
     * select
     *    {[Measures].[Unit Sales], [Measures].[Sales Count]} on columns,
     *    CROSSJOIN(
     *      [Product].[Product Department].MEMBERS,
     *      [Gender].[Gender].MEMBERS) on rows
     * from Sales
     *
     * when cropped with {[Beverages], [Breakfast Foods]} becomes
     *
     * select
     *    {[Measures].[Unit Sales], [Measures].[Sales Count]} on columns,
     *    CROSSJOIN(
     *      INTERSECT(
     *        [Product].[Product Department].MEMBERS,
     *        {[Beverages], [Breakfast Foods]}),
     *      [Gender].[Gender].MEMBERS) on rows
     * from Sales
     *
     * Asserts if no set of the right dimension is found.
     */
    private void crop(Level level, Exp expCrop)
    {
        boolean found = false;
        Walker walker = new Walker(this);
        while (walker.hasMoreElements()) {
            Object o = walker.nextElement();
            if (o instanceof Exp) {
                Exp e = (Exp) o;
                if (!e.isSet())
                    continue; // expression must represent a set
                Dimension dim = e.getDimension();
                if (dim == null)
                    // set must have a dimension (e.g. disallow Crossjoin)
                    continue;
                if (!dim.equals(level.getDimension()))
                    continue; // set must be of right dimension
                // Replace e with Intersect(e, expCrop) in its parent.
                FunCall funIntersect = new FunCall(
                    "Intersect", new Exp[] {e, expCrop});
                QueryPart parent = (QueryPart) walker.getParent();
                parent.replaceChild(walker.getOrdinal(), funIntersect);
                found = true;
                break;
            }
        }
        Util.assertTrue(
            found,
            "could not find expression of dimension " +
            level.getDimension());
    }
/** SetParameter. Assign 'value' to parameter 'name'*/
public void setParameter( String sParameter, String value )
{
Parameter param = lookupParam( sParameter );
if (param == null)
{
throw Util.getRes().newMdxParamNotFound(sParameter);
}
param.setValue(value, this);
}
    /**
     * Moves <code>hierarchy</code> from <code>fromAxis</code>, to
     * <code>toAxis</code> at <code>position</code> (-1 means last position).
     * The hierarchy is added if <code>fromAxis</code> is {@link #noAxis}, and
     * removed if <code>toAxis</code> is {@link #noAxis}.
     *
     * <p>If the target axis is the slicer, selects the [All] member;
     * otherwise, if the hierarchy is already on an axis, keep the same
     * drill-state; otherwise, select the first level (children), if expand =
     * true, else put default member.</p>
     *
     * <p>Removal handles five shapes of parent: a whole axis, the slicer
     * itself, a CrossJoin, a tuple on the slicer, and a Parameter wrapper
     * (whose own parent may again be a tuple or CrossJoin).</p>
     **/
    public void moveHierarchy(
        Hierarchy hierarchy, int fromAxis, int toAxis, int iPositionOnAxis,
        boolean bExpand)
    {
        Exp e;
        // Find the hierarchy in its current position.
        Walker walker = findHierarchy(hierarchy.getHierarchy());
        if (fromAxis == noAxis) {
            if (walker != null) {
                throw Util.getRes().newMdxHierarchyUsed(hierarchy.getUniqueName());
            }
            e = null;
        } else {
            if (walker == null) {
                throw Util.getRes().newMdxHierarchyNotUsed(hierarchy.getUniqueName());
            }
            // Remove from current position.
            e = (Exp) walker.currentElement();
            QueryPart parent = (QueryPart) walker.getParent();
            Util.assertTrue(parent != null, "hierarchy must have parent");
            if (parent instanceof QueryAxis) {
                // Axis only contains this hierarchy; remove it.
                Util.assertTrue(walker.getAncestor(2) == this);
                int iAxis = walker.getAncestorOrdinal(1);
                replaceChild(iAxis, null);
                // Removing an axis shifts later axis ordinals down by one.
                if (toAxis > iAxis) {
                    --toAxis;
                }
            } else if (parent instanceof Query && fromAxis == slicerAxis) {
                // Hierarchy sits on the slicer and it's the only hierarchy on
                // the slicer (otherwise the parent would be _Tuple with at
                // least 2 children) and it is being removed - Simply delete
                // the slicer
                slicer = null;
            } else if (parent instanceof FunCall &&
                       ((FunCall) parent).isCallToCrossJoin()) {
                // Function must be CrossJoin. If 'e' is our expression, then
                //   f(..., CrossJoin(e, other), ...)
                // becomes
                //   f(..., other, ...).
                int iOrdinal = walker.getOrdinal();
                int iOtherOrdinal = 1 - iOrdinal;
                Exp otherExp = ((FunCall) parent).args[iOtherOrdinal];
                QueryPart grandparent = (QueryPart) walker.getAncestor(2);
                int iParentOrdinal = walker.getAncestorOrdinal(1);
                grandparent.replaceChild(iParentOrdinal, (QueryPart) otherExp);
            } else if (parent instanceof FunCall &&
                       ((FunCall)parent).isCallToTuple() &&
                       fromAxis == slicerAxis ){
                // Remove our member from the slicer tuple.
                int iOrdinal = walker.getOrdinal();
                ((FunCall)slicer).removeChild( iOrdinal );
            } else if (parent instanceof Parameter) {
                // The hierarchy is a child of parameter, so we need to remove
                // the parameter itself.
                QueryPart grandparent = (QueryPart) walker.getAncestor(2);
                int iParentOrdinal = walker.getAncestorOrdinal(1);
                if( grandparent instanceof FunCall &&
                    ((FunCall)grandparent).isCallToTuple() &&
                    fromAxis == slicerAxis ){
                    ((FunCall)slicer).removeChild( iParentOrdinal );
                    if( ((FunCall)slicer).args.length == 0 ){
                        // the slicer is empty now
                        slicer = null;
                    }
                } else if (grandparent instanceof FunCall &&
                           ((FunCall) grandparent).isCallToCrossJoin()) {
                    // Function must be CrossJoin. If 'e' is our expression,
                    // then
                    //   f(..., CrossJoin(e, other), ...)
                    // becomes
                    //   f(..., other, ...).
                    int iOtherOrdinal = 1 - iParentOrdinal;
                    Exp otherExp = ((FunCall) grandparent).args[iOtherOrdinal];
                    QueryPart grandGrandparent = (QueryPart)
                        walker.getAncestor(3);
                    int iGrandParentOrdinal = walker.getAncestorOrdinal(2);
                    grandGrandparent.replaceChild(
                        iGrandParentOrdinal, (QueryPart) otherExp);
                }
            } else {
                throw Util.getRes().newInternal(
                    "hierarchy starts under " + parent.toString());
            }
        }
        // Move to slicer?
        switch (toAxis) {
        case slicerAxis:
            // we do not care of expression is already a Member, because it's a
            // very rare case; we have to make a new expression containing
            // default
            e = new FunCall("DefaultMember", new Exp[] {hierarchy}, FunDef.TypeProperty);
            putInAxisPosition(e, toAxis, iPositionOnAxis);
            break;
        case xAxis:
        case yAxis:
            // If this hierarchy is new, create an expression to display the
            // children of the default member (which is, we hope, the root
            // member).
            if (e == null) {
                if (bExpand)
                    e = new FunCall("Children", new Exp[] {hierarchy}, FunDef.TypeProperty);
                else {
                    e = new FunCall("DefaultMember", new Exp[] {hierarchy},
                                    FunDef.TypeProperty);
                    e = new FunCall("{}", new Exp[] {e}, FunDef.TypeBraces);
                }
            } else if (fromAxis == slicerAxis) {
                // Expressions on slicers are stored as DefaultMember. We need
                // to convert it to $Brace expression first (curly braces
                // needed).
                e = new FunCall("{}", new Exp[] {e}, FunDef.TypeBraces);
            }
            // Move to regular axis.
            putInAxisPosition(e, toAxis, iPositionOnAxis);
            break;
        case noAxis:
            // Discard hierarchy. Nothing to do.
            break;
        default:
            throw Util.getRes().newInternal("bad axis code: " + toAxis);
        }
    }
    /**
     * Filters the set of elements which are returned from a hierarchy. If
     * hierarchy is in the slicer, the set must contain exactly one element.
     * (Hierarchy must be on the axis specified.)
     *
     * 'members' are the members to be displayed. They may be from different
     * levels - for example, {[USA], [USA].[California]} - and their order is
     * important.
     *
     * @param hierarchy the hierarchy to filter
     * @param axis axis code; the hierarchy is added there first if unused
     * @param members replacement members; empty falls back to DefaultMember
     **/
    public void filterHierarchy(
        Hierarchy hierarchy, int /*axisType*/ axis, Member[] members)
    {
        // Check that there can be only one filter per hierarchy applied on
        // slicer.
        if (axis == slicerAxis && members.length > 1) {
            throw Util.getRes().newInternal(
                "there can be only one filter per hierarchy on slicer");
        }
        // Check that members are all in the right hierarchy.
        for (int iMember = 0; iMember < members.length; iMember++) {
            if (!members[iMember].getHierarchy().equals(hierarchy)) {
                throw Util.getRes().newInternal(
                    "member " + members[iMember] +
                    " is not in hierarchy " + hierarchy);
            }
        }
        Walker walker = findHierarchy(hierarchy.getHierarchy());
        if (walker == null) {
            // Hierarchy is not currently used. Put it at the last position on
            // the desired axis, then filter it.
            moveHierarchy(hierarchy, noAxis, axis, -1, true);
            walker = findHierarchy(hierarchy.getHierarchy());
            Util.assertTrue(walker != null, "hierarchy wasn't added");
        }
        // The expression we find may be either:
        // a) a member, for example '[Gender].[M]' in
        //      ([Gender].[M], [Marital Status].[S])
        //    or in
        //      CrossJoin([Marital Status].Members, {[Gender].[M]}); or
        // b) a set, for example '[Gender].Members' in
        //      CrossJoin([Store].Members, [Gender].Members).
        // We replace a set with a set, and a member with a member.
        QueryPart parent = (QueryPart) walker.getParent();
        int iOrdinal = walker.getOrdinal();
        Exp e = (Exp) walker.currentElement();
        if (e.isMember()) {
            Util.assertTrue(
                members.length == 1,
                "filterHierarchy cannot replace member with set");
            parent.replaceChild(iOrdinal, (QueryPart) members[0]);
        } else if (e.isSet()) {
            // Build a set out of the members supplied using the "{}" operator.
            // If there are no members, revert to the default member (for bug
            // 13728).
            Exp[] exps = members;
            if (members.length == 0) {
                exps = new Exp[] {new FunCall(
                    "DefaultMember", new Exp[] {hierarchy}, FunDef.TypeProperty)};
            }
            // Neither slicer nor the tuple function (which is likely to occur
            // in a slicer) can have a set as a child, so reduce a singleton
            // set to a member in these cases.
            Exp exp =
                exps.length == 1 &&
                (parent instanceof Query || // because e is slicer
                 parent instanceof FunCall &&
                 ((FunCall) parent).isCallToTuple())
                ? exps[0]
                : new FunCall("{}", exps, FunDef.TypeBraces);
            parent.replaceChild(iOrdinal, (QueryPart) exp);
        } else {
            throw Util.newInternal(
                "findHierarchy returned a " +
                Exp.catEnum.getName(e.getType()));
        }
    }
/** ToggleDrillState. */
public void toggleDrillState(Member member)
{
Walker walker = findHierarchy(member.getHierarchy());
if (walker == null) throw Util.getRes().newInternal(
"member's dimension is not used: " + member.toString());
// If 'e' is our expression, then
// f(..., e, ...)
// becomes
// f(..., ToggleDrillState(e, {member}), ...)
Exp e = (Exp) walker.currentElement();
FunCall funToggle = new FunCall(
"ToggleDrillState", new Exp[] {
e, new FunCall(
"{}",
new Exp[] {member},
FunDef.TypeBraces)});
QueryPart parent = (QueryPart) walker.getParent();
int iOrdinal = walker.getOrdinal();
parent.replaceChild(iOrdinal, funToggle);
}
/** Sort. 'axis' is the axis to sort; direction is one of {ascending,
* descending, none}; specification is the expression to sort on. For
* example, the y-axis can be sorted by [Time].[Quarter] (its name), or by
* {[Measures].[Unit Sales], [Stores].[California]} (Unit Sales in
* California). In general, the latter specification identifies a single
* column (or row, for x-axis sorting) for each hierarchy on the other
* axis. This function always removes previous sort on iAxis. If direcion
* is "none" then axis becomes sorted in natural order (no explicit
* sorting)*/
public void sort(
int /*axisType*/ axis, int /*sortDirection*/ direction,
Member[] members)
{
Util.assertTrue(axis < axes.length, "Bad axis code");
// Find and remove any existing sorts on this axis.
removeSortFromAxis(axis);
//apply new sort
String sDirection;
switch (direction) {
case ascDirection: sDirection = "ASC"; break;
case descDirection: sDirection = "DESC"; break;
case bascDirection: sDirection = "BASC"; break;
case bdescDirection: sDirection = "BDESC"; break;
case noneDirection: /*we already removed the sort*/ return;
default:
throw Util.getRes().newInternal("bad direction code " + direction);
}
Exp e = axes[axis].set;
if (members.length == 0)
// No members to sort on means use default sort order. As
// we've already removed any sorters, we're done.
return;
else {
FunCall funOrder = new FunCall(
"Order",
new Exp[] {
e,
members.length == 0 ? null : // handled above
members.length == 1 ? (Exp) members[0] :
(Exp) new FunCall(
"()",
members,
FunDef.TypeParentheses),
Literal.createSymbol(sDirection)});
axes[axis].set = funOrder;
}
}
    /** Finds and removes existing sorts and TopBottomN functions from axis*/
    public void removeSortFromAxis(int /*axisType*/ axis)
    {
        // Find and remove any existing sorts on this axis.
        Util.assertTrue(axis < axes.length, "Bad axis code");
        Walker walker = new Walker((QueryPart) axes[axis].set);
        while (walker.hasMoreElements()) {
            Object o = walker.nextElement();
            if (o instanceof FunCall) {
                FunCall funCall = (FunCall) o;
                if (!funCall.isCallTo("Order") &&
                    !isValidTopBottomNName(funCall.getFunName()))
                    continue;
                // Splice the sorted set (the call's first argument) into
                // the place of the Order/TopBottomN call itself.
                Exp e = funCall.args[0];
                QueryPart parent = (QueryPart) walker.getParent();
                if (parent == null) {
                    // the call was the axis' root expression
                    axes[axis].set = e;
                } else {
                    parent.replaceChild(walker.getOrdinal(), (QueryPart) e);
                }
            }
        }
    }
/** Calls removeSortFromAxis first and then applies TopBottomN function to
* the axis */
public void applyTopBottomN(
int /*axisType*/ axis, String /*function's name*/ fName,
Integer n, Member[] members)
{
Util.assertTrue(fName != null, "TopBottomN function name" +
" can not be null");
Util.assertTrue(axis < axes.length, "Bad axis code");
if (members.length == 0) throw Util.getRes().newMdxTopBottomNRequireSortMember();
if (!isValidTopBottomNName(fName)) throw Util.getRes().newMdxTopBottomInvalidFunctionName(fName);
// Find and remove any existing sorts on this axis.
removeSortFromAxis(axis);
Exp e = axes[axis].set;
FunCall funOrder = new FunCall(
fName,
new Exp[] {
e,
Literal.create(n),
members.length == 1 ? (Exp) members[0] :
(Exp) new FunCall(
"()", members, FunDef.TypeParentheses)});
axes[axis].set = funOrder;
}
boolean isValidTopBottomNName(String fName)
{
if (fName.equalsIgnoreCase("TopCount") ||
fName.equalsIgnoreCase("BottomCount") ||
fName.equalsIgnoreCase("TopPercent") ||
fName.equalsIgnoreCase("BottomPercent"))
return true;
return false;
}
/** Returns filtered sQuery based on user's grant privileges. {@link
* CubeAccess} contains a list of forbidden hiearchies, and limited
* members. oFilterAxesMembers[0] will contain array of mdxMembers, which
* would have to be applied after query execution. (it is very hard to
* apply limited members on expressions like [Food].children)
* */
public String processFilterQuery(
CubeAccess cubeAccess, ArrayList oFilterAxesMembers[])
{
if (!cubeAccess.hasRestrictions()){
return this.toPlatoMdx();
}
//it is possible that some of parameters are no longer used and
// they have to be purged from ParametersDefs
resolveParameters();
Query query = safeClone();
oFilterAxesMembers[0] = query.applyPermissions(cubeAccess);
return query.toPlatoMdx();
}
    /** Applies <code>cubeAccess</code> restrictions to this query:
     * rejects it outright if it uses a forbidden hierarchy, then forces
     * each limited member onto the query (slicer or axis).  Returns the
     * members that could not be applied up front and must instead be
     * filtered from the result set, or null if there are none. */
    private ArrayList applyPermissions(CubeAccess cubeAccess)
    {
        ArrayList filterAxesMemberList = null;
        // first check: if query contains any forbidden hierarchies
        Hierarchy[] noAccessHierarchies =
            cubeAccess.getNoAccessHierarchies();
        if (noAccessHierarchies != null){
            for( int i = 0; i < noAccessHierarchies.length; i++ ){
                Walker walker = findHierarchy( noAccessHierarchies[i] );
                if (walker != null){
                    // noAccess hierarchy is used; reject the query
                    throw Util.getRes().newUserDoesNotHaveRightsTo(
                        noAccessHierarchies[i].getUniqueName());
                }
            }
        }
        //second check: we need to apply restricted hierarchies
        Member[] limitedMembers = cubeAccess.getLimitedMembers();
        if (limitedMembers != null){
            for (int i = 0; i < limitedMembers.length; i++){
                Hierarchy limitedHierarchy =
                    limitedMembers[i].getHierarchy();
                Member[] mdxMember={ limitedMembers[i] };
                Walker walker = findHierarchy(limitedHierarchy);
                if (walker == null) {
                    //put limitedMember on the slicer
                    filterHierarchy(limitedHierarchy, slicerAxis, mdxMember);
                } else {
                    // the hierarchy is used somewhere in query
                    // if it is used on the slicer, we should modify the slicer
                    // to include it. If it is used on one of the axes, it
                    // we will parse returned results
                    int axis = getAxisCodeForWalker(walker);
                    if (axis == slicerAxis){
                        Object foundNode = walker.currentElement();
                        Member foundMember = null;
                        if (foundNode instanceof Member){
                            foundMember = (Member) foundNode;
                        } else if (foundNode instanceof FunCall &&
                                   ((FunCall) foundNode).isCallToTuple()) {
                            // tuple has only one node, which is our possible
                            // target
                            FunCall funCall = (FunCall) foundNode;
                            if (funCall.args[0] instanceof Member) {
                                foundMember = (Member) funCall.args[0];
                            }
                        }
                        if (foundMember != null){
                            //we found member (not member expression)
                            applyLimitOnMember(
                                foundMember, limitedMembers[i], axis);
                        } else {
                            // it looks like member is within an expression on
                            // slicer. let's remove it from there and add
                            // member
                            moveHierarchy(
                                limitedHierarchy, slicerAxis, noAxis, 0, true);
                            filterHierarchy(
                                limitedHierarchy, slicerAxis, mdxMember);
                        }
                    } else if (axis == xAxis || axis == yAxis) {
                        if (walker.currentElement() instanceof Member) {
                            // try to apply the limitation before executing
                            // query
                            applyLimitOnMember(
                                (Member) walker.currentElement(),
                                limitedMembers[i], axis);
                        } else {
                            // there might be an expression on the axes. we
                            // will filter the result set, so we need to build
                            // filters
                            if (filterAxesMemberList == null) {
                                filterAxesMemberList = new ArrayList();
                            }
                            filterAxesMemberList.add(limitedMembers[i]);
                        }
                    }
                }
            }
        }
        return filterAxesMemberList;
    }
    /**
     * This function takes the walker, which is presumably Member or
     * Hierarchy and return the axis code, on which it was found.
     *
     * <p>Walks up the ancestor chain to the node directly beneath the
     * Query, then classifies that node: an Axis named "columns" is the
     * x-axis, any other Axis the y-axis; a tuple call means the slicer;
     * anything else yields {@link #noAxis}.</p>
     **/
    private int getAxisCodeForWalker(Walker walker)
    {
        int depth = 0;
        Object parent = walker.getAncestor(depth);
        Util.assertTrue(
            parent != null,
            "failed to find Axis for" + walker.currentElement().toString());
        //walk up the tree
        ++depth;
        Object grandParent = walker.getAncestor( depth );
        ++depth;
        while (grandParent != null &&
               !(grandParent instanceof Query)) {
            parent = grandParent;
            grandParent = walker.getAncestor( depth );
            ++depth;
        }
        if (parent instanceof Axis) {
            // NOTE(review): assumes every Axis encountered here is a
            // QueryAxis -- the cast would throw otherwise; confirm Axis
            // has no other subclasses used in query trees.
            if (((QueryAxis) parent).axisName.equals("columns")) {
                return xAxis;
            }
            return yAxis;
        } else if (parent instanceof FunCall &&
                   ((FunCall) parent).isCallToTuple()) {
            return slicerAxis;
        }
        return noAxis;
    }
private void applyLimitOnMember(
Member foundMember, Member limitedMember, int axis)
{
if (foundMember.isChildOrEqualTo(limitedMember)) {
return;
} else if (limitedMember.isChildOrEqualTo(foundMember)) {
Member[] mdxMembers = {limitedMember};
filterHierarchy(foundMember.getHierarchy(), axis, mdxMembers);
} else {
// limitedMember and foundMember are not inheriting each other
// example. [OR].[Seattle] and [CA].[San Jose]
throw Util.getRes().newUserDoesNotHaveRightsTo(
foundMember.getUniqueName());
}
}
public Parameter createOrLookupParam(FunCall fParam)
{
//this is a definition of parameter
Util.assertTrue(
fParam.args[0] instanceof Literal,
"The name of parameter has to be a quoted string");
String name = (String) ((Literal)fParam.args[0]).getValue();
Parameter param = lookupParam(name);
if( param == null ){
// Create a new parameter.
param = new Parameter( fParam );
// Append it to the array of known parameters.
Parameter[] oldParameters = parameters;
parameters = new Parameter[oldParameters.length + 1];
for (int i = 0; i < oldParameters.length; i++) {
parameters[i] = oldParameters[i];
}
parameters[oldParameters.length] = param;
} else {
// the parameter is already defined, update it
param.update( fParam );
}
return param;
}
public Parameter lookupParam( String pName )
{
for( int i = 0; i < parameters.length; i++ ){
if( parameters[i].getName().equals(pName )){
return parameters[i];
}
}
return null;
}
    //validate each parameter, calculated their usage and clean unused ones
    void resolveParameters()
    {
        //validate definitions
        ArrayList validParameters = new ArrayList();
        for (int i = 0; i < parameters.length; i++) {
            if (!parameters[i].isToBeDeleted()) {
                parameters[i].validate(this);
                validParameters.add(parameters[i]);
            }
        }
        // Keep only the surviving definitions.
        parameters = (Parameter[]) validParameters.toArray(new Parameter[0]);
        //calculate usage
        for (int i = 0; i < parameters.length; i++) {
            parameters[i].nUses = 0;
        }
        // Count each Parameter node in the query tree against its
        // definition; an unmatched node means a missing definition.
        Walker queryElements = new Walker(this);
        while (queryElements.hasMoreElements()) {
            Object queryElement = queryElements.nextElement();
            if (queryElement instanceof Parameter) {
                boolean found = false;
                for (int i = 0; i < parameters.length; i++) {
                    if (parameters[i].equals( queryElement )){
                        parameters[i].nUses++;
                        found = true;
                        break;
                    }
                }
                if (!found) throw Util.getRes().newMdxParamNotFound(
                    ((Parameter) queryElement).name);
            }
        }
    }
    /**
     * Returns the parameters used in this query.
     * Validates and purges unused parameters first via
     * {@link #resolveParameters}.
     **/
    public Parameter[] getParameters()
    {
        resolveParameters();
        return parameters;
    }
void resetParametersPrintProperty()
{
for( int i = 0; i < parameters.length; i++ )
parameters[i].resetPrintProperty();
}
    // implement NameResolver
    /** Returns the cube this query runs against. */
    public Cube getCube() {
        return mdxCube;
    }
// public OlapElement get(OlapElement e) {
// return mdxCube.get(e);
// public Dimension get(Dimension dimension, Cube parent) {
// return mdxCube.get(dimension, parent);
// public Hierarchy get(Hierarchy hierarchy, Dimension parent) {
// return mdxCube.get(hierarchy, parent);
// public Level get(Level level, Hierarchy parent) {
// return mdxCube.get(level, parent);
// public Member get(Member member, Level parent) {
// return mdxCube.get(member, parent);
// implement NameResolver
public OlapElement lookupChild(
OlapElement parent, String s, boolean failIfNotFound)
{
OlapElement mdxElement = null;
// first look in cube
mdxElement = mdxCube.lookupChild(parent, s, false);
if (mdxElement != null)
return mdxElement;
// then look in defined members
Iterator definedMembers = getDefinedMembers().iterator();
while (definedMembers.hasNext()) {
Member mdxMember = (Member) definedMembers.next();
if (mdxMember.getName().equalsIgnoreCase(s)) //member might be
// referenced without dimension name in the query - bug21327
return mdxMember;
}
// then in defined sets
for (int i = 0; i < formulas.length; i++) {
Formula formula = formulas[i];
if (formula.isMember)
continue; // have already done these
if (formula.names[0].equals(s)) {
return formula.mdxSet;
}
}
// fail if we didn't find it
if (mdxElement == null && failIfNotFound) {
throw Util.getRes().newMdxChildObjectNotFound(
s, parent.getQualifiedName());
}
return mdxElement;
}
    // implement NameResolver
    /** Looks up a member by name, delegating to the Util helper with this
     * query as the resolver. */
    public Member lookupMember(String s, boolean failIfNotFound) {
        return Util.lookupMember(this,s,failIfNotFound);
    }
    // implement NameResolver
    /** Looks up a member by its compound name parts, delegating to the Util
     * helper with this query as the resolver. */
    public Member lookupMemberCompound(
        String[] names, boolean failIfNotFound)
    {
        return Util.lookupMemberCompound(this, names, failIfNotFound);
    }
// implement NameResolver
public Member lookupMemberByUniqueName(String s, boolean failIfNotFound)
{
Member member = lookupMemberFromCache(s);
if (member == null) {
member = mdxCube.lookupMemberByUniqueName(s, failIfNotFound);
}
return member;
}
// implement NameResolver
public Member lookupMemberFromCache(String s) {
// first look in defined members
Iterator definedMembers = getDefinedMembers().iterator();
while (definedMembers.hasNext()) {
Member mdxMember = (Member) definedMembers.next();
if (mdxMember.getUniqueName().equals(s)) {
return mdxMember;
}
}
// then look in cube
return mdxCube.lookupMemberFromCache(s);
}
    /** Return an array of the formulas used in this query. */
    public Formula[] getFormulas()
    {
        // Note: returns the internal array itself, not a copy.
        return formulas;
    }
    /** Remove a formula from the query. If <code>failIfUsedInQuery</code> is
     * true, checks and throws an error if formula is used somewhere in the
     * query; otherwise, what???
     *
     * <p>The usage check walks the whole query tree looking for the
     * formula's element; when found, it climbs the ancestor chain to report
     * whether the use is on an axis, in another formula, or on the
     * slicer.  Removal itself is a simple filtered rebuild of the formulas
     * array (matched case-insensitively by unique name).</p> */
    public void removeFormula(String uniqueName, boolean failIfUsedInQuery)
    {
        Formula formula = findFormula(uniqueName);
        if (failIfUsedInQuery && formula != null) {
            OlapElement mdxElement = formula.getElement();
            //search the query tree to see if this formula expression is used
            //anywhere (on the axes or in another formula)
            Walker walker = new Walker(this);
            while (walker.hasMoreElements()) {
                Object queryElement = walker.nextElement();
                if (!queryElement.equals(mdxElement)) {
                    continue;
                }
                // mdxElement is used in the query. lets find on on which axis
                // or formula
                String formulaType = formula.isMember() ?
                    Util.getRes().getCalculatedMember() :
                    Util.getRes().getCalculatedSet();
                // Climb ancestors until the node directly beneath the Query
                // is found; its type tells us where the use occurs.
                int i = 0;
                Object parent = walker.getAncestor(i);
                Object grandParent = walker.getAncestor(i+1);
                while (parent != null && grandParent != null) {
                    if (grandParent instanceof Query) {
                        if (parent instanceof Axis) {
                            throw Util.getRes().newMdxCalculatedFormulaUsedOnAxis(
                                formulaType, uniqueName,
                                ((QueryAxis) parent).axisName);
                        } else if (parent instanceof Formula) {
                            String parentFormulaType =
                                ((Formula) parent).isMember() ?
                                Util.getRes().getCalculatedMember() :
                                Util.getRes().getCalculatedSet();
                            throw Util.getRes().newMdxCalculatedFormulaUsedInFormula(
                                formulaType, uniqueName, parentFormulaType,
                                ((Formula) parent).getUniqueName());
                        } else {
                            throw Util.getRes().newMdxCalculatedFormulaUsedOnSlicer(
                                formulaType, uniqueName);
                        }
                    }
                    ++i;
                    parent = walker.getAncestor(i);
                    grandParent = walker.getAncestor(i+1);
                }
                throw Util.getRes().newMdxCalculatedFormulaUsedInQuery(
                    formulaType, uniqueName, this.toWebUIMdx());
            }
        }
        //remove formula from query
        ArrayList formulaList = new ArrayList();
        for (int i = 0; i < formulas.length; i++) {
            if (!formulas[i].getUniqueName().equalsIgnoreCase(uniqueName)) {
                formulaList.add(formulas[i]);
            }
        }
        // it has been found and removed
        this.formulas = (Formula[]) formulaList.toArray(new Formula[0]);
    }
/** finds calculated member or set in array of formulas */
public Formula findFormula(String uniqueName)
{
for (int i = 0; i < formulas.length; i++) {
if (formulas[i].getUniqueName().equalsIgnoreCase(uniqueName))
return formulas[i];
}
return null;
}
/** finds formula by name and renames it to new name */
public void renameFormula(String uniqueName, String newName)
{
Formula formula = findFormula(uniqueName);
if (formula == null) throw Util.getRes().newMdxFormulaNotFound(
"formula", uniqueName, toWebUIMdx());
formula.rename(newName);
}
ArrayList getDefinedMembers()
{
ArrayList definedMembers = new ArrayList();
for (int i = 0; i < formulas.length; i++) {
if (formulas[i].isMember && formulas[i].getElement() != null) {
definedMembers.add((Member)formulas[i].getElement());
}
}
return definedMembers;
}
    /** finds axis by index and sets flag to show empty cells on that axis*/
    public void setAxisShowEmptyCells(int axis, boolean showEmpty)
    {
        if (axis >= axes.length) {
            throw Util.getRes().newMdxAxisShowEmptyCellsNotSupported(
                new Integer(axis));
        }
        // nonEmpty suppresses empty cells, so it is the inverse of showEmpty.
        axes[axis].nonEmpty = !showEmpty;
    }
/** finds axis by index and adds/removes subtotals. It finds all
* hierarchies used on axis, then for every hierarchy it finds the
* expression, where it's used. Using that expression, it executes mdx
* query to generate array of mdxMembers. Based on
* <code>showSubtotals</code> it modifies array of mdxMembers and
* substitutes expression with set, which is created based on array of
* mdxMembers */
public void setAxisShowSubtotals(int axis, boolean showSubtotals)
{
if (axis >= axes.length || axis < 0) {
//based on Prashant request: don't throw error-just return
return;
}
String sCalculatedMembers = null;
if (formulas != null) {
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
for (int i = 0; i < formulas.length; i++) {
if (i == 0) {
pw.print("with ");
} else {
pw.print(" ");
}
formulas[i].unparse(pw, new PlatoCallBack());
pw.println();
}
sCalculatedMembers = sw.toString();
}
Hierarchy[] mdxHierarchies = collectHierarchies(axes[axis]);
for (int j = 0; j < mdxHierarchies.length; j++) {
Walker walker = findHierarchy(mdxHierarchies[j]);
Exp e = (Exp) walker.currentElement();
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
e.unparse(pw, new PlatoCallBack());
String sExp = sw.toString();
String sQuery = "";
if (sCalculatedMembers != null)
sQuery = sCalculatedMembers;
sQuery += "select {" + sExp + "} on columns from [" +
mdxCube.getUniqueName() + "]";
Member[] mdxMembers = mdxCube.getMembersForQuery(
sQuery, getDefinedMembers());
HashSet set = new HashSet();
if (showSubtotals) {
// we need to put all those members plus all their parent
// members
for (int i = 0; i < mdxMembers.length; i++){
if (!set.contains(mdxMembers[i])) {
Member[] parentMembers =
mdxMembers[i].getAncestorMembers();
for (int k = parentMembers.length - 1; k >= 0; k
if (!set.contains(parentMembers[k]))
set.add(parentMembers[k]);
}
set.add(mdxMembers[i]);
}
}
} else {
//we need to put only members with biggest depth
int nMaxDepth = 0;
for (int i = 0; i < mdxMembers.length; i++){
if (nMaxDepth < mdxMembers[i].getDepth())
nMaxDepth = mdxMembers[i].getDepth();
}
for (int i = 0; i < mdxMembers.length; i++){
if (nMaxDepth == mdxMembers[i].getDepth()) {
set.add(mdxMembers[i]);
}
}
}
Member[] goodMembers = (Member[]) set.toArray(new Member[0]);
filterHierarchy(mdxHierarchies[j], axis, goodMembers);
}
axes[axis].setShowSubtotals(showSubtotals);
}
/**
 * Returns the <code>Hierarchy[]</code> used on <code>axis</code>,
 * delegating to collectHierarchies(). The slicer axis is handled
 * specially because its hierarchies live on the slicer expression.
 *
 * @param axis axis index, or Query.slicerAxis for the slicer
 */
public Hierarchy[] getMdxHierarchiesOnAxis(int axis)
{
    if (axis >= axes.length) {
        throw Util.getRes().newMdxAxisShowSubtotalsNotSupported(new Integer(axis));
    }
    return (axis == Query.slicerAxis)
        ? collectHierarchies((QueryPart) slicer)
        : collectHierarchies(axes[axis]);
}
}
// End Query.java
|
package ifc.chart;
import lib.MultiPropertyTest;
import lib.Status;
import lib.StatusException;
import com.sun.star.beans.XPropertySet;
import com.sun.star.chart.XChartDocument;
import com.sun.star.chart.XDiagram;
import com.sun.star.uno.UnoRuntime;
/**
* Testing <code>com.sun.star.chart.BarDiagram</code>
* service properties:
* <ul>
* <li><code>'Vertical'</code></li>
* <li><code>'Deep'</code></li>
* <li><code>'StackedBarsConnected'</code></li>
* <li><code>'NumberOfLines'</code></li>
* </ul>
* Properties testing is automated
* by <code>lib.MultiPropertyTest</code> except property
* <code>'NumberOfLines'</code>. <p>
* This test needs the following object relations :
* <ul>
* <li> <code>'CHARTDOC'</code> (of type <code>XChartDocument</code>):
* to have reference to chart document </li>
* <li> <code>'BAR'</code> (of type <code>XDiagram</code>):
* relation that use as parameter for method setDiagram of chart document </li>
* <ul> <p>
* @see com.sun.star.chart.BarDiagram
* @see com.sun.star.chart.XChartDocument
* @see com.sun.star.chart.XDiagram
* @see lib.MultiPropertyTest
*/
public class _BarDiagram extends MultiPropertyTest {

    XChartDocument doc = null;
    XDiagram oldDiagram = null;

    /**
     * Retrieves object relations, installs the 'BAR' diagram on the chart
     * document and switches it to 3D mode.
     * @throws StatusException if one of relations not found or a property
     * cannot be set.
     */
    protected void before() {
        log.println("Setting Diagram type to BarDiagram");
        doc = (XChartDocument) tEnv.getObjRelation("CHARTDOC");
        if (doc == null) throw new StatusException(Status.failed
            ("Relation 'CHARTDOC' not found"));

        XDiagram bar = (XDiagram) tEnv.getObjRelation("BAR");
        if (bar == null) throw new StatusException(Status.failed
            ("Relation 'BAR' not found"));

        // Remember the old diagram so after() can restore it.
        oldDiagram = doc.getDiagram();
        doc.setDiagram(bar);
        oObj = (XPropertySet)
            UnoRuntime.queryInterface( XPropertySet.class, doc.getDiagram() );

        log.println("Set it to 3D");
        setBoolProperty("Dim3D", true);
    }

    /**
     * Sets the old diagram for a chart document.
     */
    protected void after() {
        doc.setDiagram(oldDiagram);
    }

    /**
     * Sets a boolean property on <code>oObj</code>, converting each of the
     * four possible UNO exceptions into a <code>StatusException</code>.
     * Extracted to remove the catch-block duplication that previously
     * appeared verbatim in both before() and _NumberOfLines().
     */
    private void setBoolProperty(String name, boolean value) {
        try {
            oObj.setPropertyValue(name, new Boolean(value));
        } catch(com.sun.star.lang.WrappedTargetException e) {
            log.println("Exception while set property value");
            e.printStackTrace(log);
            throw new StatusException("Exception while set property value", e);
        } catch(com.sun.star.lang.IllegalArgumentException e) {
            log.println("Exception while set property value");
            e.printStackTrace(log);
            throw new StatusException("Exception while set property value", e);
        } catch(com.sun.star.beans.PropertyVetoException e) {
            log.println("Exception while set property value");
            e.printStackTrace(log);
            throw new StatusException("Exception while set property value", e);
        } catch(com.sun.star.beans.UnknownPropertyException e) {
            log.println("Exception while set property value");
            e.printStackTrace(log);
            throw new StatusException("Exception while set property value", e);
        }
    }

    // Toggles an Integer property between 0 and 2.
    protected PropertyTester LineTester = new PropertyTester() {
        protected Object getNewValue(String propName, Object oldValue)
            throws java.lang.IllegalArgumentException {
            int a = 0;
            int b = 2;
            if ( ((Integer) oldValue).intValue() == a)
                return new Integer(b); else
                return new Integer(a);
        }
    } ;

    /**
     * Tests property 'NumberOfLines'.
     * This property tests when diagram in 2D-mode only
     * except all other properties. This property is currently supported by
     * two dimensional vertical bar charts only.
     */
    public void _NumberOfLines() {
        log.println("Set it to 2D");
        setBoolProperty("Dim3D", false);
        setBoolProperty("Vertical", false);
        log.println("Testing with custom Property tester") ;
        testProperty("NumberOfLines", LineTester) ;
    }
} // EOF BarDiagram
|
package net.jforum.view.admin;
import java.io.FileInputStream;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import javax.servlet.http.HttpServletResponse;
import net.jforum.ActionServletRequest;
import net.jforum.entities.Category;
import net.jforum.entities.Forum;
import net.jforum.repository.ForumRepository;
import net.jforum.repository.TopicRepository;
import net.jforum.util.I18n;
import net.jforum.util.preferences.ConfigKeys;
import net.jforum.util.preferences.SystemGlobals;
import net.jforum.util.preferences.TemplateKeys;
import freemarker.template.SimpleHash;
/**
* @author Rafael Steil
* @version $Id: ConfigAction.java,v 1.14 2006/02/12 14:55:22 rafaelsteil Exp $
*/
public class ConfigAction extends AdminCommand
{
    public ConfigAction() {}

    public ConfigAction(ActionServletRequest request,
            HttpServletResponse response,
            SimpleHash context)
    {
        this.request = request;
        this.response = response;
        this.context = context;
    }

    /**
     * Fills the template context with the current configuration values and
     * the list of available locale names, then selects the config-list
     * template.
     */
    public void list() throws Exception {
        Properties p = new Properties();

        Iterator iter = SystemGlobals.fetchConfigKeyIterator();
        while (iter.hasNext()) {
            String key = (String) iter.next();
            String value = SystemGlobals.getValue(key);
            p.put(key, value);
        }

        Properties locales = new Properties();
        // FIX: the input stream was never closed, leaking a file
        // descriptor on every invocation; close it in a finally block.
        FileInputStream fis = new FileInputStream(SystemGlobals.getValue(ConfigKeys.CONFIG_DIR)
            + "/languages/locales.properties");
        try {
            locales.load(fis);
        } finally {
            fis.close();
        }

        List localesList = new ArrayList();
        for (Enumeration e = locales.keys(); e.hasMoreElements();) {
            localesList.add(e.nextElement());
        }

        this.context.put("config", p);
        this.context.put("locales", localesList);
        this.setTemplateName(TemplateKeys.CONFIG_LIST);
    }

    /**
     * Saves the submitted configuration values and redisplays the list.
     */
    public void editSave() throws Exception
    {
        this.updateData(this.getConfig());
        this.list();
    }

    /**
     * Collects all request parameters prefixed with "p_" into a Properties
     * object, stripping the prefix from each key.
     */
    Properties getConfig()
    {
        Properties p = new Properties();

        Enumeration e = this.request.getParameterNames();
        while (e.hasMoreElements()) {
            String name = (String) e.nextElement();
            if (name.startsWith("p_")) {
                p.setProperty(name.substring(name.indexOf('_') + 1), this.request.getParameter(name));
            }
        }

        return p;
    }

    /**
     * Writes the given properties into the global configuration, persists
     * them, refreshes the board default language and — if the
     * topics-per-page setting changed — clears every forum's topic cache.
     */
    void updateData(Properties p) throws Exception
    {
        int oldTopicsPerPage = SystemGlobals.getIntValue(ConfigKeys.TOPICS_PER_PAGE);

        for (Iterator iter = p.entrySet().iterator(); iter.hasNext(); ) {
            Map.Entry entry = (Map.Entry)iter.next();
            SystemGlobals.setValue((String)entry.getKey(), (String)entry.getValue());
        }

        SystemGlobals.saveInstallation();
        I18n.changeBoardDefault(SystemGlobals.getValue(ConfigKeys.I18N_DEFAULT));

        // If topicsPerPage has changed, force a reload in all forums
        if (oldTopicsPerPage != SystemGlobals.getIntValue(ConfigKeys.TOPICS_PER_PAGE)) {
            List categories = ForumRepository.getAllCategories();

            for (Iterator iter = categories.iterator(); iter.hasNext(); ) {
                Category c = (Category)iter.next();

                for (Iterator iter2 = c.getForums().iterator(); iter2.hasNext(); ) {
                    Forum f = (Forum)iter2.next();
                    TopicRepository.clearCache(f.getId());
                }
            }
        }
    }
}
|
package net.kevxu.senselib;
import java.util.ArrayList;
import java.util.List;
import net.kevxu.senselib.StepDetector.StepListener;
import android.content.Context;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.os.Bundle;
import android.util.Log;
/**
* Class for providing location information. start() and stop() must be
* explicitly called to start and stop the internal thread.
*
* @author Kaiwen Xu
*/
public class LocationService extends SensorService implements LocationListener, StepListener {

    private static final String TAG = "LocationService";

    // Service levels reported through onServiceLevelChanged().
    public static int LEVEL_GPS_NOT_ENABLED = 0;
    public static int LEVEL_GPS_ENABLED = 1;

    // Average step distance for human (in meters)
    private static final float CONSTANT_AVERAGE_STEP_DISTANCE = 0.7874F;

    // Average step time for human (in milliseconds).
    // FIX: the original comment said "in meters", and the constant name
    // had an "AVERGAE" typo — it is private, so callers are unaffected.
    private static final long CONSTANT_AVERAGE_STEP_TIME = 500L;

    private static final int GPS_UPDATE_MULTIPLIER = 1;
    private static final long GPS_UPDATE_MIN_TIME = CONSTANT_AVERAGE_STEP_TIME * GPS_UPDATE_MULTIPLIER;
    private static final float GPS_UPDATE_MIN_DISTANCE = CONSTANT_AVERAGE_STEP_DISTANCE * GPS_UPDATE_MULTIPLIER;

    private Context mContext;
    private LocationManager mLocationManager;
    private List<LocationServiceListener> mLocationServiceListeners;
    private StepDetector mStepDetector;
    private LocationServiceFusionThread mLocationServiceFusionThread;
    private volatile int mServiceLevel;

    public interface LocationServiceListener {

        /**
         * Called when service level changed. Service level includes
         * LocationService.LEVEL_*.
         *
         * @param level
         *            Service level.
         */
        public void onServiceLevelChanged(int level);

        public void onLocationChanged(Location location);
    }

    protected LocationService(Context context, StepDetector stepDetector) throws SensorNotAvailableException {
        this(context, stepDetector, null);
    }

    protected LocationService(Context context, StepDetector stepDetector, LocationServiceListener locationServiceListener) throws SensorNotAvailableException {
        mContext = context;
        mLocationManager = (LocationManager) mContext.getSystemService(Context.LOCATION_SERVICE);
        mLocationServiceListeners = new ArrayList<LocationServiceListener>();
        if (locationServiceListener != null) {
            mLocationServiceListeners.add(locationServiceListener);
        }
        mStepDetector = stepDetector;
        // Register for step events so pushStep() feeds the fusion thread.
        mStepDetector.addListener(this);
    }

    /**
     * Call this when start or resume. Starts the fusion thread (if not
     * already running) and registers for GPS updates.
     */
    @Override
    protected void start() {
        if (mLocationServiceFusionThread == null) {
            mLocationServiceFusionThread = new LocationServiceFusionThread();
            mLocationServiceFusionThread.start();
            Log.i(TAG, "LocationServiceFusionThread started.");
        }
        if (mLocationManager == null) {
            mLocationManager = (LocationManager) mContext.getSystemService(Context.LOCATION_SERVICE);
        }
        mLocationManager.requestLocationUpdates(LocationManager.GPS_PROVIDER,
                GPS_UPDATE_MIN_TIME, GPS_UPDATE_MIN_DISTANCE, this);
        Log.i(TAG, "GPS update registered.");
        Log.i(TAG, "LocationService started.");
    }

    /**
     * Call this when pause. Terminates the fusion thread, waits for it to
     * finish and unregisters GPS updates.
     */
    @Override
    protected void stop() {
        // FIX: guard against stop() being called before start() (or
        // twice), which previously caused a NullPointerException.
        if (mLocationServiceFusionThread != null) {
            mLocationServiceFusionThread.terminate();
            Log.i(TAG, "Waiting for LocationServiceFusionThread to stop.");
            try {
                mLocationServiceFusionThread.join();
            } catch (InterruptedException e) {
                Log.w(TAG, e.getMessage(), e);
            }
            Log.i(TAG, "LocationServiceFusionThread stoppped.");
            mLocationServiceFusionThread = null;
        }
        mLocationManager.removeUpdates(this);
        Log.i(TAG, "GPS update unregistered.");
        Log.i(TAG, "LocationService stopped.");
    }

    @SuppressWarnings("unused")
    private final class LocationServiceFusionThread extends AbstractSensorWorkerThread {

        private static final float ACCEPTABLE_ACCURACY = 15.0F;

        // Variables for accepting data from outside
        private Location gpsLocation;
        private float[] aiwcs;
        private volatile long steps = 0;

        // Internal data
        private boolean initialFix = false;
        private Location locationFix;
        private long previousSteps = 0;

        public LocationServiceFusionThread() {
            this(DEFAULT_INTERVAL);
        }

        public LocationServiceFusionThread(long interval) {
            super(interval);
            aiwcs = new float[3];
        }

        public synchronized void pushGPSLocation(Location location) {
            if (gpsLocation == null) {
                gpsLocation = new Location(location);
            } else {
                gpsLocation.set(location);
            }
            // Debug: forward raw GPS fixes directly to the listeners.
            for (LocationServiceListener listener : mLocationServiceListeners) {
                listener.onLocationChanged(gpsLocation);
            }
        }

        private synchronized Location getGPSLocation() {
            return gpsLocation;
        }

        public synchronized void pushStep(float[] aiwcs) {
            steps++;
            System.arraycopy(aiwcs, 0, this.aiwcs, 0, 3);
        }

        @Override
        public void run() {
            while (!isTerminated()) {
                Location currentLocation = getGPSLocation();
                if (currentLocation != null && currentLocation.hasAccuracy() && currentLocation.getAccuracy() <= ACCEPTABLE_ACCURACY) {
                    // NOTE(review): fusion of step count with the GPS fix
                    // is apparently unfinished — stepsTaken is computed but
                    // never used. Left as-is pending the real algorithm.
                    if (initialFix && locationFix != null && steps - previousSteps > 0) {
                        long stepsTaken = steps - previousSteps;
                    }
                    if (!initialFix && locationFix == null) {
                        locationFix = new Location(currentLocation);
                        initialFix = true;
                        previousSteps = steps;
                    } else if (!initialFix) {
                        locationFix.set(currentLocation);
                        initialFix = true;
                        previousSteps = steps;
                    }
                }
                try {
                    Thread.sleep(getInterval());
                } catch (InterruptedException e) {
                    Log.w(TAG, e.getMessage(), e);
                }
            }
        }
    }

    /**
     * Adds a listener; returns this for chaining.
     * @throws NullPointerException if the listener is null.
     */
    public LocationService addListener(LocationServiceListener locationServiceListener) {
        if (locationServiceListener != null) {
            mLocationServiceListeners.add(locationServiceListener);
            return this;
        } else {
            throw new NullPointerException("LocationServiceListener is null.");
        }
    }

    protected LocationService removeListeners() {
        mLocationServiceListeners.clear();
        return this;
    }

    // Notifies listeners only when the level actually changes.
    private synchronized void setServiceLevel(int serviceLevel) {
        if (serviceLevel != mServiceLevel) {
            mServiceLevel = serviceLevel;
            for (LocationServiceListener listener : mLocationServiceListeners) {
                listener.onServiceLevelChanged(mServiceLevel);
            }
        }
    }

    @Override
    public void onLocationChanged(Location location) {
        synchronized (this) {
            mLocationServiceFusionThread.pushGPSLocation(location);
        }
    }

    @Override
    public void onStatusChanged(String provider, int status, Bundle extras) {
    }

    @Override
    public void onProviderEnabled(String provider) {
        synchronized (this) {
            if (provider.equals(LocationManager.GPS_PROVIDER)) {
                Log.i(TAG, "GPS enabled.");
                setServiceLevel(LEVEL_GPS_ENABLED);
            }
        }
    }

    @Override
    public void onProviderDisabled(String provider) {
        synchronized (this) {
            if (provider.equals(LocationManager.GPS_PROVIDER)) {
                Log.i(TAG, "GPS disabled.");
                setServiceLevel(LEVEL_GPS_NOT_ENABLED);
            }
        }
    }

    @Override
    public void onStep(float[] values) {
        synchronized (this) {
            mLocationServiceFusionThread.pushStep(values);
        }
    }

    @Override
    public void onMovement(float[] values) {
        // Not used
    }
}
|
package org.bouncycastle.openssl;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.Writer;
import java.math.BigInteger;
import java.security.Key;
import java.security.KeyPair;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.security.SecureRandom;
import java.security.cert.CertificateEncodingException;
import java.security.cert.X509Certificate;
import java.security.interfaces.DSAPrivateKey;
import java.security.interfaces.RSAPrivateCrtKey;
import java.security.interfaces.RSAPrivateKey;
import javax.crypto.Cipher;
import javax.crypto.SecretKey;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;
import org.bouncycastle.asn1.ASN1EncodableVector;
import org.bouncycastle.asn1.ASN1InputStream;
import org.bouncycastle.asn1.ASN1OutputStream;
import org.bouncycastle.asn1.ASN1Sequence;
import org.bouncycastle.asn1.DERInteger;
import org.bouncycastle.asn1.DERSequence;
import org.bouncycastle.asn1.pkcs.PrivateKeyInfo;
import org.bouncycastle.asn1.pkcs.RSAPrivateKeyStructure;
import org.bouncycastle.asn1.x509.DSAParameter;
import org.bouncycastle.crypto.PBEParametersGenerator;
import org.bouncycastle.crypto.generators.OpenSSLPBEParametersGenerator;
import org.bouncycastle.crypto.params.KeyParameter;
import org.bouncycastle.jce.PKCS10CertificationRequest;
import org.bouncycastle.util.encoders.Base64;
import org.bouncycastle.util.encoders.Hex;
import org.bouncycastle.x509.X509AttributeCertificate;
import org.bouncycastle.x509.X509V2AttributeCertificate;
/**
* General purpose writer for OpenSSL PEM objects.
*/
public class PEMWriter
extends BufferedWriter
{
/**
* Base constructor.
*
* @param out output stream to use.
*/
public PEMWriter(Writer out)
{
super(out);
}
private void writeHexEncoded(byte[] bytes)
throws IOException
{
bytes = Hex.encode(bytes);
for (int i = 0; i != bytes.length; i++)
{
this.write((char)bytes[i]);
}
}
private void writeEncoded(byte[] bytes)
throws IOException
{
char[] buf = new char[64];
bytes = Base64.encode(bytes);
for (int i = 0; i < bytes.length; i += buf.length)
{
int index = 0;
while (index != buf.length)
{
if ((i + index) >= bytes.length)
{
break;
}
buf[index] = (char)bytes[i + index];
index++;
}
this.write(buf, 0, index);
this.newLine();
}
}
public void writeObject(
Object o)
throws IOException
{
String type;
byte[] encoding;
if (o instanceof X509Certificate)
{
type = "CERTIFICATE";
try
{
encoding = ((X509Certificate)o).getEncoded();
}
catch (CertificateEncodingException e)
{
throw new IOException("Cannot encode object: " + e.toString());
}
}
else if (o instanceof KeyPair)
{
writeObject(((KeyPair)o).getPrivate());
return;
}
else if (o instanceof PrivateKey)
{
ByteArrayInputStream bIn = new ByteArrayInputStream(((Key)o).getEncoded());
ASN1InputStream aIn = new ASN1InputStream(bIn);
PrivateKeyInfo info = new PrivateKeyInfo((ASN1Sequence)aIn.readObject());
ByteArrayOutputStream bOut = new ByteArrayOutputStream();
ASN1OutputStream aOut = new ASN1OutputStream(bOut);
if (o instanceof RSAPrivateKey)
{
type = "RSA PRIVATE KEY";
aOut.writeObject(info.getPrivateKey());
}
else if (o instanceof DSAPrivateKey)
{
type = "DSA PRIVATE KEY";
DSAParameter p = DSAParameter.getInstance(info.getAlgorithmId().getParameters());
ASN1EncodableVector v = new ASN1EncodableVector();
v.add(new DERInteger(0));
v.add(new DERInteger(p.getP()));
v.add(new DERInteger(p.getQ()));
v.add(new DERInteger(p.getG()));
BigInteger x = ((DSAPrivateKey)o).getX();
BigInteger y = p.getG().modPow(x, p.getP());
v.add(new DERInteger(y));
v.add(new DERInteger(x));
aOut.writeObject(new DERSequence(v));
}
else
{
throw new IOException("Cannot identify private key");
}
encoding = bOut.toByteArray();
}
else if (o instanceof PublicKey)
{
type = "PUBLIC KEY";
encoding = ((PublicKey)o).getEncoded();
}
else if (o instanceof X509AttributeCertificate)
{
type = "ATTRIBUTE CERTIFICATE";
encoding = ((X509V2AttributeCertificate)o).getEncoded();
}
else if (o instanceof PKCS10CertificationRequest)
{
type = "CERTIFICATE REQUEST";
encoding = ((PKCS10CertificationRequest)o).getEncoded();
}
else
{
throw new IOException("unknown object passed - can't encode.");
}
this.write("
this.newLine();
writeEncoded(encoding);
this.write("
this.newLine();
}
public void writeObject(
Object o,
String algorithm,
char[] password,
SecureRandom random)
throws IOException
{
byte[] salt = new byte[8];
random.nextBytes(salt);
OpenSSLPBEParametersGenerator pGen = new OpenSSLPBEParametersGenerator();
pGen.init(PBEParametersGenerator.PKCS5PasswordToBytes(password), salt);
SecretKey secretKey = null;
if (algorithm.equalsIgnoreCase("DESEDE"))
{
// generate key
int keyLength = 24;
secretKey = new SecretKeySpec(((KeyParameter)pGen.generateDerivedParameters(keyLength * 8)).getKey(), algorithm);
}
else
{
throw new IOException("unknown algorithm in writeObject");
}
byte[] keyData = null;
if (o instanceof RSAPrivateCrtKey)
{
RSAPrivateCrtKey k = (RSAPrivateCrtKey)o;
RSAPrivateKeyStructure keyStruct = new RSAPrivateKeyStructure(
k.getModulus(),
k.getPublicExponent(),
k.getPrivateExponent(),
k.getPrimeP(),
k.getPrimeQ(),
k.getPrimeExponentP(),
k.getPrimeExponentQ(),
k.getCrtCoefficient());
// convert to bytearray
ByteArrayOutputStream bOut = new ByteArrayOutputStream();
ASN1OutputStream aOut = new ASN1OutputStream(bOut);
aOut.writeObject(keyStruct);
aOut.close();
keyData = bOut.toByteArray();
}
byte[] encData = null;
// cipher
try
{
Cipher c = Cipher.getInstance("DESede/CBC/PKCS5Padding", "BC");
c.init(Cipher.ENCRYPT_MODE, secretKey, new IvParameterSpec(salt));
encData = c.doFinal(keyData);
}
catch (Exception e)
{
throw new IOException("exception using cipher: " + e.toString());
}
// write the data
this.write("
this.newLine();
this.write("Proc-Type: 4,ENCRYPTED");
this.newLine();
this.write("DEK-Info: DES-EDE3-CBC,");
this.writeHexEncoded(salt);
this.newLine();
this.newLine();
this.writeEncoded(encData);
this.write("
}
}
|
package com;
import javax.microedition.lcdui.Graphics;
import javax.microedition.lcdui.Image;
import de.enough.polish.ui.ChoiceItem;
//#if polish.usePolishGui
//#= import de.enough.polish.ui.Style;
//#endif
/**
* @author nick
*
*/
public class CardItem extends ChoiceItem {

    // Left and right card images; either may be null (nothing drawn).
    private Image lI = null;
    private Image rI = null;
    // When true, the left image is drawn near the left edge; when false,
    // it is pushed toward the right edge (next to the right image, if any).
    private boolean isBothSides = true;

    /**
     * @param label
     */
    public CardItem(String label, int listType) {
        super(label, null, listType);
    }

    //#if polish.usePolishGui
    //#= public CardItem( String label, int listType, Style style ) {
    //#= super( label, null, listType, style );
    //
    //#endif

    /* (non-Javadoc)
     * @see javax.microedition.lcdui.CustomItem#paint(javax.microedition.lcdui.Graphics, int, int)
     */
    public void paintContent(int x, int y, int xBorder, int yBorder, Graphics g) {
        super.paintContent(x, y, xBorder, yBorder, g);
        // Offsets use a third of the image width as padding. NOTE(review):
        // the exact anchor arithmetic below is position-sensitive and was
        // presumably tuned visually — verify on device before changing.
        if ( this.lI != null ) {
            if ( isBothSides )
                g.drawImage(this.lI, x + this.lI.getWidth() + this.lI.getWidth() / 3, y, Graphics.TOP | Graphics.LEFT );
            else
                if ( this.rI != null )
                    g.drawImage(this.lI, x + super.availContentWidth - this.rI.getWidth() - this.rI.getWidth() / 3 - this.lI.getWidth() / 3, y, Graphics.TOP | Graphics.RIGHT );
                else
                    g.drawImage(this.lI, x + super.availContentWidth - this.lI.getWidth() / 3, y, Graphics.TOP | Graphics.RIGHT );
        }
        // The right image is always anchored at the right edge.
        if ( this.rI != null ) {
            g.drawImage( this.rI, x + super.availContentWidth - this.rI.getWidth() / 3, y, Graphics.TOP | Graphics.RIGHT);
        }
    }

    // Sets the image drawn on the left side (null to clear).
    public void setLeftImage(Image img) {
        this.lI = img;
    }

    // Sets the image drawn on the right side (null to clear).
    public void setRightImage(Image img) {
        this.rI = img;
    }

    // Controls left-image placement; see the field comment above.
    public void setBothSides(boolean isBothSides) {
        this.isBothSides = isBothSides;
    }
}
|
package api.web.gw2.mapping.v2.titles;
import api.web.gw2.mapping.core.IdValue;
import api.web.gw2.mapping.core.LocalizedResource;
import api.web.gw2.mapping.core.OptionalValue;
import api.web.gw2.mapping.core.QuantityValue;
import api.web.gw2.mapping.core.SetValue;
import java.util.Collections;
import java.util.OptionalInt;
import java.util.Set;
public final class JsonpTitle implements Title {

    @IdValue
    private int id = IdValue.DEFAULT_INTEGER_ID;
    @LocalizedResource
    private String name = LocalizedResource.DEFAULT;
    @IdValue
    private int achievement = IdValue.DEFAULT_INTEGER_ID;
    @IdValue
    @SetValue
    // FIX: Collections.EMPTY_SET is raw-typed and causes an unchecked
    // assignment; Collections.emptySet() is the type-safe equivalent.
    private Set<Integer> achievements = Collections.emptySet();
    @QuantityValue
    @OptionalValue
    private OptionalInt apRequired = OptionalInt.empty();

    /**
     * Creates a new empty instance.
     */
    public JsonpTitle() {
    }

    @Override
    public int getId() {
        return id;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public int getAchievement() {
        return achievement;
    }

    @Override
    public Set<Integer> getAchievements() {
        return achievements;
    }

    @Override
    public OptionalInt getApRequired() {
        return apRequired;
    }
}
|
package ca.sumost.kinetic;
import ca.sumost.kinetic.editor.BoundaryCreator;
import ca.sumost.kinetic.editor.RedAtomCreator;
import ca.sumost.kinetic.editor.WorldEditorListener;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.InputAdapter;
import com.badlogic.gdx.InputMultiplexer;
import com.badlogic.gdx.InputProcessor;
import com.badlogic.gdx.Screen;
import com.badlogic.gdx.graphics.GL20;
import com.badlogic.gdx.graphics.glutils.ShapeRenderer;
import com.badlogic.gdx.graphics.glutils.ShapeRenderer.ShapeType;
import com.badlogic.gdx.input.GestureDetector;
import com.badlogic.gdx.input.GestureDetector.GestureAdapter;
import com.badlogic.gdx.input.GestureDetector.GestureListener;
import com.badlogic.gdx.math.Vector2;
import com.badlogic.gdx.physics.box2d.Body;
import com.badlogic.gdx.physics.box2d.Box2DDebugRenderer;
import com.badlogic.gdx.scenes.scene2d.Actor;
import com.badlogic.gdx.scenes.scene2d.Stage;
import com.badlogic.gdx.scenes.scene2d.ui.Container;
import com.badlogic.gdx.scenes.scene2d.ui.Skin;
import com.badlogic.gdx.scenes.scene2d.ui.TextButton;
import com.badlogic.gdx.scenes.scene2d.ui.VerticalGroup;
import com.badlogic.gdx.scenes.scene2d.utils.ChangeListener;
import com.badlogic.gdx.utils.Array;
import com.badlogic.gdx.utils.viewport.ScreenViewport;
public class GameScreen implements Screen
{
    private final KineticTheoryGame game;
    // World-unit viewport: 20 screen pixels per world unit at zoom 1.
    private final ScreenViewport mViewport;
    // Scene2D stage holding the UI buttons.
    private final Stage mStage;
    private float mZoomFactor = 1;

    @SuppressWarnings("unused")
    private final Box2DDebugRenderer debugRenderer = new Box2DDebugRenderer();
    private final ShapeRenderer mShapeRenderer = new ShapeRenderer();

    private final WorldEditorListener mEditorListener;

    public GameScreen(final KineticTheoryGame g)
    {
        game = g;

        mViewport = new ScreenViewport();
        mViewport.setUnitsPerPixel(1f/20f);

        mStage = new Stage();
        mStage.addActor(makeRootWidget());

        // Converts screen coordinates to world coordinates for the editor.
        final ScreenConverter sc = new ScreenConverter()
        {
            @Override
            public Vector2 pointToWorld(float xScreen, float yScreen)
            {
                return mViewport.unproject(new Vector2(xScreen, yScreen));
            }

            @Override
            public Vector2 vectorToWorld(float xScreen, float yScreen)
            {
                // Translation-free conversion: subtract the world origin.
                return pointToWorld(xScreen, yScreen).sub(pointToWorld(0, 0));
            }
        };

        // Mouse-wheel zoom: each notch scales the zoom by a fixed factor.
        InputProcessor zoomByScroll = new InputAdapter()
        {
            @Override
            public boolean scrolled(int amount)
            {
                float zoomChangeFactor = 0.90f;
                float zoomChange = (amount > 0) ? zoomChangeFactor : 1.0f / zoomChangeFactor;
                setZoom(mZoomFactor * zoomChange);
                return true;
            }
        };

        // Touch pinch zoom. NOTE(review): this sets an absolute zoom from
        // the pinch ratio rather than scaling the current zoom — confirm
        // this is the intended behavior.
        GestureListener zoomByPinch = new GestureAdapter()
        {
            @Override
            public boolean zoom(float initialDistance, float distance)
            {
                setZoom(initialDistance / distance);
                return true;
            }
        };

        mEditorListener = new WorldEditorListener(g.getWorld(), sc);

        // UI first so buttons consume events before the editor sees them.
        InputMultiplexer im = new InputMultiplexer(mStage, zoomByScroll, new GestureDetector(zoomByPinch), new GestureDetector(mEditorListener));
        Gdx.input.setInputProcessor(im);

        Gdx.graphics.setContinuousRendering(true);
    }

    // Builds the top-right button bar that selects the active editor tool.
    private Actor makeRootWidget()
    {
        Skin skin = game.getSkin();

        TextButton redAtomButton = new TextButton("RED atom", skin);
        redAtomButton.addListener(new ChangeListener()
        {
            @Override
            public void changed(ChangeEvent event, Actor actor)
            {
                mEditorListener.setCreator(new RedAtomCreator(game.getWorld()));
            }
        });

        TextButton boundaryButton = new TextButton("Boundary", skin);
        boundaryButton.addListener(new ChangeListener()
        {
            @Override
            public void changed(ChangeEvent event, Actor actor)
            {
                mEditorListener.setCreator(new BoundaryCreator(game.getWorld()));
            }
        });

        VerticalGroup rightButtonBar = new VerticalGroup();
        rightButtonBar.addActor(redAtomButton);
        rightButtonBar.addActor(boundaryButton);

        Container<VerticalGroup> root = new Container<VerticalGroup>(rightButtonBar).top().right();
        root.setFillParent(true);
        //root.setDebug(true);
        return root;
    }

    @Override
    public void render(float delta)
    {
        // Clear, draw the physics world and editor overlays, then the UI,
        // then advance the simulation one fixed step.
        Gdx.gl.glClearColor(0, 0, 0.2f, 1);
        Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT);

        mViewport.getCamera().update();
        mShapeRenderer.setProjectionMatrix(mViewport.getCamera().combined);

        mShapeRenderer.begin(ShapeType.Line);
        {
            //debugRenderer.render(game.world, camera.combined);
            renderWorld();
            renderDecorations();
        }
        mShapeRenderer.end();

        mStage.act(delta);
        mStage.draw();

        // Fixed 60 Hz physics step (6 velocity / 2 position iterations).
        game.getWorld().step(1/60f, 6, 2);
    }

    // Draws every body whose user data knows how to render itself.
    private void renderWorld()
    {
        Array<Body> bodies = new Array<Body>();
        game.getWorld().getBodies(bodies);
        for (Body body : bodies)
        {
            Object ud = body.getUserData();
            if (ud instanceof RenderableBody)
            {
                ((RenderableBody)ud).render(mShapeRenderer, body);
            }
        }
    }

    // Draws in-progress editor shapes (e.g. a boundary being dragged out).
    private void renderDecorations()
    {
        mEditorListener.render(mShapeRenderer);
    }

    // Applies the zoom by changing world-units-per-pixel and refreshing
    // the viewport at its current screen size.
    private void setZoom(float zoomFactor)
    {
        mZoomFactor = zoomFactor;
        mViewport.setUnitsPerPixel(zoomFactor/20f);
        mViewport.update(mViewport.getScreenWidth(), mViewport.getScreenHeight());
    }

    @Override
    public void resize(int width, int height)
    {
        mViewport.update(width, height);
        mStage.getViewport().update(width, height, true);
    }

    @Override
    public void show()
    {
        Gdx.graphics.setContinuousRendering(true);
    }

    @Override
    public void hide()
    {
        Gdx.graphics.setContinuousRendering(false);
    }

    @Override
    public void pause()
    {
        Gdx.graphics.setContinuousRendering(false);
    }

    @Override
    public void resume()
    {
        Gdx.graphics.setContinuousRendering(true);
    }

    @Override
    public void dispose()
    {
        mShapeRenderer.dispose();
        mStage.dispose();
    }
}
|
package core;
import edu.wpi.first.wpilibj.Encoder;
import edu.wpi.first.wpilibj.Timer;
import utilities.Vars;
import utilities.MySolenoid;
import utilities.MyPIDVelocity;
import utilities.MyJoystick;
import edu.wpi.first.wpilibj.Talon;
/**
* This class is responsible for controlling the shooter and feeder.
* @author Fauzi
*/
// TODO: Find Velocity controller constants.
public class Shooter {
// CONSTANTS
private final double m_dSpeedIncrease = 100;
private final int m_iPulses = 500;
private boolean m_bGoodToShoot = false;
private int m_iFrisbeeShot = 0;
private double m_dShootSpeed = 0;
private double m_dPrevPulseTime = 0;
private MySolenoid m_solFeeder = new MySolenoid(Vars.chnSolFeederDown, Vars.chnSolFeederUp, false);
private MyPIDVelocity m_PIDShooter = new MyPIDVelocity(Vars.kShooterP, Vars.kShooterI, Vars.kShooterD);
private Timer m_tmFeeder = new Timer();
private Timer m_tmPulser = new Timer();
private Talon m_mtShooter = new Talon(Vars.chnVicShooter);
private Encoder m_encShooter = new Encoder(1, Vars.chnEncShooter);
private MyJoystick m_joy;
public Shooter(MyJoystick joystick)
{
m_joy = joystick;
m_tmPulser.start();
}
public void run()
{
if(Vars.fnCanShoot())
{
// When pressed, starts velocity controller so shooter motor can turn on.
if(m_joy.gotPressed(Vars.btShootFrisbee))
{
m_joy.flipSwitch(Vars.btShootFrisbee);
m_tmPulser.start();
if(!m_joy.getSwitch(Vars.btShootFrisbee))
{
m_PIDShooter.reset(true);
m_tmPulser.stop();
m_tmPulser.reset();
}
}
// Sets the shooter to the specified speed.
if(m_joy.getSwitch(Vars.btShootFrisbee))
{
m_mtShooter.set(m_PIDShooter.getOutput(m_dShootSpeed, m_encShooter.getRate()));
if(Math.abs(m_dShootSpeed - m_encShooter.getRate()) <= Vars.dShootTolerance)
m_bGoodToShoot = true;
else
m_bGoodToShoot = false;
// Feeds one frisbee to the shooter if shooter is at the right speed.
if(m_joy.gotPressed(Vars.btFeedFrisbee) && m_bGoodToShoot)
m_joy.flipSwitch(Vars.btFeedFrisbee);
if(m_joy.getSwitch(Vars.btFeedFrisbee))
{
if(!m_solFeeder.getStatus() && m_bGoodToShoot)
{
m_tmFeeder.start();
m_solFeeder.turnOn();
}
if(m_tmFeeder.get() >= Vars.dMinFeedTime)
{
m_solFeeder.turnOff();
m_tmFeeder.stop();
m_tmFeeder.reset();
m_iFrisbeeShot++;
m_bGoodToShoot = false;
}
if(m_iFrisbeeShot >= Vars.iMaxFrisbee)
{
m_iFrisbeeShot = 0;
m_joy.setSwitch(Vars.btFeedFrisbee, false);
}
}
}
// If we're not shooting, shooter should be zero.
else if(!m_joy.getSwitch(Vars.btShootFrisbee))
m_mtShooter.set(0);
// Sets the shooter speed when the increase or decrease button is pressed
if(m_joy.gotPressed(Vars.btIncreaseSpeed))
m_dShootSpeed += m_dSpeedIncrease;
if(m_joy.gotPressed(Vars.btDecreaseSpeed))
m_dShootSpeed -= m_dSpeedIncrease;
Vars.fnPutDashBoardNumberBox(Vars.skShooterSpeed, m_dShootSpeed);
Vars.fnPutDashBoardButton(Vars.skCanFeed, m_bGoodToShoot);
}
}
/**
 * Returns the current shooter speed setpoint (not the measured wheel speed).
 * @return the commanded shooter speed
 */
public double getShooterSpeed()
{
return m_dShootSpeed;
}
/**
 * Computes the shooter wheel speed in rotations per minute from the encoder
 * counts accumulated since the previous call, then resets the count.
 *
 * Must be called periodically; the rate is averaged over the interval
 * between calls (measured with m_tmPulser).
 *
 * @return wheel speed in RPM, or 0 if no time has elapsed since the last call
 */
public double getShooterEncoder()
{
double dCurrent = m_tmPulser.get();
int iCount = m_encShooter.get();
m_encShooter.reset();
double dElapsed = dCurrent - m_dPrevPulseTime;
m_dPrevPulseTime = dCurrent;
// BUGFIX: guard against a zero (or negative) interval, which previously
// divided by zero and returned Infinity/NaN when this method was called
// twice within one timer tick (or after the pulse timer was reset).
if (dElapsed <= 0)
{
return 0;
}
// 60 seconds per minute; 250 encoder counts per rotation.
return (60.0 / 250.0) * iCount / dElapsed;
}
/**
 * Returns whether the feed toggle is currently latched on.
 * @return true if a frisbee feed is in progress or pending
 */
public boolean getFeedStatus()
{
return m_joy.getSwitch(Vars.btFeedFrisbee);
}
/**
 * Drives the shooter motor directly, bypassing the PID controller.
 * @param dSpeed raw motor output to apply
 */
public void setShooter(double dSpeed)
{
m_mtShooter.set(dSpeed);
}
/**
 * Sets the feeder based on the argument, true means feed one frisbee.
 * @param bStatus desired feeder solenoid state
 */
public void setFeeder(boolean bStatus)
{
m_solFeeder.set(bStatus);
}
}
|
package de.fau.sensorlib.widgets.config;
import android.content.Context;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.FrameLayout;
import android.widget.TextView;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.fragment.app.DialogFragment;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Objects;
import de.fau.sensorlib.Constants;
import de.fau.sensorlib.R;
/**
 * Dialog that lets the user edit a sensor's configuration. The available
 * config items, their default values, and the sensor name are passed in via
 * fragment arguments; selections are collected in {@link #mSelectedConfigValues}
 * and delivered to an {@link OnSensorConfigChangedListener} when OK is pressed.
 */
public class SensorConfigDialog extends DialogFragment implements View.OnClickListener, SensorConfigBuilder.OnSensorConfigSelectedListener {
private static final String TAG = SensorConfigDialog.class.getSimpleName();
private Context mContext;
private RecyclerView mRecyclerView;
private Button mCancelButton;
private Button mOkButton;
private SensorConfigAdapter mAdapter;
// Values the user has picked so far; starts as a copy of the defaults.
private HashMap<String, Object> mSelectedConfigValues = new HashMap<>();
// Defaults supplied by the caller through fragment arguments.
private HashMap<String, Object> mDefaultConfigValues = new HashMap<>();
// Config items keyed by config key, as supplied through fragment arguments.
private HashMap<String, ConfigItem> mConfigItems = new HashMap<>();
private SensorConfigBuilder mSensorConfigBuilder;
private OnSensorConfigChangedListener mSensorConfigListener;
@Nullable
@Override
public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.widget_sensor_config_dialog, container);
mContext = getContext();
// NOTE(review): getArguments() may be null if the dialog was created without
// arguments, and these unchecked casts assume the caller stored LinkedHashMaps
// under these keys -- verify against the code that shows this dialog.
mConfigItems = (LinkedHashMap<String, ConfigItem>) getArguments().getSerializable(Constants.KEY_SENSOR_CONFIG);
mDefaultConfigValues = (LinkedHashMap<String, Object>) getArguments().getSerializable(Constants.KEY_SENSOR_CONFIG_DEFAULT);
// Work on a copy so Cancel leaves the defaults untouched.
mSelectedConfigValues = (HashMap<String, Object>) mDefaultConfigValues.clone();
String sensorName = getArguments().getString(Constants.KEY_SENSOR_NAME, "n/a");
TextView textView = rootView.findViewById(R.id.tv_header);
textView.setText(getString(R.string.sensor_config, sensorName));
// Only build a default builder if none was injected via setSensorConfigBuilder().
if (mSensorConfigBuilder == null) {
mSensorConfigBuilder = new SensorConfigBuilder(mContext);
mSensorConfigBuilder.setOnSensorConfigSelectedListener(this);
}
mRecyclerView = rootView.findViewById(R.id.recycler_view);
mRecyclerView.setLayoutManager(new LinearLayoutManager(mContext));
mAdapter = new SensorConfigAdapter(mContext, mConfigItems);
mRecyclerView.setAdapter(mAdapter);
mOkButton = rootView.findViewById(R.id.button_ok);
mOkButton.setOnClickListener(this);
mCancelButton = rootView.findViewById(R.id.button_cancel);
mCancelButton.setOnClickListener(this);
return rootView;
}
// OK commits the selected values to the listener; Cancel just dismisses.
@Override
public void onClick(View v) {
int id = v.getId();
if (id == R.id.button_ok) {
if (mSensorConfigListener != null) {
mSensorConfigListener.onSensorConfigSelected(mSelectedConfigValues);
}
dismiss();
} else if (id == R.id.button_cancel) {
dismiss();
}
}
@Override
public void onStart() {
super.onStart();
// Stretch the dialog to full width; height wraps its content.
Objects.requireNonNull(getDialog().getWindow()).setLayout(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT);
}
public void setSensorConfigBuilder(SensorConfigBuilder builder) {
mSensorConfigBuilder = builder;
}
public void setSensorConfigListener(OnSensorConfigChangedListener listener) {
mSensorConfigListener = listener;
}
// Callback from SensorConfigBuilder: records the user's pick for one key.
@Override
public void onConfigItemSelected(String key, Object item) {
mSelectedConfigValues.put(key, item);
}
/** Row holder: a title plus a container for the builder-generated config view. */
public static class SensorConfigViewHolder extends RecyclerView.ViewHolder {
private String mKey;
private TextView mTitleTextView;
private FrameLayout mConfigView;
public SensorConfigViewHolder(View itemView) {
super(itemView);
mTitleTextView = itemView.findViewById(R.id.tv_config_title);
mConfigView = itemView.findViewById(R.id.config_container);
}
public void setTitle(String title) {
mTitleTextView.setText(title);
}
// Replaces any previously bound config view (holders are recycled).
public void setConfigLayout(SensorConfigBuilder.BaseConfig configItem) {
mConfigView.removeAllViews();
mConfigView.addView(configItem);
}
public void setKey(String key) {
mKey = key;
}
public String getKey() {
return mKey;
}
}
/** Adapter that renders one row per config item, in the map's iteration order. */
public class SensorConfigAdapter extends RecyclerView.Adapter<SensorConfigViewHolder> {
private Context mContext;
// Parallel lists: mConfigKeys.get(i) is the key for mConfigItems.get(i).
private ArrayList<String> mConfigKeys;
private ArrayList<ConfigItem> mConfigItems;
private SensorConfigAdapter(Context context, HashMap<String, ConfigItem> configItems) {
mContext = context;
mConfigKeys = new ArrayList<>(configItems.keySet());
mConfigItems = new ArrayList<>(configItems.values());
}
@NonNull
@Override
public SensorConfigViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) {
View layout = LayoutInflater.from(mContext).inflate(R.layout.item_sensor_config, parent, false);
return new SensorConfigViewHolder(layout);
}
@Override
public void onBindViewHolder(@NonNull SensorConfigViewHolder holder, int position) {
holder.setKey(mConfigKeys.get(position));
ConfigItem item = mConfigItems.get(position);
holder.setTitle(item.getTitle());
holder.setConfigLayout(mSensorConfigBuilder.buildConfigView(holder.getKey(), item, mDefaultConfigValues.get(mConfigKeys.get(position))));
}
@Override
public int getItemCount() {
return mConfigItems.size();
}
}
}
|
package com.hubspot.singularity.mesos;
import java.util.List;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Optional;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.google.inject.Inject;
import com.hubspot.mesos.json.MesosExecutorObject;
import com.hubspot.mesos.json.MesosSlaveFrameworkObject;
import com.hubspot.mesos.json.MesosSlaveStateObject;
import com.hubspot.mesos.json.MesosTaskObject;
import com.hubspot.singularity.SingularityCloseable;
import com.hubspot.singularity.SingularityCloser;
import com.hubspot.singularity.SingularityTask;
import com.hubspot.singularity.SingularityTaskId;
import com.hubspot.singularity.Utils;
import com.hubspot.singularity.config.SingularityConfiguration;
import com.hubspot.singularity.data.TaskManager;
public class SingularityLogSupport implements SingularityCloseable {
private final static Logger LOG = LoggerFactory.getLogger(SingularityLogSupport.class);
private final MesosClient mesosClient;
private final TaskManager taskManager;
private final ThreadPoolExecutor logLookupExecutorService;
private final SingularityCloser closer;
@Inject
public SingularityLogSupport(SingularityConfiguration configuration, MesosClient mesosClient, TaskManager taskManager, SingularityCloser closer) {
this.mesosClient = mesosClient;
this.taskManager = taskManager;
this.closer = closer;
this.logLookupExecutorService = new ThreadPoolExecutor(configuration.getLogFetchCoreThreads(), configuration.getLogFetchMaxThreads(), 250L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<Runnable>(), new ThreadFactoryBuilder().setNameFormat("SingularityDirectoryFetcher-%d").build());
}
@Override
public void close() {
closer.shutdown(getClass().getName(), logLookupExecutorService);
}
private Optional<String> findDirectory(SingularityTaskId taskId, List<MesosExecutorObject> executors) {
for (MesosExecutorObject executor : executors) {
for (MesosTaskObject executorTask : executor.getTasks()) {
if (taskId.getId().equals(executorTask.getId())) {
return Optional.of(executor.getDirectory());
}
}
for (MesosTaskObject executorTask : executor.getCompletedTasks()) {
if (taskId.getId().equals(executorTask.getId())) {
return Optional.of(executor.getDirectory());
}
}
}
return Optional.absent();
}
private void loadDirectory(SingularityTask task) {
final long start = System.currentTimeMillis();
final String slaveUri = mesosClient.getSlaveUri(task.getOffer().getHostname());
LOG.info("Fetching slave data to find log directory for task {} from uri {}", task.getTaskId(), slaveUri);
MesosSlaveStateObject slaveState = mesosClient.getSlaveState(slaveUri);
Optional<String> directory = null;
for (MesosSlaveFrameworkObject slaveFramework : slaveState.getFrameworks()) {
directory = findDirectory(task.getTaskId(), slaveFramework.getExecutors());
if (directory.isPresent()) {
break;
}
directory = findDirectory(task.getTaskId(), slaveFramework.getCompletedExecutors());
if (directory.isPresent()) {
break;
}
}
if (!directory.isPresent()) {
LOG.warn("Couldn't find matching executor for task {}", task.getTaskId());
return;
}
LOG.debug("Found a directory {} for task {}", directory.get(), task.getTaskId());
taskManager.updateTaskDirectory(task.getTaskId(), directory.get());
LOG.trace("Updated task {} directory in {}", task.getTaskId(), Utils.duration(start));
}
public void checkDirectory(final SingularityTaskId taskId) {
final Optional<String> maybeDirectory = taskManager.getDirectory(taskId);
if (maybeDirectory.isPresent()) {
LOG.debug("Already had a directory for task {}, skipping lookup", taskId);
return;
}
final Optional<SingularityTask> task = taskManager.getTask(taskId);
if (!task.isPresent()) {
LOG.warn("No task found available for task {}, can't locate directory", taskId);
return;
}
Runnable cmd = generateLookupCommand(task.get());
LOG.trace("Enqueing a request to fetch directory for task: {}, current queue size: {}", taskId, logLookupExecutorService.getQueue().size());
logLookupExecutorService.submit(cmd);
}
private Runnable generateLookupCommand(final SingularityTask task) {
return new Runnable() {
@Override
public void run() {
try {
loadDirectory(task);
} catch (Throwable t) {
LOG.error("While fetching directory for task: {}", task.getTaskId(), t);
}
}
};
}
}
|
package org.springframework.roo.addon.dbre.model;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
import org.springframework.roo.addon.dbre.model.dialect.Dialect;
import org.springframework.roo.support.util.Assert;
/**
 * Creates a {@link Database database} model from a live database using JDBC.
 *
 * @author Alan Stewart
 * @since 1.1
 */
public class SchemaIntrospector {

    /** Only plain tables are introspected unless {@link #setTypes} overrides this. */
    private static final String[] TYPES = { TableType.TABLE.name() };

    private Connection connection;
    private DatabaseMetaData databaseMetaData;
    private String catalog;
    private Schema schema;
    // NOTE(review): these "pattern" fields double as the *current* table/column
    // while readTables() iterates -- the loop overwrites them on every row.
    private String tableNamePattern;
    private String columnNamePattern;
    private String[] types = TYPES;

    /**
     * Creates an introspector bound to the given connection and schema.
     *
     * @param connection live JDBC connection (required)
     * @param schema schema to introspect, or {@code null} for all schemas
     * @throws SQLException if catalog or metadata cannot be read
     */
    public SchemaIntrospector(Connection connection, Schema schema) throws SQLException {
        Assert.notNull(connection, "Connection must not be null");
        this.connection = connection;
        catalog = this.connection.getCatalog();
        databaseMetaData = this.connection.getMetaData();
        this.schema = schema;
    }

    public SchemaIntrospector(Connection connection) throws SQLException {
        this(connection, null);
    }

    public Connection getConnection() {
        return connection;
    }

    public String getCatalog() {
        return catalog;
    }

    public void setCatalog(String catalog) {
        this.catalog = catalog;
    }

    public Schema getSchema() {
        return schema;
    }

    /** Returns the schema name for use as a JDBC metadata pattern, or null. */
    public String getSchemaPattern() {
        return schema != null ? schema.getName() : null;
    }

    public void setSchema(Schema schema) {
        this.schema = schema;
    }

    public String getTableNamePattern() {
        return tableNamePattern;
    }

    public void setTableNamePattern(String tableNamePattern) {
        this.tableNamePattern = tableNamePattern;
    }

    public String getColumnNamePattern() {
        return columnNamePattern;
    }

    public void setColumnNamePattern(String columnNamePattern) {
        this.columnNamePattern = columnNamePattern;
    }

    public String[] getTypes() {
        return types;
    }

    public void setTypes(String[] types) {
        this.types = types;
    }

    /** Lists all schemas visible through the connection's metadata. */
    public Set<Schema> getSchemas() throws SQLException {
        Set<Schema> schemas = new LinkedHashSet<Schema>();
        ResultSet rs = databaseMetaData.getSchemas();
        try {
            while (rs.next()) {
                schemas.add(new Schema(rs.getString("TABLE_SCHEM")));
            }
        } finally {
            rs.close();
        }
        return schemas;
    }

    /** Builds the complete database model: tables plus sequences (if supported). */
    public Database getDatabase() throws SQLException {
        Database database = new Database(catalog, schema, readTables());
        database.setSequences(readSequences());
        return database;
    }

    /**
     * Reads all matching tables with their columns, keys, and indices.
     */
    private Set<Table> readTables() throws SQLException {
        // BUGFIX: removed a stray readSequences() call that used to sit here.
        // Its result was discarded, and getDatabase() already reads sequences
        // separately, so the call only cost one extra query per introspection.
        Set<Table> tables = new LinkedHashSet<Table>();
        ResultSet rs = databaseMetaData.getTables(catalog, getSchemaPattern(), tableNamePattern, types);
        try {
            while (rs.next()) {
                // Point the current-table fields at this row so the helper
                // readers (readColumns etc.) pick up the right table.
                tableNamePattern = rs.getString("TABLE_NAME");
                catalog = rs.getString("TABLE_CAT");
                schema = new Schema(rs.getString("TABLE_SCHEM"));

                // Check for certain tables such as Oracle recycle bin tables, and ignore
                if (ignoreTables()) {
                    continue;
                }

                Table table = new Table();
                table.setName(tableNamePattern);
                table.setCatalog(catalog);
                table.setSchema(schema);
                table.setDescription(rs.getString("REMARKS"));
                table.addColumns(readColumns());
                table.addForeignKeys(readForeignKeys());
                table.addExportedKeys(readExportedKeys());
                table.addIndices(readIndices());

                for (String columnName : readPrimaryKeyNames()) {
                    Column column = table.findColumn(columnName);
                    if (column != null) {
                        column.setPrimaryKey(true);
                    }
                }

                tables.add(table);
            }
        } finally {
            rs.close();
        }
        return tables;
    }

    /** Returns true for tables that should be skipped (e.g. Oracle's BIN$ recycle bin). */
    private boolean ignoreTables() {
        boolean ignore = false;
        try {
            if ("Oracle".equalsIgnoreCase(databaseMetaData.getDatabaseProductName()) && tableNamePattern.startsWith("BIN$")) {
                ignore = true;
            }
        } catch (SQLException ignored) {
            // Best effort: if product name lookup fails, keep the table.
        }
        return ignore;
    }

    /** Reads the columns of the current table, with type/size/nullability info. */
    private Set<Column> readColumns() throws SQLException {
        Set<Column> columns = new LinkedHashSet<Column>();
        ResultSet rs = databaseMetaData.getColumns(catalog, getSchemaPattern(), tableNamePattern, columnNamePattern);
        try {
            while (rs.next()) {
                String columnName = rs.getString("COLUMN_NAME");
                // Escape backslashes so the name survives later string processing.
                columnName = columnName.replaceAll("\\\\", "\\\\\\\\");
                Column column = new Column(columnName);
                column.setDescription(rs.getString("REMARKS"));
                column.setDefaultValue(rs.getString("COLUMN_DEF"));
                column.setTypeCode(rs.getInt("DATA_TYPE"));
                column.setType(ColumnType.getColumnType(column.getTypeCode())); // "TYPE_NAME"

                switch (column.getType()) {
                    case DECIMAL:
                    case DOUBLE:
                    case NUMERIC:
                        // Numeric types carry precision/scale rather than a length.
                        column.setPrecision(rs.getInt("COLUMN_SIZE"));
                        column.setScale(rs.getInt("DECIMAL_DIGITS"));
                        column.setLength(0);
                        break;
                    default:
                        column.setLength(rs.getInt("COLUMN_SIZE"));
                        break;
                }

                column.setRequired("NO".equalsIgnoreCase(rs.getString("IS_NULLABLE")));
                column.setOrdinalPosition(rs.getInt("ORDINAL_POSITION"));

                columns.add(column);
            }
        } finally {
            rs.close();
        }
        return columns;
    }

    /**
     * Reads the current table's imported (incoming) foreign keys, grouping the
     * per-column rows of a composite key into a single ForeignKey.
     */
    private Set<ForeignKey> readForeignKeys() throws SQLException {
        Map<String, ForeignKey> foreignKeys = new LinkedHashMap<String, ForeignKey>();
        ResultSet rs = databaseMetaData.getImportedKeys(catalog, getSchemaPattern(), tableNamePattern);
        try {
            while (rs.next()) {
                String name = rs.getString("FK_NAME");
                String foreignTableName = rs.getString("PKTABLE_NAME");
                // Key by name + table so same-named FKs on different tables don't merge.
                String key = name + "_" + foreignTableName;
                ForeignKey foreignKey = new ForeignKey(name, foreignTableName);
                foreignKey.setOnUpdate(getCascadeAction(rs.getShort("UPDATE_RULE")));
                foreignKey.setOnDelete(getCascadeAction(rs.getShort("DELETE_RULE")));

                Reference reference = new Reference();
                reference.setSequenceNumber(rs.getShort("KEY_SEQ"));
                reference.setLocalColumnName(rs.getString("FKCOLUMN_NAME"));
                reference.setForeignColumnName(rs.getString("PKCOLUMN_NAME"));

                if (foreignKeys.containsKey(key)) {
                    foreignKeys.get(key).addReference(reference);
                } else {
                    foreignKey.addReference(reference);
                    foreignKeys.put(key, foreignKey);
                }
            }
        } finally {
            rs.close();
        }
        return new LinkedHashSet<ForeignKey>(foreignKeys.values());
    }

    /** Maps a JDBC imported-key rule constant to the model's CascadeAction. */
    private CascadeAction getCascadeAction(Short actionValue) {
        CascadeAction cascadeAction;
        switch (actionValue.intValue()) {
            case DatabaseMetaData.importedKeyCascade:
                cascadeAction = CascadeAction.CASCADE;
                break;
            case DatabaseMetaData.importedKeySetNull:
                cascadeAction = CascadeAction.SET_NULL;
                break;
            case DatabaseMetaData.importedKeySetDefault:
                cascadeAction = CascadeAction.SET_DEFAULT;
                break;
            case DatabaseMetaData.importedKeyRestrict:
                cascadeAction = CascadeAction.RESTRICT;
                break;
            case DatabaseMetaData.importedKeyNoAction:
                cascadeAction = CascadeAction.NONE;
                break;
            default:
                cascadeAction = CascadeAction.NONE;
        }
        return cascadeAction;
    }

    /**
     * Reads the current table's exported (outgoing) foreign keys, grouping
     * composite-key rows just like readForeignKeys().
     */
    private Set<ForeignKey> readExportedKeys() throws SQLException {
        Map<String, ForeignKey> exportedKeys = new LinkedHashMap<String, ForeignKey>();
        ResultSet rs = databaseMetaData.getExportedKeys(catalog, getSchemaPattern(), tableNamePattern);
        try {
            while (rs.next()) {
                String name = rs.getString("FK_NAME");
                String foreignTableName = rs.getString("FKTABLE_NAME");
                String key = name + "_" + foreignTableName;
                ForeignKey foreignKey = new ForeignKey(name, foreignTableName);
                foreignKey.setOnUpdate(getCascadeAction(rs.getShort("UPDATE_RULE")));
                foreignKey.setOnDelete(getCascadeAction(rs.getShort("DELETE_RULE")));

                Reference reference = new Reference();
                reference.setSequenceNumber(rs.getShort("KEY_SEQ"));
                // Exported keys view the relationship from the PK side, so the
                // local/foreign column roles are swapped relative to imported keys.
                reference.setLocalColumnName(rs.getString("PKCOLUMN_NAME"));
                reference.setForeignColumnName(rs.getString("FKCOLUMN_NAME"));

                if (exportedKeys.containsKey(key)) {
                    exportedKeys.get(key).addReference(reference);
                } else {
                    foreignKey.addReference(reference);
                    exportedKeys.put(key, foreignKey);
                }
            }
        } finally {
            rs.close();
        }
        return new LinkedHashSet<ForeignKey>(exportedKeys.values());
    }

    /** Reads the current table's indices, merging multi-column index rows. */
    private Set<Index> readIndices() throws SQLException {
        Set<Index> indices = new LinkedHashSet<Index>();

        ResultSet rs;
        try {
            // Catching SQLException here due to Oracle throwing exception when attempting to retrieve indices for deleted tables that exist in Oracle's recycle bin
            rs = databaseMetaData.getIndexInfo(catalog, getSchemaPattern(), tableNamePattern, false, false);
        } catch (SQLException e) {
            return indices;
        }

        if (rs != null) {
            try {
                while (rs.next()) {
                    Short type = rs.getShort("TYPE");
                    // Statistics rows describe the table, not a real index.
                    if (type == DatabaseMetaData.tableIndexStatistic) {
                        continue;
                    }

                    String indexName = rs.getString("INDEX_NAME");
                    Index index = findIndex(indexName, indices);
                    if (index == null) {
                        index = new Index(indexName);
                    } else {
                        // Remove and re-add so the updated index replaces the old entry.
                        indices.remove(index);
                    }
                    index.setUnique(!rs.getBoolean("NON_UNIQUE"));

                    IndexColumn indexColumn = new IndexColumn(rs.getString("COLUMN_NAME"));
                    indexColumn.setOrdinalPosition(rs.getShort("ORDINAL_POSITION"));

                    index.addColumn(indexColumn);
                    indices.add(index);
                }
            } finally {
                rs.close();
            }
        }

        return indices;
    }

    /** Finds an already-read index by name (case-insensitive), or null. */
    private Index findIndex(String name, Set<Index> indices) {
        for (Index index : indices) {
            if (index.getName().equalsIgnoreCase(name)) {
                return index;
            }
        }
        return null;
    }

    /** Reads the names of the current table's primary-key columns. */
    private Set<String> readPrimaryKeyNames() throws SQLException {
        Set<String> columnNames = new LinkedHashSet<String>();
        ResultSet rs = databaseMetaData.getPrimaryKeys(catalog, getSchemaPattern(), tableNamePattern);
        try {
            while (rs.next()) {
                columnNames.add(rs.getString("COLUMN_NAME"));
            }
        } finally {
            rs.close();
        }
        return columnNames;
    }

    /**
     * Reads the database's sequences via a dialect-specific query; returns an
     * empty set if the dialect is unknown or does not support sequences.
     */
    private Set<Sequence> readSequences() {
        Set<Sequence> sequences = new LinkedHashSet<Sequence>();
        Dialect dialect = getDialect();
        if (dialect != null && dialect.supportsSequences()) {
            PreparedStatement pstmt = null;
            ResultSet rs = null;
            try {
                pstmt = connection.prepareStatement(dialect.getQuerySequencesString(schema));
                rs = pstmt.executeQuery();
                while (rs.next()) {
                    sequences.add(new Sequence(rs.getString(1)));
                }
            } catch (SQLException ignored) {
                // Best effort: a failing sequence query simply yields no sequences.
            } finally {
                if (rs != null) {
                    try {
                        rs.close();
                    } catch (SQLException ignored) {}
                }
                if (pstmt != null) {
                    try {
                        pstmt.close();
                    } catch (SQLException ignored) {}
                }
            }
        }
        return sequences;
    }

    /**
     * Resolves the Dialect implementation by product name via reflection;
     * returns null when no matching dialect class exists.
     */
    private Dialect getDialect() {
        try {
            String productName = databaseMetaData.getDatabaseProductName();
            return (Dialect) Class.forName("org.springframework.roo.addon.dbre.model.dialect." + productName + "Dialect").newInstance();
        } catch (Exception e) {
            return null;
        }
    }
}
|
package ru.stqa.pft.addressbook.tests;
import org.testng.annotations.Test;
import ru.stqa.pft.addressbook.model.ContactData;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
public class ContactAddressTest extends TestBase {

    /**
     * Verifies that the address shown for the first contact on the home page
     * matches the address stored in that contact's edit form.
     */
    @Test
    public void testContactAddresses() {
        app.goTo().homePage();
        ContactData listedContact = app.contact().all().iterator().next();
        ContactData editFormContact = app.contact().infoFromEditForm(listedContact);
        assertThat(listedContact.getAddress(), equalTo(editFormContact.getAddress()));
    }
}
|
package ru.stqa.pft.addressbook.tests;
import org.testng.Assert;
import org.testng.AssertJUnit;
import org.testng.annotations.Test;
import ru.stqa.pft.addressbook.model.GroupData;
import java.util.Comparator;
import java.util.List;
public class GroupDeletionTests extends TestBase {

    /**
     * Deletes the last group in the list and verifies that exactly that group
     * disappeared while the remaining groups are unchanged.
     */
    @Test
    public void testsGroupDeletion() {
        app.getNavigationHelper().gotoGroupPage();
        // Precondition: ensure there is at least one group to delete.
        if (!app.getGroupHelper().isThereAGroup()) {
            app.getGroupHelper().createGroup(new GroupData("Group1", null, null));
        }
        List<GroupData> before = app.getGroupHelper().getGroupList();
        int lastIndex = before.size() - 1;
        app.getGroupHelper().selectGroup(lastIndex);
        app.getGroupHelper().deleteSelectedGroups();
        app.getGroupHelper().returnToGroupPage();
        List<GroupData> after = app.getGroupHelper().getGroupList();
        Assert.assertEquals(after.size(), lastIndex);
        // The expected list is the original minus the deleted (last) entry;
        // sort both by id so ordering differences don't fail the comparison.
        before.remove(lastIndex);
        Comparator<? super GroupData> byId = Comparator.comparingInt(GroupData::getId);
        before.sort(byId);
        after.sort(byId);
        Assert.assertEquals(after, before);
    }
}
|
package jp.caliconography.one_liners.fragments;
import android.app.Activity;
import android.app.Fragment;
import android.app.FragmentManager;
import android.content.ContentValues;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Path;
import android.net.Uri;
import android.os.Bundle;
import android.provider.MediaStore;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.ScaleGestureDetector;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.AnimationUtils;
import android.widget.TextView;
import android.widget.Toast;
import com.parse.GetDataCallback;
import com.parse.ParseException;
import com.parse.ParseFile;
import com.parse.ParseObject;
import com.parse.ProgressCallback;
import com.squareup.otto.Subscribe;
import org.json.JSONException;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.concurrent.atomic.AtomicInteger;
import bolts.Capture;
import bolts.Continuation;
import bolts.Task;
import butterknife.ButterKnife;
import butterknife.InjectView;
import butterknife.OnTouch;
import hugo.weaving.DebugLog;
import jp.caliconography.one_liners.R;
import jp.caliconography.one_liners.activities.BookDetailActivity;
import jp.caliconography.one_liners.dummy.DummyContent;
import jp.caliconography.one_liners.event.PopupMenuItemClickedEvent;
import jp.caliconography.one_liners.gesture.TranslationBy1FingerGestureDetector;
import jp.caliconography.one_liners.gesture.TranslationBy2FingerGestureDetector;
import jp.caliconography.one_liners.gesture.TranslationGestureDetector;
import jp.caliconography.one_liners.gesture.TranslationGestureListener;
import jp.caliconography.one_liners.model.LineConfig;
import jp.caliconography.one_liners.model.PaintConfig;
import jp.caliconography.one_liners.model.PointInFloat;
import jp.caliconography.one_liners.model.parseobject.ParseLineConfig;
import jp.caliconography.one_liners.model.parseobject.ParseShapeConfig;
import jp.caliconography.one_liners.model.parseobject.Review;
import jp.caliconography.one_liners.util.BusHolder;
import jp.caliconography.one_liners.util.Utils;
import jp.caliconography.one_liners.util.parse.ParseObjectAsyncProcResult;
import jp.caliconography.one_liners.util.parse.ParseObjectAsyncUtil;
import jp.caliconography.one_liners.widget.PopupMenu;
import jp.caliconography.one_liners.widget.PopupMenuItem;
import jp.caliconography.one_liners.widget.StrokeColorPopupItem;
import jp.caliconography.one_liners.widget.StrokeWidthPopupItem;
/**
* A fragment representing a single book detail screen.
* This fragment is either contained in a {@link jp.caliconography.one_liners.activities.BookListActivity}
* in two-pane mode (on tablets) or a {@link jp.caliconography.one_liners.activities.PhotoDetailActivity}
* on handsets.
*/
public class PhotoDetailFragment extends Fragment {
/**
 * The fragment argument representing the item ID that this fragment
 * represents.
 */
public static final String ARG_ITEM_ID = "item_id";
public static final int IMAGE_CHOOSER_RESULTCODE = 0;
static final String TAG = PhotoDetailFragment.class.getSimpleName();
// Photos taller than this are expected to be scaled down before upload.
public static final int MAX_PHOTO_HEIGHT = 1200;
private static final int NEW_PHOTO_DIALOG_LISTENER_ID = 0;
/**
 * The dummy content this fragment is presenting.
 */
private DummyContent.DummyItem mItem;
@InjectView(R.id.progress_container)
View mProgressContainer;
@InjectView(R.id.progress_text)
TextView mProgressText;
// SurfaceView the photo and drawn strokes are rendered onto.
@InjectView(R.id.photo)
SurfaceView mPhotoView;
// @InjectView(R.id.load_image)
// Button mBtnLoadImage;
@InjectView(R.id.color_popup)
PopupMenu mColorPopup;
@InjectView(R.id.stroke_width_popup)
PopupMenu mStrokeWidthPopup;
private Uri mPictureUri;
private SurfaceHolder mSurfaceHolder;
// Transform (pan/zoom) applied when drawing the photo bitmap.
private Matrix mMatrix;
private float mTranslateX;
private float mTranslateY;
private float mScale;
// Gesture detectors: pinch-zoom, 1-finger draw/pan, 2-finger pan.
private ScaleGestureDetector mScaleGestureDetector;
private TranslationBy1FingerGestureDetector mTranslationBy1FingerGestureDetector;
private TranslationBy2FingerGestureDetector mTranslationBy2FingerGestureDetector;
// Decoded photo; null until the Parse file download completes.
private Bitmap mBitmap;
private float mPrevX, mPrevY; // matrixtranslate
private float mOriginX, mOriginY;
private float mCurrentX, mCurrentY;
private PaintConfig mPaintConfig = new PaintConfig();
// Set by the SurfaceHolder callback once the surface exists; drawing waits on it.
private boolean mSurfaceCreated;
private Paint mPaint;
// Strokes drawn so far (loaded from Parse and added as the user draws).
ArrayList<LineConfig> mLineConfigArray = new ArrayList<LineConfig>();
// Review being edited; supplied by the hosting BookDetailActivity.
private Review mReview = null;
/**
 * Mandatory empty constructor for the fragment manager to instantiate the
 * fragment (e.g. upon screen orientation changes).
 */
public PhotoDetailFragment() {
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// This fragment contributes items to the options menu.
this.setHasOptionsMenu(true);
// The hosting activity must be a BookDetailActivity that owns the review.
mReview = ((BookDetailActivity) getActivity()).getCurrentReview();
}
/**
 * Inflates the layout, wires up the drawing surface, popups, and gesture
 * detectors, then kicks off two parallel loads: the photo bytes and the
 * saved stroke configs. drawAll() runs once both have finished, coordinated
 * by an AtomicInteger countdown.
 */
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
ViewGroup rootView = (ViewGroup) inflater.inflate(R.layout.fragment_photo_detail, container, false);
ButterKnife.inject(this, rootView);
createColorPopupMenu();
createStrokeWidthPopupMenu();
mSurfaceHolder = mPhotoView.getHolder();
mSurfaceHolder.addCallback(mSurfaceHolderCallback);
mMatrix = new Matrix();
mScale = 1.0f;
createGetsureDetectors();
// createDefaultPaint();
mPaint = new jp.caliconography.one_liners.model.Paint(true);
// Load the photo and any saved strokes, then draw once both are in.
if (mReview.getPhotoFile() != null) {
// Countdown: 1 for the photo download, +1 if there are stroke configs to fetch.
final AtomicInteger count = new AtomicInteger(1 + (mReview.getPaintConfigs().size() == 0 ? 0 : 1));
final Capture<byte[]> photoDataBytes = new Capture<byte[]>();
mReview.getOriginalPhotoFile().getDataInBackground(new GetDataCallback() {
@Override
public void done(byte[] bytes, ParseException e) {
photoDataBytes.set(bytes);
if (count.decrementAndGet() == 0) {
drawAll(photoDataBytes.get());
}
}
}, new ProgressCallback() {
@Override
public void done(Integer integer) {
}
});
// Fetch each saved stroke config as its own async task.
ArrayList<Task<ParseObject>> tasks = new ArrayList<Task<ParseObject>>();
final ArrayList<ParseShapeConfig> paintConfigs = mReview.getPaintConfigs();
for (ParseShapeConfig config : paintConfigs) {
tasks.add(ParseObjectAsyncUtil.fetchAsync(config));
}
// When every config fetch completes, count down and maybe draw.
Task.whenAll(tasks).onSuccess(new Continuation<Void, Void>() {
@Override
public Void then(Task<Void> task) throws Exception {
if (count.decrementAndGet() == 0) {
drawAll(photoDataBytes.get());
}
return null;
}
});
}
return rootView;
}
/**
 * Decodes the downloaded photo, waits until the SurfaceView is ready, then
 * draws the photo plus every saved stroke onto the surface.
 *
 * NOTE(review): this appears to be invoked from Parse/Bolts callbacks; if any
 * of those run on the main thread the wait below still blocks the UI -- the
 * surface-ready handshake should ideally be event-driven. TODO confirm the
 * callback threads.
 *
 * @param photoData raw bytes of the photo downloaded from Parse
 */
private void drawAll(byte[] photoData) {
    mBitmap = Utils.getBitmapFromByteArray(photoData);
    // BUGFIX: the previous implementation busy-waited in an empty loop
    // (while (!mSurfaceCreated) {}), pinning a CPU core until the surface
    // callback fired. Sleep briefly between checks instead, and bail out
    // cleanly if the thread is interrupted.
    while (!mSurfaceCreated) {
        try {
            Thread.sleep(10);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            return;
        }
    }
    getScaleForFitBitmapToView();
    Canvas canvas = null;
    try {
        canvas = mSurfaceHolder.lockCanvas(null);
        if (canvas != null) {
            setPhotoBitmapToCanvas(canvas);
            // Rebuild the stroke list from the fetched Parse configs.
            ArrayList<ParseShapeConfig> paintConfigs = mReview.getPaintConfigs();
            for (ParseShapeConfig config : paintConfigs) {
                try {
                    LineConfig lineConfig = new LineConfig((ParseLineConfig) config);
                    mLineConfigArray.add(lineConfig);
                } catch (JSONException e1) {
                    e1.printStackTrace();
                }
            }
            renderAllPath(canvas);
        }
    } finally {
        if (canvas != null) {
            mSurfaceHolder.unlockCanvasAndPost(canvas);
        }
    }
}
/**
 * Populates the stroke-width popup with the three width choices
 * (thin / medium / fat), ids 1..3, all sharing the oval background.
 */
private void createStrokeWidthPopupMenu() {
    PaintConfig.StrokeWidth[] widths = {
            PaintConfig.StrokeWidth.THIN,
            PaintConfig.StrokeWidth.MID,
            PaintConfig.StrokeWidth.FAT,
    };
    ArrayList<PopupMenuItem> widthItems = new ArrayList<PopupMenuItem>();
    for (int i = 0; i < widths.length; i++) {
        widthItems.add(new StrokeWidthPopupItem(
                getActivity().getApplicationContext(), i + 1, widths[i], R.drawable.btn_oval_common_back));
    }
    mStrokeWidthPopup.addItems(widthItems);
}
/**
 * Populates the stroke-color popup with the four color choices
 * (red / blue / green / black), ids 1..4, all sharing the oval background.
 */
private void createColorPopupMenu() {
    PaintConfig.StrokeColor[] colors = {
            PaintConfig.StrokeColor.RED,
            PaintConfig.StrokeColor.BLUE,
            PaintConfig.StrokeColor.GREEN,
            PaintConfig.StrokeColor.BLACK,
    };
    ArrayList<PopupMenuItem> colorItems = new ArrayList<PopupMenuItem>();
    for (int i = 0; i < colors.length; i++) {
        colorItems.add(new StrokeColorPopupItem(
                getActivity().getApplicationContext(), i + 1, colors[i], R.drawable.btn_oval_common_back));
    }
    mColorPopup.addItems(colorItems);
}
/**
 * Draws the photo to the surface if (and only if) the surface is already
 * created; otherwise returns without doing anything.
 */
private void tryToSetPhotoBitmapToCanvas() {
    // Nothing can be drawn until the SurfaceView reports it is ready.
    if (!mSurfaceCreated) {
        return;
    }
    getScaleForFitBitmapToView();
    Canvas canvas = null;
    try {
        canvas = mSurfaceHolder.lockCanvas(null);
        if (canvas != null) {
            setPhotoBitmapToCanvas(canvas);
        }
    } finally {
        // Only unlock a canvas we actually obtained.
        if (canvas != null) {
            mSurfaceHolder.unlockCanvasAndPost(canvas);
        }
    }
}
@Override
public void onResume() {
super.onResume();
// Subscribe to the Otto event bus while the fragment is in the foreground.
BusHolder.get().register(this);
}
@Override
public void onPause() {
// Unsubscribe before pausing so no events arrive while backgrounded.
BusHolder.get().unregister(this);
super.onPause();
}
// Builds the pinch-zoom and 1-/2-finger translation gesture detectors.
// NOTE(review): method name has a typo ("Getsure" -> "Gesture"); left as-is
// because it is called from onCreateView and a rename must update both sites.
private void createGetsureDetectors() {
mScaleGestureDetector = new ScaleGestureDetector(getActivity().getApplicationContext(), mOnScaleListener);
mTranslationBy1FingerGestureDetector = new TranslationBy1FingerGestureDetector(mTranslationBy1FingerListener);
mTranslationBy2FingerGestureDetector = new TranslationBy2FingerGestureDetector(mTranslationBy2FingerListener);
}
// private void createDefaultPaint() {
// mPaint = new Paint();
// mPaint.setAntiAlias(true);
// mPaint.setDither(true);
// mPaint.setColor(0x88cdcdcd);
// mPaint.setStyle(Paint.Style.STROKE);
// mPaint.setStrokeJoin(Paint.Join.ROUND);
// mPaint.setStrokeCap(Paint.Cap.ROUND);
// mPaint.setStrokeWidth(20);
// @OnClick(R.id.load_image)
// void onClickLoadImage(View view) {
// launchChooser();
// Routes touch events on the photo view to all three gesture detectors.
// Gestures are only processed once a bitmap has been loaded; the event is
// always consumed (returns true) so the view keeps receiving the sequence.
@OnTouch(R.id.photo)
boolean onTouchPhoto(View view, MotionEvent motionEvent) {
if (mBitmap != null) {
mScaleGestureDetector.onTouchEvent(motionEvent);
mTranslationBy1FingerGestureDetector.onTouch(view, motionEvent);
mTranslationBy2FingerGestureDetector.onTouch(view, motionEvent);
}
return true;
}
// Surface lifecycle callback: records that the surface exists and, if a
// photo is already loaded, draws it immediately (fitted to the surface).
private final SurfaceHolder.Callback mSurfaceHolderCallback = new SurfaceHolder.Callback() {
@Override
public void surfaceCreated(SurfaceHolder surfaceHolder) {
mSurfaceCreated = true;
Canvas canvas = null;
try {
// NOTE(review): uses the mSurfaceHolder field rather than the
// surfaceHolder parameter — presumably the same object; confirm.
canvas = mSurfaceHolder.lockCanvas(null);
if (canvas != null) {
if (mBitmap != null) {
getScaleForFitBitmapToView();
setPhotoBitmapToCanvas(canvas);
}
}
} finally {
// Always post the canvas back, even when nothing was drawn.
if (canvas != null) {
mSurfaceHolder.unlockCanvasAndPost(canvas);
}
}
}
@DebugLog
@Override
public void surfaceChanged(SurfaceHolder surfaceHolder, int format, int width, int height) {
}
@Override
public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
}
};
// Pinch-zoom listener: accumulates the detector's incremental scale factor
// into mScale, then redraws the photo, the in-progress line and all
// committed lines at the new zoom level.
// (The previous onScaleBegin()/onScaleEnd() overrides only called super and
// were removed; SimpleOnScaleGestureListener already supplies those defaults.)
private final ScaleGestureDetector.SimpleOnScaleGestureListener mOnScaleListener = new ScaleGestureDetector.SimpleOnScaleGestureListener() {
    @Override
    public boolean onScale(ScaleGestureDetector detector) {
        // getScaleFactor() is relative to the previous event, so multiply.
        mScale *= detector.getScaleFactor();
        Log.d(TAG, "mScale=" + Float.toString(mScale));
        Canvas canvas = null;
        try {
            canvas = mSurfaceHolder.lockCanvas(null);
            if (canvas != null) {
                setPhotoBitmapToCanvas(canvas);
                drawTempPath(canvas);
                Log.d(TAG, "______onScale______");
                renderAllPath(canvas);
            }
        } finally {
            // Always post the canvas back, even if drawing threw.
            if (canvas != null) {
                mSurfaceHolder.unlockCanvasAndPost(canvas);
            }
        }
        // Consume the event so the detector keeps reporting deltas.
        return true;
    }
};
// One-finger drag listener: draws a straight line from the touch-down point
// (mOriginX/Y) to the current finger position (mCurrentX/Y) and commits it
// when the gesture ends.
private final TranslationGestureListener mTranslationBy1FingerListener = new TranslationGestureListener() {
@DebugLog
@Override
public void onTranslationEnd(TranslationGestureDetector detector) {
Canvas canvas = null;
try {
canvas = mSurfaceHolder.lockCanvas(null);
if (canvas != null) {
setPhotoBitmapToCanvas(canvas);
Log.d(TAG, "______onTranslationEnd______");
renderAllPath(canvas);
// Commit the finished line into mLineConfigArray.
fixPath(canvas);
}
} finally {
if (canvas != null) {
mSurfaceHolder.unlockCanvasAndPost(canvas);
}
}
}
@DebugLog
@Override
public void onTranslationBegin(TranslationGestureDetector detector) {
TranslationBy1FingerGestureDetector oneFingerDetector = (TranslationBy1FingerGestureDetector) detector;
// Anchor the line: origin and current point start at the touch-down spot.
mCurrentX = mOriginX = oneFingerDetector.getX();
mCurrentY = mOriginY = oneFingerDetector.getY();
}
@DebugLog
@Override
public void onTranslation(TranslationGestureDetector detector) {
TranslationBy1FingerGestureDetector oneFingerDetector = (TranslationBy1FingerGestureDetector) detector;
mCurrentX = oneFingerDetector.getX();
mCurrentY = oneFingerDetector.getY();
Canvas canvas = null;
try {
canvas = mSurfaceHolder.lockCanvas(null);
if (canvas != null) {
// Redraw photo + committed lines, then preview the line in progress.
setPhotoBitmapToCanvas(canvas);
renderAllPath(canvas);
drawTempPath(canvas);
}
} finally {
if (canvas != null) {
mSurfaceHolder.unlockCanvasAndPost(canvas);
}
}
}
};
// Two-finger drag listener: pans the photo by accumulating the movement of
// the gesture's focus point into mTranslateX/mTranslateY.
private final TranslationGestureListener mTranslationBy2FingerListener = new TranslationGestureListener() {
@DebugLog
@Override
public void onTranslationEnd(TranslationGestureDetector detector) {
}
@Override
public void onTranslationBegin(TranslationGestureDetector detector) {
TranslationBy2FingerGestureDetector twoFingerDetector = (TranslationBy2FingerGestureDetector) detector;
// Remember where the gesture started so onTranslation can compute deltas.
mPrevX = twoFingerDetector.getFocusX();
mPrevY = twoFingerDetector.getFocusY();
}
@DebugLog
@Override
public void onTranslation(TranslationGestureDetector detector) {
TranslationBy2FingerGestureDetector twoFingerDetector = (TranslationBy2FingerGestureDetector) detector;
// Accumulate the focus-point delta since the previous event.
mTranslateX += twoFingerDetector.getFocusX() - mPrevX;
mTranslateY += twoFingerDetector.getFocusY() - mPrevY;
mPrevX = twoFingerDetector.getFocusX();
mPrevY = twoFingerDetector.getFocusY();
Canvas canvas = null;
try {
canvas = mSurfaceHolder.lockCanvas(null);
if (canvas != null) {
setPhotoBitmapToCanvas(canvas);
drawTempPath(canvas);
Log.d(TAG, "______onTranslation______");
renderAllPath(canvas);
}
} finally {
if (canvas != null) {
mSurfaceHolder.unlockCanvasAndPost(canvas);
}
}
}
};
// Redraws every committed line, remapping each one from the pan/zoom state
// it was recorded under into the current pan/zoom state.
//
// canvas        target canvas (already showing the photo)
// withTranslate apply the current pan offset (false when exporting)
// withScale     apply the current zoom factor (false when exporting)
private void renderAllPath(Canvas canvas, boolean withTranslate, boolean withScale) {
    for (LineConfig lineConfig : mLineConfigArray) {
        Path path = new Path();
        // Build the line centered on the origin, then map it into view space.
        setLineOnOrigin(lineConfig, path);
        path.transform(buildMatrixForPanZoom(lineConfig, withTranslate, withScale));
        // Copy the stored paint so the per-frame stroke-width scaling does
        // not mutate the width saved in the LineConfig.
        Paint paint = new Paint(lineConfig.getPaint());
        paint.setStrokeWidth(lineConfig.getPaint().getUnScaledStrokeWidth().getWidthInt() * (withScale ? mScale : 1));
        canvas.drawPath(path, paint);
        // (a redundant path.reset() on this throwaway local was removed)
    }
}
// Convenience overload: redraw all lines with the current pan and zoom.
private void renderAllPath(Canvas canvas) {
renderAllPath(canvas, true, true);
}
// Redraw all lines in un-panned, un-zoomed coordinates (used when exporting
// the annotated photo at the bitmap's native size).
private void renderAllPathIgnoreTranslateAndScale(Canvas canvas) {
renderAllPath(canvas, false, false);
}
// Builds the matrix that maps an origin-centered line (see setLineOnOrigin)
// into the current view, compensating for the pan/zoom state the line was
// originally drawn under.
private Matrix buildMatrixForPanZoom(LineConfig lineConfig, boolean withTranslate, boolean withScale) {
PointInFloat lineCenter = PointInFloat.getMidpoint(new PointInFloat(lineConfig.getStartX(), lineConfig.getStartY()), new PointInFloat(lineConfig.getEndX(), lineConfig.getEndY()));
// valueHolder[0] is MSCALE_X of the matrix stored at draw time, i.e. the
// zoom factor the line's coordinates were recorded under.
float[] valueHolder = getMatrixFloats(lineConfig.getMatrix());
Matrix matrix = new Matrix();
// Ratio of the current zoom to the zoom at draw time.
float scaleOfThisLine = (withScale ? mScale : 1) / valueHolder[0];
matrix.postScale(scaleOfThisLine, scaleOfThisLine);
// Put the line's midpoint back, then undo the pan that was active when the
// line was drawn — both expressed in the rescaled coordinate space.
matrix.postTranslate(lineCenter.x * scaleOfThisLine, lineCenter.y * scaleOfThisLine);
matrix.postTranslate(-lineConfig.getTranslateX() * scaleOfThisLine, -lineConfig.getTranslateY() * scaleOfThisLine);
if (withTranslate) {
// Finally apply the current pan.
matrix.postTranslate(mTranslateX, mTranslateY);
}
return matrix;
}
// Copies the nine affine components of the given matrix into a fresh array.
private float[] getMatrixFloats(Matrix matrix) {
    final float[] values = new float[9];
    matrix.getValues(values);
    return values;
}
// Writes the line's segment into 'path', translated so that its midpoint
// sits on the origin. buildMatrixForPanZoom() later maps it back into view
// coordinates.
private void setLineOnOrigin(LineConfig lineConfig, Path path) {
PointInFloat lineCenter = PointInFloat.getMidpoint(new PointInFloat(lineConfig.getStartX(), lineConfig.getStartY()), new PointInFloat(lineConfig.getEndX(), lineConfig.getEndY()));
path.moveTo(lineConfig.getStartX() - lineCenter.x, lineConfig.getStartY() - lineCenter.y);
path.lineTo(lineConfig.getEndX() - lineCenter.x, lineConfig.getEndY() - lineCenter.y);
}
// Handles the result of the image chooser / camera intent.
// On cancel: removes the MediaStore entry pre-created by getCameraIntent().
// On success: decodes the picked (or captured) image, scales it down and
// draws it onto the surface.
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (requestCode == IMAGE_CHOOSER_RESULTCODE) {
if (resultCode != Activity.RESULT_OK) {
if (mPictureUri != null) {
// Clean up the placeholder row inserted for the camera capture.
getActivity().getContentResolver().delete(mPictureUri, null, null);
mPictureUri = null;
}
return;
}
// data == null means the camera path; getBitmapFromLocalFile falls back
// to mPictureUri in that case.
mBitmap = Utils.scaleDownBitmap(getBitmapFromLocalFile(data), MAX_PHOTO_HEIGHT, getActivity());
tryToSetPhotoBitmapToCanvas();
mPictureUri = null;
}
}
@Override
public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
super.onCreateOptionsMenu(menu, inflater);
// Inflate the photo-detail menu; this adds items to the action bar if present.
inflater.inflate(R.menu.photo_detail, menu);
}
// Action-bar dispatch:
//  - home:       navigate back to the book detail screen.
//  - new_photo:  launch the image chooser, asking for confirmation first if a
//                photo would be overwritten.
//  - save_photo: serialize the drawn lines, render the photo (with and
//                without annotations) and upload everything to Parse.
@Override
public boolean onOptionsItemSelected(MenuItem item) {
int id = item.getItemId();
if (id == android.R.id.home) {
returnToBookDetail();
return true;
} else if (id == R.id.new_photo) {
if (mBitmap != null) {
// A photo already exists: confirm before overwriting it.
DialogFragment
.newInstance(true)
.setTitle(R.string.dialog_title_confirm)
.setMessage(R.string.dialog_confirm_message_overwrite_photo)
.setPositiveButtonText(R.string.dialog_posigive_button_text)
.setNegativeButtonText(R.string.dialog_negative_button_text)
.setListener(NEW_PHOTO_DIALOG_LISTENER_ID, new DialogFragment.IDialogFragmentListener() {
@Override
public void onEvent(int id, int event) {
switch (event) {
case DialogFragment.IDialogFragmentListener.ON_POSITIVE_BUTTON_CLICKED:
launchChooser();
break;
case DialogFragment.IDialogFragmentListener.ON_NEGATIVE_BUTTON_CLICKED:
case DialogFragment.IDialogFragmentListener.ON_NEUTRAL_BUTTON_CLICKED:
case DialogFragment.IDialogFragmentListener.ON_CLOSE_BUTTON_CLICKED:
case DialogFragment.IDialogFragmentListener.ON_CANCEL:
// All dismissals leave the current photo untouched.
break;
}
}
})
.show(getFragmentManager());
} else {
launchChooser();
}
} else if (id == R.id.save_photo) {
if (mBitmap == null) {
return false;
}
if (Utils.isOffline(getActivity())) {
// TODO(review): the offline toast has an empty message — supply a
// user-visible string resource.
Toast.makeText(
getActivity().getApplicationContext(),
"",
Toast.LENGTH_SHORT).show();
return false;
}
showProgressBar();
// Serialize every drawn line so the annotations can be re-edited later.
ArrayList<Task<ParseObjectAsyncProcResult>> tasks = new ArrayList<Task<ParseObjectAsyncProcResult>>();
ArrayList<ParseShapeConfig> paintConfigs = new ArrayList<ParseShapeConfig>();
for (LineConfig config : mLineConfigArray) {
try {
ParseLineConfig confParseObj = new ParseLineConfig();
confParseObj.setConfig(config);
paintConfigs.add(confParseObj);
} catch (JSONException e) {
e.printStackTrace();
}
}
mReview.setPaintConfigs(paintConfigs);
tasks.add(ParseObjectAsyncUtil.saveAsync(mReview));
// Render at the bitmap's native size, ignoring the on-screen pan/zoom.
Bitmap bitmap = Bitmap.createBitmap((int) (mBitmap.getWidth()), (int) (mBitmap.getHeight()), Bitmap.Config.ARGB_8888);
Canvas canvas = new Canvas(bitmap); // draw into the export bitmap
setPhotoBitmapToCanvasIgnoreTranslateAndScale(canvas);
// Snapshot WITHOUT annotations first...
final ParseFile originalPhotoFile = new ParseFile("original.png", Utils.bitmapToByte(bitmap));
tasks.add(ParseObjectAsyncUtil.saveAsync(originalPhotoFile));
renderAllPathIgnoreTranslateAndScale(canvas);
// ...then WITH annotations drawn on top.
final ParseFile file = new ParseFile("photo.png", Utils.bitmapToByte(bitmap));
tasks.add(ParseObjectAsyncUtil.saveAsync(file));
Task.whenAll(tasks).continueWith(new Continuation<Void, Void>() {
@Override
public Void then(Task<Void> task) throws Exception {
if (task.getError() == null) {
addOriginalPhotoToReview(originalPhotoFile);
addPhotoToReview(file);
returnToBookDetail();
} else {
hideProgressBar();
// TODO(review): message is developer-oriented; consider a string resource.
Toast.makeText(
getActivity().getApplicationContext(),
"Error saving: " + task.getError().getMessage(),
Toast.LENGTH_SHORT).show();
}
return null;
}
});
// Safe: both ParseFiles already captured the PNG bytes above.
bitmap.recycle();
// NOTE(review): mBitmap is recycled while the async save is still running
// and while this fragment could still redraw — verify nothing touches
// mBitmap after this point, or recycle in the continuation instead.
mBitmap.recycle();
return true;
}
return super.onOptionsItemSelected(item);
}
// Fades out and hides the progress overlay.
private void hideProgressBar() {
mProgressContainer.startAnimation(AnimationUtils.loadAnimation(getActivity(), android.R.anim.fade_out));
mProgressContainer.setVisibility(View.GONE);
}
// Fades in and shows the progress overlay.
private void showProgressBar() {
mProgressContainer.startAnimation(AnimationUtils.loadAnimation(getActivity(), android.R.anim.fade_in));
mProgressContainer.setVisibility(View.VISIBLE);
}
// Computes into mScale the uniform factor that fits the whole bitmap inside
// the surface (the smaller of the height and width ratios).
private void getScaleForFitBitmapToView() {
    final float frameHeight = mSurfaceHolder.getSurfaceFrame().height();
    final float frameWidth = mSurfaceHolder.getSurfaceFrame().width();
    mScale = Math.min(frameHeight / mBitmap.getHeight(), frameWidth / mBitmap.getWidth());
}
// Decodes a bitmap from the chooser result; when 'intent' is null (camera
// capture path) it falls back to mPictureUri. Returns null on failure.
private Bitmap getBitmapFromLocalFile(Intent intent) {
    Bitmap bitmap = null;
    BitmapFactory.Options options = new BitmapFactory.Options();
    // Decode mutable so the bitmap can later back a Canvas (new Canvas(bitmap)).
    options.inMutable = true;
    Uri result = (intent == null) ? mPictureUri : intent.getData();
    // try-with-resources replaces the old manual close() whose failure was
    // silently swallowed in an empty catch block.
    try (InputStream in = getActivity().getContentResolver().openInputStream(result)) {
        bitmap = BitmapFactory.decodeStream(in, null, options);
    } catch (IOException e) {
        e.printStackTrace();
    }
    return bitmap;
}
// Clears the canvas to white and draws the photo through mMatrix, optionally
// applying the current zoom (mScale) and pan (mTranslateX/Y). mMatrix is
// left populated so later drawing can reuse the same transform.
private void setPhotoBitmapToCanvas(Canvas canvas, boolean withTranslate, boolean withScale) {
mMatrix.reset();
if (withScale) {
mMatrix.postScale(mScale, mScale);
}
if (withTranslate) {
mMatrix.postTranslate(mTranslateX, mTranslateY);
}
// Clear the previous frame before redrawing.
canvas.drawColor(Color.WHITE);
canvas.drawBitmap(mBitmap, mMatrix, null);
}
// Convenience overload: draw the photo with the current pan and zoom.
private void setPhotoBitmapToCanvas(Canvas canvas) {
setPhotoBitmapToCanvas(canvas, true, true);
}
// Draw the photo at native size with no pan/zoom (used for exporting).
private void setPhotoBitmapToCanvasIgnoreTranslateAndScale(Canvas canvas) {
setPhotoBitmapToCanvas(canvas, false, false);
}
// Draws the in-progress line (mOriginX/Y -> mCurrentX/Y) in the given color
// and returns a LineConfig snapshot that also captures the current pan/zoom
// state, so renderAllPath() can re-map the line under any later pan/zoom.
private LineConfig drawPath(Canvas canvas, int color) {
    // Copy the shared paint so per-line color/width never leak into mPaint.
    jp.caliconography.one_liners.model.Paint paint = new jp.caliconography.one_liners.model.Paint(mPaint);
    paint.setColor(color);
    // On-screen stroke width follows the zoom; the unscaled width is stored
    // alongside so it can be rescaled for any future zoom level.
    paint.setStrokeWidth(mPaintConfig.getStrokeWidth().getWidthInt() * mScale);
    paint.setUnScaledStrokeWidth(mPaintConfig.getStrokeWidth());
    Path path = new Path();
    path.moveTo(mOriginX, mOriginY);
    path.lineTo(mCurrentX, mCurrentY);
    canvas.drawPath(path, paint);
    // (a redundant path.reset() on this throwaway local was removed)
    return new LineConfig(mOriginX, mOriginY, mCurrentX, mCurrentY, paint, new Matrix(mMatrix), mTranslateX, mTranslateY);
}
// Draws the in-progress line in a semi-transparent gray preview color.
private void drawTempPath(Canvas canvas) {
drawPath(canvas, 0x88cdcdcd);
}
// Draws the finished line in its final color and commits it to
// mLineConfigArray — unless the gesture never moved (zero-length line).
private void fixPath(Canvas canvas) {
LineConfig lineConfig = drawPath(canvas, mPaintConfig.getColor().getColorInt());
// Skip zero-length lines produced by a tap without any drag.
if (mOriginX != mCurrentX || mOriginY != mCurrentY) {
mLineConfigArray.add(lineConfig);
}
}
// Opens a chooser that offers both the gallery picker and, via
// EXTRA_INITIAL_INTENTS, the camera capture intent.
private void launchChooser() {
Intent imagePickIntent = getImagePickIntent();
Intent cameraIntent = getCameraIntent();
Intent chooserIntent = Intent.createChooser(imagePickIntent, "Pick Image");
// Surface the camera as an extra option in the same chooser dialog.
chooserIntent.putExtra(Intent.EXTRA_INITIAL_INTENTS, new Intent[]{cameraIntent});
startActivityForResult(chooserIntent, IMAGE_CHOOSER_RESULTCODE);
}
// Builds an ACTION_IMAGE_CAPTURE intent whose output is written to a freshly
// inserted MediaStore row; the row's content Uri is kept in mPictureUri so
// onActivityResult() can read the capture (or delete the row on cancel).
private Intent getCameraIntent() {
    ContentValues values = new ContentValues();
    values.put(MediaStore.Images.Media.TITLE, System.currentTimeMillis() + ".jpg");
    values.put(MediaStore.Images.Media.MIME_TYPE, "image/jpeg");
    mPictureUri = getActivity().getContentResolver().insert(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, values);
    Intent captureIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
    captureIntent.putExtra(MediaStore.EXTRA_OUTPUT, mPictureUri);
    return captureIntent;
}
private Intent getImagePickIntent() {
Intent i = new Intent(Intent.ACTION_GET_CONTENT);
|
package boundary;
import boundary.BuilderApplication;
import controller.BuilderMainMenuCtrl;
import controller.BuilderRedoCtrl;
import controller.BuilderUndoCtrl;
import java.awt.Dimension;
import javax.swing.BoxLayout;
import javax.swing.ButtonGroup;
import javax.swing.JButton;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JRadioButton;
import javax.swing.JSlider;
import model.BuilderModel;
import java.awt.FlowLayout;
/**
 * BuilderLevelEditorView for the builder application.
 *
 * Lays out the level-editor screen: top file controls (New/Open/Save/Close),
 * board-editing buttons (Undo/Redo and tile-type buttons), the game-type
 * radio group, the board grid panel and one probability slider per number
 * (1..6) used for number generation.
 *
 * @author Cem Unsal & Maurizio Vitale
 */
public class BuilderLevelEditorView extends JPanel {

    /** Tick spacings shared by all six probability sliders. */
    private static final int MAJOR_TICK_SPACING = 20;
    private static final int MINOR_TICK_SPACING = 10;

    BuilderApplication app;
    BuilderModel model;
    public PlayerBoardView boardView;

    public BuilderLevelEditorView(BuilderApplication app, BuilderModel model) {
        this.app = app;
        this.model = model;
        setMinimumSize(new Dimension(600, 480));
        setPreferredSize(new Dimension(600, 480));
        setLayout(new BoxLayout(this, BoxLayout.Y_AXIS));

        // panelTopControls contains all top control buttons.
        JPanel panelTopControls = new JPanel();
        panelTopControls.setAlignmentX(0);
        panelTopControls.setAlignmentY(0);
        panelTopControls.setLayout(new BoxLayout(panelTopControls, BoxLayout.X_AXIS));
        add(panelTopControls);
        JButton btnNew = new JButton("New");
        panelTopControls.add(btnNew);
        JButton btnOpen = new JButton("Open");
        panelTopControls.add(btnOpen);
        JButton btnSave = new JButton("Save");
        panelTopControls.add(btnSave);
        JButton btnClose = new JButton("Close");
        btnClose.addActionListener(new BuilderMainMenuCtrl(app, model));
        panelTopControls.add(btnClose);

        // panelGame contains all game-related options.
        JPanel panelGame = new JPanel();
        panelGame.setAlignmentX(0);
        panelGame.setLayout(new BoxLayout(panelGame, BoxLayout.X_AXIS));
        add(panelGame);

        // panelControls contains controls related to the board.
        JPanel panelControls = new JPanel();
        panelControls.setLayout(new BoxLayout(panelControls, BoxLayout.Y_AXIS));
        panelControls.setAlignmentY(0);
        panelGame.add(panelControls);
        JButton btnUndo = new JButton("Undo");
        btnUndo.addActionListener(new BuilderUndoCtrl(app, model));
        panelControls.add(btnUndo);
        JButton btnRedo = new JButton("Redo");
        // BUG FIX: the redo controller was previously attached to btnUndo,
        // leaving the Redo button dead and Undo firing both actions.
        btnRedo.addActionListener(new BuilderRedoCtrl(app, model));
        panelControls.add(btnRedo);
        JButton btnMakePlayable = new JButton("[ ]");
        panelControls.add(btnMakePlayable);
        JButton btnMakeInert = new JButton("[X]");
        panelControls.add(btnMakeInert);
        JButton btnMakeSix = new JButton("6");
        panelControls.add(btnMakeSix);
        JButton btnMakeSlot = new JButton("[6]");
        panelControls.add(btnMakeSlot);

        // panelBoard contains panelType and panelGrid.
        JPanel panelBoard = new JPanel();
        panelBoard.setAlignmentY(0);
        panelGame.add(panelBoard);
        panelBoard.setLayout(new BoxLayout(panelBoard, BoxLayout.Y_AXIS));

        // panelType contains the game-type radio buttons.
        JPanel panelType = new JPanel();
        panelType.setLayout(new FlowLayout(FlowLayout.CENTER, 5, 5));
        panelBoard.add(panelType);
        JRadioButton rdbtnPuzzle = new JRadioButton("Puzzle");
        panelType.add(rdbtnPuzzle);
        JRadioButton rdbtnLightning = new JRadioButton("Lightning");
        panelType.add(rdbtnLightning);
        JRadioButton rdbtnElimination = new JRadioButton("Elimination");
        panelType.add(rdbtnElimination);
        JRadioButton rdbtnRelease = new JRadioButton("Release");
        panelType.add(rdbtnRelease);
        // The ButtonGroup makes the radio buttons mutually exclusive.
        ButtonGroup gameType = new ButtonGroup();
        gameType.add(rdbtnPuzzle);
        gameType.add(rdbtnLightning);
        gameType.add(rdbtnElimination);
        gameType.add(rdbtnRelease);

        // panelGrid contains the board itself.
        JPanel panelGrid = new JPanel();
        panelGrid.setLayout(new BoxLayout(panelGrid, BoxLayout.X_AXIS));
        panelGrid.setSize(400, 400);
        panelBoard.add(panelGrid);

        // panelSliders contains one probability slider per generated number.
        // (Previously six verbatim copies of the same slider code; now built
        // by the createLabelledSlider helper.)
        JPanel panelSliders = new JPanel();
        panelGame.add(panelSliders);
        panelSliders.setAlignmentY(0);
        panelSliders.setLayout(new BoxLayout(panelSliders, BoxLayout.Y_AXIS));
        for (int number = 1; number <= 6; number++) {
            panelSliders.add(createLabelledSlider(Integer.toString(number)));
        }
    }

    /** Builds a horizontal panel holding a label and a tick-marked slider. */
    private static JPanel createLabelledSlider(String labelText) {
        JPanel panel = new JPanel();
        panel.setLayout(new BoxLayout(panel, BoxLayout.X_AXIS));
        panel.add(new JLabel(labelText));
        JSlider slider = new JSlider();
        slider.setPaintLabels(true);
        slider.setMajorTickSpacing(MAJOR_TICK_SPACING);
        slider.setMinorTickSpacing(MINOR_TICK_SPACING);
        slider.setSnapToTicks(true);
        slider.setPaintTicks(true);
        panel.add(slider);
        return panel;
    }
}
|
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
package br.shura.venus.executor;
import br.shura.venus.component.Container;
import br.shura.venus.component.Script;
import br.shura.venus.exception.runtime.UndefinedVariableException;
import br.shura.venus.resultor.Variable;
import br.shura.venus.value.Value;
import br.shura.x.collection.map.Map;
import br.shura.x.collection.map.impl.LinkedMap;
import br.shura.x.lang.ICloneable;
import br.shura.x.lang.annotation.Internal;
import br.shura.x.util.layer.XApi;
/**
 * A lexical variable scope for script execution. Contexts form a chain via
 * {@code parent}; variable lookups walk outward through enclosing scopes, and
 * names prefixed with '$' are resolved in the application-wide context.
 *
 * NOTE: {@code variables} is br.shura.x's Map (add/addAll API), not java.util.Map.
 */
public class Context implements ICloneable<Context> {
// Line currently being executed; used for error reporting.
private int currentLine;
// Executor driving this context; may be null — see currentExecutor().
private VenusExecutor executor;
private final Container owner;
private final Context parent;
private final Map<String, VariableStructure> variables;
public Context(Container owner, Context parent) {
this.owner = owner;
this.parent = parent;
this.variables = new LinkedMap<>();
}
/**
 * Shallow-ish copy: same owner/parent references, copies the variable map
 * entries (the VariableStructure objects themselves are shared) and the
 * current line.
 */
@Override
public Context clone() {
Context context = new Context(getOwner(), getParent());
context.getVariables().addAll(getVariables());
context.setCurrentLine(currentLine());
return context;
}
/**
 * Returns this context's executor, or the nearest ancestor's executor, or
 * null when none has been set anywhere in the chain.
 */
public VenusExecutor currentExecutor() {
return executor != null ? executor : hasParent() ? getParent().currentExecutor() : null;
}
public int currentLine() {
return currentLine;
}
public Container getOwner() {
return owner;
}
public Context getParent() {
return parent;
}
public Script getScript() {
return getOwner().getScript();
}
/**
 * Resolves a variable: '$name' is looked up in the application context;
 * otherwise this scope is tried first, then each enclosing scope.
 *
 * @throws UndefinedVariableException if no scope in the chain defines it
 */
public VariableStructure getVar(String name) throws UndefinedVariableException {
if (name.length() > 1 && name.charAt(0) == '$') {
return getOwner().getApplicationContext().getVar(name.substring(1));
}
if (isOwnerOf(name)) {
return getVariables().get(name);
}
if (hasParent()) {
try {
return getParent().getVar(name);
}
// Deliberately swallowed: a miss in the parent chain falls through to
// the exception below, which reports THIS context as the failure site.
catch (UndefinedVariableException exception) {
}
}
throw new UndefinedVariableException(this, name);
}
public VariableStructure getVar(Variable variable) throws UndefinedVariableException {
return getVar(variable.getName());
}
public Value getVarValue(String name) throws UndefinedVariableException {
XApi.requireNonNull(name, "name");
return getVar(name).getValue();
}
public Value getVarValue(Variable variable) throws UndefinedVariableException {
return getVarValue(variable.getName());
}
public Map<String, VariableStructure> getVariables() {
return variables;
}
public boolean hasParent() {
return getParent() != null;
}
/** Mirrors getVar()'s resolution rules without throwing. */
public boolean hasVar(String name) throws UndefinedVariableException {
if (name.length() > 1 && name.charAt(0) == '$') {
return getOwner().getApplicationContext().hasVar(name.substring(1));
}
return isOwnerOf(name) || (hasParent() && getParent().hasVar(name));
}
/** True if THIS scope (not an ancestor) defines the variable. */
public boolean isOwnerOf(String name) {
return getVariables().containsKey(name);
}
/**
 * Assigns a variable: updates it wherever it is already defined in the
 * chain, otherwise declares it in this scope.
 */
public void setVar(String name, Value value) {
if (!changeVar(name, value)) {
getVariables().add(name, new VariableStructure(value));
}
}
@Override
public String toString() {
return "context(owner=" + getOwner() + ", vars=" + getVariables() + ", parent=" + getParent() + ')';
}
/**
 * Updates an existing variable in the nearest scope that defines it
 * ('$name' goes straight to the application context). Returns false when no
 * scope defines the variable, signalling setVar() to declare it locally.
 */
protected boolean changeVar(String name, Value value) {
if (name.length() > 1 && name.charAt(0) == '$') {
getOwner().getApplicationContext().setVar(name.substring(1), value);
return true;
}
if (isOwnerOf(name)) {
getVariables().get(name).setValue(value);
return true;
}
return hasParent() && getParent().changeVar(name, value);
}
@Internal
protected void setCurrentLine(int currentLine) {
this.currentLine = currentLine;
}
@Internal
protected void setExecutor(VenusExecutor executor) {
this.executor = executor;
}
}
|
package br.ufrj.cos.redes.sender;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.SocketException;
import java.util.Calendar;
import java.util.Timer;
import java.util.TimerTask;
import br.ufrj.cos.redes.fileAccess.Chunk;
import br.ufrj.cos.redes.fileAccess.FileChunkRetriever;
import br.ufrj.cos.redes.packet.EndPacket;
import br.ufrj.cos.redes.packet.InitPacket;
import br.ufrj.cos.redes.packet.Package;
/**
 * UDP file sender: waits for a receiver handshake (InitPacket), then streams
 * file chunks at a fixed rate and finishes with an EndPacket.
 *
 * FIX: serialized objects are now flushed/closed before the backing byte
 * array is read — ObjectOutputStream buffers block data, so the previous
 * code could send truncated datagrams.
 */
public class Sender {
    private DatagramSocket serverSocket;
    private int port;
    private ReceiverInfo receiverInfo;
    private final long DELAY = 20; // milliseconds before the first chunk
    private final long INTERVAL = 20; // milliseconds between chunks

    public Sender(int port) {
        this.port = port;
    }

    /**
     * Binds the server socket and blocks until the receiver's InitPacket
     * arrives; the receiver's address/port are remembered for later sends.
     *
     * @throws IOException if the port is already in use
     */
    public InitPacket initSender() throws IOException {
        try {
            serverSocket = new DatagramSocket(port);
        } catch (SocketException e) {
            // Preserve the original cause instead of discarding it.
            throw new IOException("The port number " + port + " is already in use.", e);
        }
        byte[] recvInitPackeByteArray = new byte[1024];
        DatagramPacket pkg = new DatagramPacket(recvInitPackeByteArray, recvInitPackeByteArray.length);
        serverSocket.receive(pkg);
        receiverInfo = new ReceiverInfo(pkg.getPort(), pkg.getAddress());
        ObjectInputStream objInputStream = new ObjectInputStream(new ByteArrayInputStream(pkg.getData()));
        try {
            return (InitPacket) objInputStream.readObject();
        } catch (Exception e) {
            // Malformed handshake: report a sentinel packet rather than failing.
            System.out.println("Error in init object");
            return new InitPacket("ERROR", "");
        }
    }

    /**
     * Streams chunks from the retriever on a fixed-rate timer (one packet
     * every INTERVAL ms). When the retriever is exhausted, an EndPacket is
     * sent, the timer is cancelled and the callback is invoked.
     */
    public void sendChunksAtConstantRate(FileChunkRetriever chunkRetriever, int chunkLength, SendChunkEndCallback callback) {
        final Timer timer = new Timer();
        timer.scheduleAtFixedRate(new TimerTask() {
            @Override
            public void run() {
                if (!chunkRetriever.hasNext()) {
                    try {
                        byte[] sendBytes = serialize(new EndPacket(EndPacket.END_MSG));
                        DatagramPacket sendPkt = new DatagramPacket(sendBytes, sendBytes.length, receiverInfo.getAddress(), receiverInfo.getPort());
                        serverSocket.send(sendPkt);
                    } catch (IOException e) {
                        // TODO: decide how send failures should be surfaced.
                        e.printStackTrace();
                    }
                    timer.cancel();
                    callback.execute();
                    return;
                }
                Chunk chunk = new Chunk(chunkLength);
                try {
                    if (chunkRetriever.getNextChunk(chunk)) {
                        Package pkg = new Package(chunk);
                        pkg.setFileSize(chunkRetriever.getTotalFileSize());
                        pkg.setTimeStamp(Calendar.getInstance().getTimeInMillis());
                        byte[] sendBytes = serialize(pkg);
                        DatagramPacket sendPkt = new DatagramPacket(sendBytes, sendBytes.length,
                                receiverInfo.getAddress(), receiverInfo.getPort());
                        // Re-open the socket if it was closed between sends.
                        if (serverSocket.isClosed()) {
                            serverSocket = new DatagramSocket(port);
                        }
                        serverSocket.send(sendPkt);
                    }
                } catch (IOException e) {
                    // TODO: decide how send failures should be surfaced.
                    e.printStackTrace();
                }
            }
        }, DELAY, INTERVAL);
    }

    /** Sends a single EndPacket to the receiver. */
    public void sendEndMsg() throws IOException {
        byte[] endBytes = serialize(new EndPacket(EndPacket.END_MSG));
        DatagramPacket endDatagram = new DatagramPacket(endBytes, endBytes.length,
                receiverInfo.getAddress(), receiverInfo.getPort());
        if (serverSocket.isClosed()) {
            serverSocket = new DatagramSocket(port);
        }
        serverSocket.send(endDatagram);
    }

    /**
     * Serializes an object to bytes, closing (and therefore flushing) the
     * ObjectOutputStream so no block data remains buffered.
     */
    private static byte[] serialize(Object obj) throws IOException {
        ByteArrayOutputStream byteOStream = new ByteArrayOutputStream();
        try (ObjectOutputStream objOStream = new ObjectOutputStream(byteOStream)) {
            objOStream.writeObject(obj);
        }
        return byteOStream.toByteArray();
    }

    public void close() {
        if (!serverSocket.isClosed()) {
            serverSocket.close();
        }
    }

    /** Address/port of the receiver captured from its handshake packet. */
    private class ReceiverInfo {
        private InetAddress address;
        private int port;

        public ReceiverInfo(int port, InetAddress address) {
            this.port = port;
            this.address = address;
        }

        public InetAddress getAddress() {
            return address;
        }

        public int getPort() {
            return port;
        }
    }

    /** Invoked once after the last chunk (and the EndPacket) has been sent. */
    public interface SendChunkEndCallback {
        void execute();
    }
}
|
package br.ufrj.cos.redes.sender;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.SocketException;
import java.util.Timer;
import java.util.TimerTask;
import br.ufrj.cos.redes.fileAccess.Chunk;
import br.ufrj.cos.redes.fileAccess.FileChunkRetriever;
import br.ufrj.cos.redes.packet.EndPacket;
import br.ufrj.cos.redes.packet.InitPacket;
import br.ufrj.cos.redes.packet.Package;
/**
 * UDP file sender (variant): waits for a receiver handshake (InitPacket),
 * then streams file chunks at a fixed rate, stamping each chunk with its
 * scheduled transmission time. Unlike the other Sender, the end of the
 * stream is signalled only via sendEndMsg(), not from the timer task.
 *
 * FIX: serialized objects are now flushed/closed before the backing byte
 * array is read — ObjectOutputStream buffers block data, so the previous
 * code could send truncated datagrams.
 */
public class Sender {
    private DatagramSocket serverSocket;
    private int port;
    private ReceiverInfo receiverInfo;
    private final long DELAY = 20; // milliseconds before the first chunk
    private final long INTERVAL = 20; // milliseconds between chunks
    // Chunks sent so far; used to compute each chunk's nominal send time.
    private int count = 0;

    public Sender(int port) {
        this.port = port;
    }

    /**
     * Binds the server socket and blocks until the receiver's InitPacket
     * arrives; the receiver's address/port are remembered for later sends.
     *
     * @throws IOException if the port is already in use
     */
    public InitPacket initSender() throws IOException {
        try {
            serverSocket = new DatagramSocket(port);
        } catch (SocketException e) {
            // Preserve the original cause instead of discarding it.
            throw new IOException("The port number " + port + " is already in use.", e);
        }
        byte[] recvInitPackeByteArray = new byte[1024];
        DatagramPacket pkg = new DatagramPacket(recvInitPackeByteArray, recvInitPackeByteArray.length);
        serverSocket.receive(pkg);
        receiverInfo = new ReceiverInfo(pkg.getPort(), pkg.getAddress());
        ObjectInputStream objInputStream = new ObjectInputStream(new ByteArrayInputStream(pkg.getData()));
        try {
            return (InitPacket) objInputStream.readObject();
        } catch (Exception e) {
            // Malformed handshake: report a sentinel packet rather than failing.
            System.out.println("Error in init object");
            return new InitPacket("ERROR", "");
        }
    }

    /**
     * Streams chunks from the retriever on a fixed-rate timer (one packet
     * every INTERVAL ms); cancels the timer and invokes the callback once the
     * retriever is exhausted.
     */
    public void sendChunksAtConstantRate(FileChunkRetriever chunkRetriever, int chunkLength, SendChunkEndCallback callback) {
        final Timer timer = new Timer();
        timer.scheduleAtFixedRate(new TimerTask() {
            @Override
            public void run() {
                if (!chunkRetriever.hasNext()) {
                    timer.cancel();
                    callback.execute();
                    return;
                }
                Chunk chunk = new Chunk(chunkLength);
                try {
                    if (chunkRetriever.getNextChunk(chunk)) {
                        Package pkg = new Package(chunk);
                        pkg.setFileSize(chunkRetriever.getTotalFileSize());
                        // Stamp the chunk with its scheduled transmission time
                        // relative to the start of the stream.
                        chunk.setTransTimeStamp(INTERVAL + (INTERVAL * count++));
                        byte[] sendBytes = serialize(pkg);
                        DatagramPacket sendPkt = new DatagramPacket(sendBytes, sendBytes.length,
                                receiverInfo.getAddress(), receiverInfo.getPort());
                        // Re-open the socket if it was closed between sends.
                        if (serverSocket.isClosed()) {
                            serverSocket = new DatagramSocket(port);
                        }
                        serverSocket.send(sendPkt);
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }, DELAY, INTERVAL);
    }

    /** Sends a single EndPacket to the receiver. */
    public void sendEndMsg() throws IOException {
        byte[] endBytes = serialize(new EndPacket(EndPacket.END_MSG));
        DatagramPacket endDatagram = new DatagramPacket(endBytes, endBytes.length,
                receiverInfo.getAddress(), receiverInfo.getPort());
        if (serverSocket.isClosed()) {
            serverSocket = new DatagramSocket(port);
        }
        serverSocket.send(endDatagram);
    }

    /**
     * Serializes an object to bytes, closing (and therefore flushing) the
     * ObjectOutputStream so no block data remains buffered.
     */
    private static byte[] serialize(Object obj) throws IOException {
        ByteArrayOutputStream byteOStream = new ByteArrayOutputStream();
        try (ObjectOutputStream objOStream = new ObjectOutputStream(byteOStream)) {
            objOStream.writeObject(obj);
        }
        return byteOStream.toByteArray();
    }

    public void close() {
        if (!serverSocket.isClosed()) {
            serverSocket.close();
        }
    }

    /** Address/port of the receiver captured from its handshake packet. */
    private class ReceiverInfo {
        private InetAddress address;
        private int port;

        public ReceiverInfo(int port, InetAddress address) {
            this.port = port;
            this.address = address;
        }

        public InetAddress getAddress() {
            return address;
        }

        public int getPort() {
            return port;
        }
    }

    /** Invoked once after the last chunk has been sent. */
    public interface SendChunkEndCallback {
        void execute();
    }
}
|
package chapter_1;
import java.util.HashMap;
import java.util.Map;
import java.util.Scanner;
/**
* Question 1.4: Given a string, write a function to check if it is a
* permutation of a palindrome. A palindrome is a word or phrase that is the
* same forwards and backwards. A permutation is a rearrangement of letters. The
* palindrome does not need to be limited to just dictionary words.
*
* EXAMPLE Input: Tact Coa, Output: True (permutations: "taco cat", "atco cta",
* etc.)
*
* @author Sudharsanan Muralidharan
*/
public class PalindromePermutation {

    /**
     * Checks whether some permutation of {@code input} is a palindrome, using a
     * HashMap. Each character's entry is toggled between 1 and 0; a permutation
     * can form a palindrome iff at most one character ends with an odd count
     * (the middle character of an odd-length palindrome). Spaces are ignored.
     * Time Complexity: O(n), Additional Space: O(n).
     *
     * @param input lower-case string to test
     * @return true if a permutation of input is a palindrome
     */
    public boolean isPalindromePermutationMap(String input) {
        Map<Character, Integer> characterMap = new HashMap<Character, Integer>();
        char[] characters = input.toCharArray();
        int sum = 0;
        for (char ch : characters) {
            if (ch != ' ') {
                // Toggle the value for each character
                if (characterMap.containsKey(ch)) {
                    int count = characterMap.get(ch);
                    characterMap.put(ch, (count == 1) ? 0 : 1);
                } else {
                    characterMap.put(ch, 1);
                }
            }
        }
        for (Character key : characterMap.keySet()) {
            sum += characterMap.get(key).intValue();
        }
        // sum counts characters with odd occurrences; must be 0 or 1
        return sum <= 1;
    }

    /**
     * Same check using a fixed int[26] counter (lower-case a-z only), toggling
     * each slot between 0 and 1. Spaces are ignored.
     * Time Complexity: O(n), Additional Space: O(1).
     *
     * @param input lower-case string to test
     * @return true if a permutation of input is a palindrome
     */
    public boolean isPalindromePermutationArr(String input) {
        int[] counter = new int[26];
        char[] characters = input.toCharArray();
        int sum = 0;
        for (char ch : characters) {
            if (ch != ' ') {
                // Toggle values in the array between 0 and 1 for each character
                int val = counter[ch - 'a'];
                counter[ch - 'a'] = (val == 1) ? 0 : 1;
            }
        }
        for (int i = 0; i < counter.length; i++) {
            sum += counter[i];
        }
        // sum counts characters with odd occurrences; must be 0 or 1
        return sum <= 1;
    }

    /**
     * Same check using a single int as a 26-bit vector and bit manipulation.
     * Fixes two defects of the original: spaces are now skipped (previously
     * ' ' - 'a' produced a negative shift distance that silently toggled bit
     * 31, giving wrong answers for inputs with an odd number of spaces), and
     * the bit toggle is done with XOR instead of the roundabout
     * {@code Integer.MAX_VALUE - Math.pow(...)} masking.
     *
     * @param input lower-case string to test
     * @return true if a permutation of input is a palindrome
     */
    public boolean isPalindromePermutationBit(String input) {
        int checker = 0;
        for (char ch : input.toCharArray()) {
            if (ch == ' ') {
                continue; // ignore spaces, consistent with the other variants
            }
            // XOR toggles this character's bit: set if odd count so far
            checker ^= 1 << (ch - 'a');
        }
        // a palindrome permutation leaves at most one bit set
        return (checker & (checker - 1)) == 0;
    }

    public static void main(String[] args) {
        Scanner scanner = new Scanner(System.in);
        PalindromePermutation palindromePermutation = new PalindromePermutation();
        String input = scanner.nextLine().toLowerCase();
        if (input != null && !input.isEmpty()) {
            System.out.println(palindromePermutation.isPalindromePermutationMap(input));
            System.out.println(palindromePermutation.isPalindromePermutationArr(input));
            System.out.println(palindromePermutation.isPalindromePermutationBit(input));
        }
        scanner.close();
    }
}
|
package ed.lang.ruby;
import org.testng.annotations.Test;
import org.jruby.RubyNumeric;
import org.jruby.runtime.builtin.IRubyObject;
import ed.lang.ruby.RubyObjectWrapper;
import ed.lang.ruby.RubyJSObjectWrapper;
import ed.js.JSObjectBase;
import ed.js.engine.Scope;
import static ed.lang.ruby.RubyObjectWrapper.*;
/**
 * Verifies that a {@code JSObjectBase} wrapped for Ruby exposes its properties
 * as readable and writable accessors inside the Ruby runtime.
 */
public class RubyJSObjectWrapperTest extends ed.TestCase {

    Scope s = new Scope("test", null);
    org.jruby.Ruby r = org.jruby.Ruby.newInstance();

    @Test(groups = {"basic"})
    public void testAccessors() {
        JSObjectBase jobj = new JSObjectBase();
        // Integer.valueOf uses the cache; the Integer(int) constructor is deprecated
        jobj.set("count", Integer.valueOf(1));
        IRubyObject ro = toRuby(s, r, jobj);
        assertTrue(ro instanceof RubyJSObjectWrapper);
        r.getGlobalVariables().set("$jobj", ro);
        // read access
        assertEquals(1L, RubyNumeric.num2long(r.evalScriptlet("$jobj.count")));
        // write access: += must round-trip through the wrapper
        assertEquals(3L, RubyNumeric.num2long(r.evalScriptlet("$jobj.count += 2; $jobj.count")));
    }

    public static void main(String args[]) {
        new RubyJSObjectWrapperTest().runConsole();
    }
}
|
package examples.content;
import jade.core.*;
import jade.core.behaviours.*;
import jade.lang.acl.ACLMessage;
import jade.lang.acl.MessageTemplate;
import jade.util.leap.List;
import jade.util.leap.ArrayList;
import jade.util.leap.Iterator;
import jade.content.*;
import jade.content.abs.*;
import jade.content.onto.*;
import jade.content.onto.basic.*;
import jade.content.acl.*;
import jade.content.lang.*;
import jade.content.lang.leap.*;
import examples.content.musicOntology.*;
import examples.content.ecommerceOntology.*;
/**
 * JADE demo agent that plays both SELLER and BUYER with itself: it announces a
 * CD it owns (INFORM), queries its price (QUERY_REF with an IOTA expression),
 * and finally requests the sale (REQUEST). Content is encoded with the LEAP
 * codec against the Music ontology.
 */
public class CDOwner extends Agent {
    // We handle contents
    private ContentManager manager = (ContentManager)getContentManager();
    // This agent speaks a language called "LEAP"
    private Codec codec = new LEAPCodec();
    // This agent complies with the People ontology
    private Ontology ontology = MusicOntology.getInstance();

    protected void setup() {
        manager.registerLanguage(codec);
        manager.registerOntology(ontology);

        // Receivers must be in place before the first INFORM is sent
        addBehaviour(new InformManager(this));
        addBehaviour(new QueryManager(this));
        addBehaviour(new RequestManager(this));

        // Build the CD that triggers the whole conversation
        CD myCd = new CD();
        myCd.setTitle("Synchronicity");
        List tracks = new ArrayList();
        Track t1 = new Track();
        t1.setName("Every breath you take");
        tracks.add(t1);
        Track t2 = new Track();
        t2.setName("King-of-pain");
        // Integer.valueOf uses the cache; the Integer(int) constructor is deprecated
        t2.setDuration(Integer.valueOf(240000));
        tracks.add(t2);
        myCd.setTracks(tracks);

        addBehaviour(new ItemInformSender(this, myCd));
    }

    // SELLER informs BUYER that he owns a given Item
    class ItemInformSender extends OneShotBehaviour {
        private Item it;

        public ItemInformSender(Agent a, Item it) {
            super(a);
            this.it = it;
        }

        public void action() {
            try {
                System.out.println(getLocalName()+": Send INFORM");
                // Prepare the message
                ACLMessage msg = new ACLMessage(ACLMessage.INFORM);
                AID receiver = getAID(); // Send the message to myself
                msg.setSender(getAID());
                msg.addReceiver(receiver);
                msg.setLanguage(codec.getName());
                msg.setOntology(ontology.getName());
                // Fill the content
                Owns owns = new Owns();
                owns.setOwner(getAID());
                owns.setItem(it);
                manager.fillContent(msg, owns);
                send(msg);
            }
            catch(Exception e) {
                e.printStackTrace();
            }
        }
    }

    // BUYER handles informations received from the SELLER
    class InformManager extends CyclicBehaviour {
        public InformManager(Agent a) {
            super(a);
        }

        public void action() {
            ACLMessage msg = receive(MessageTemplate.MatchPerformative(ACLMessage.INFORM));
            if (msg != null) {
                System.out.println(getLocalName()+": INFORM received");
                try {
                    ContentElement ce = manager.extractContent(msg);
                    if (ce instanceof Owns) {
                        Owns owns = (Owns) ce;
                        AID owner = owns.getOwner();
                        System.out.println("Owner is: "+owner);
                        Item it = owns.getItem();
                        System.out.println("Item is:");
                        System.out.println(it);
                        // Ask how much the announced item costs
                        addBehaviour(new QuerySender(myAgent, it));
                    }
                    else if (ce instanceof Costs) {
                        Costs c = (Costs) ce;
                        Item it = c.getItem();
                        Price p = c.getPrice();
                        System.out.println("Item ");
                        System.out.println(it);
                        System.out.println("costs "+p.getValue());
                        // Price known: proceed with the purchase request
                        addBehaviour(new RequestSender(myAgent, it));
                    }
                    else {
                        System.out.println("Unknown predicate "+ce.getClass().getName());
                    }
                }
                catch (UngroundedException ue) {
                    // Content contains a variable: fall back to the abstract descriptor
                    try {
                        AbsContentElement ce = manager.extractAbsContent(msg);
                        if (ce.getTypeName().equals(BasicOntology.EQUALS)) {
                            AbsConcept price = (AbsConcept) ce.getAbsObject(BasicOntology.EQUALS_RIGHT);
                            System.out.println("Price is "+price.getInteger(ECommerceOntology.PRICE_VALUE));
                            AbsIRE iota = (AbsIRE) ce.getAbsObject(BasicOntology.EQUALS_LEFT);
                            AbsProposition costs = iota.getProposition();
                            AbsConcept i = (AbsConcept) costs.getAbsObject(ECommerceOntology.COSTS_ITEM);
                            Item item = (Item) MusicOntology.getInstance().toObject(i);
                            addBehaviour(new RequestSender(myAgent, item));
                        }
                        else {
                            System.out.println("Unknown predicate "+ce.getTypeName());
                        }
                    }
                    catch (Exception e) {
                        e.printStackTrace();
                    }
                }
                catch(Exception e) {
                    e.printStackTrace();
                }
            }
            else {
                block();
            }
        }
    }

    // BUYER queries the SELLER how much a given item costs
    class QuerySender extends OneShotBehaviour {
        Item it;

        public QuerySender(Agent a, Item it) {
            super(a);
            this.it = it;
        }

        public void action() {
            try {
                System.out.println(getLocalName()+": Send QUERY_REF");
                // Prepare the message
                ACLMessage msg = new ACLMessage(ACLMessage.QUERY_REF);
                AID receiver = getAID(); // Send the message to myself
                msg.setSender(getAID());
                msg.addReceiver(receiver);
                msg.setLanguage(codec.getName());
                msg.setOntology(ontology.getName());
                // Fill the content: IOTA x (COSTS item x)
                Ontology onto = MusicOntology.getInstance();
                AbsVariable x = new AbsVariable("x", ECommerceOntology.PRICE);
                AbsPredicate costs = new AbsPredicate(ECommerceOntology.COSTS);
                costs.set(ECommerceOntology.COSTS_ITEM, (AbsTerm) onto.fromObject(it));
                costs.set(ECommerceOntology.COSTS_PRICE, x);
                AbsIRE iota = new AbsIRE(LEAPCodec.IOTA);
                iota.setVariable(x);
                iota.setProposition(costs);
                manager.fillContent(msg, iota);
                send(msg);
            }
            catch(Exception e) {
                e.printStackTrace();
            }
        }
    }

    // SELLER handles queries received from BUYER
    class QueryManager extends CyclicBehaviour {
        public QueryManager(Agent a) {
            super(a);
        }

        public void action() {
            ACLMessage msg = receive(MessageTemplate.MatchPerformative(ACLMessage.QUERY_REF));
            if (msg != null) {
                try {
                    System.out.println(getLocalName()+": QUERY_REF received");
                    // The content of a QUERY_REF is definitely an abstract descriptor
                    // representing an IRE
                    AbsIRE ire = (AbsIRE) manager.extractAbsContent(msg);
                    if (ire.getTypeName().equals(LEAPCodec.IOTA)) {
                        AbsPredicate p = (AbsPredicate) ire.getProposition();
                        if (p.getTypeName().equals(ECommerceOntology.COSTS) &&
                                p.getAbsTerm(ECommerceOntology.COSTS_PRICE) instanceof AbsVariable) {
                            AbsConcept absItem = (AbsConcept) p.getAbsTerm(ECommerceOntology.COSTS_ITEM);
                            Item it = (Item) MusicOntology.getInstance().toObject(absItem);
                            addBehaviour(new PriceInformSender(myAgent, it));
                        }
                        else {
                            System.out.println("Can't answer to query!!");
                        }
                    }
                    else {
                        System.out.println("Unknown IRE type");
                    }
                }
                catch(Exception e) {
                    e.printStackTrace();
                }
            }
            else {
                block();
            }
        }
    }

    // SELLER informs BUYER about the cost of a given Item
    class PriceInformSender extends OneShotBehaviour {
        private Item it;

        public PriceInformSender(Agent a, Item it) {
            super(a);
            this.it = it;
        }

        public void action() {
            try {
                System.out.println(getLocalName()+": Send INFORM");
                // Prepare the message
                ACLMessage msg = new ACLMessage(ACLMessage.INFORM);
                AID receiver = getAID(); // Send the message to myself
                msg.setSender(getAID());
                msg.addReceiver(receiver);
                msg.setLanguage(codec.getName());
                msg.setOntology(ontology.getName());
                // Fill the content
                /*Costs costs = new Costs();
                costs.setItem(it);
                costs.setPrice(new Price(new Integer(40000)));
                manager.fillContent(msg, costs);*/
                // Answer with: (IOTA x (COSTS item x)) = price, to exercise the
                // abstract-content path on the receiving side.
                Ontology onto = MusicOntology.getInstance();
                AbsVariable x = new AbsVariable("x", ECommerceOntology.PRICE);
                AbsPredicate costs = new AbsPredicate(ECommerceOntology.COSTS);
                costs.set(ECommerceOntology.COSTS_ITEM, (AbsTerm) onto.fromObject(it));
                costs.set(ECommerceOntology.COSTS_PRICE, x);
                AbsIRE iota = new AbsIRE(LEAPCodec.IOTA);
                iota.setVariable(x);
                iota.setProposition(costs);
                AbsPredicate equals = new AbsPredicate(BasicOntology.EQUALS);
                equals.set(BasicOntology.EQUALS_LEFT, iota);
                AbsConcept price = new AbsConcept(ECommerceOntology.PRICE);
                price.set(ECommerceOntology.PRICE_VALUE, 40000);
                equals.set(BasicOntology.EQUALS_RIGHT, price);
                manager.fillContent(msg, equals);
                send(msg);
            }
            catch(Exception e) {
                e.printStackTrace();
            }
        }
    }

    // BUYER requests SELLER to sell a given Item
    class RequestSender extends OneShotBehaviour {
        private Item item = null;

        public RequestSender(Agent a, Item item) {
            super(a);
            this.item = item;
        }

        public void action() {
            try {
                System.out.println(getLocalName()+": Send REQUEST");
                // Prepare the message
                ACLMessage msg = new ACLMessage(ACLMessage.REQUEST);
                AID receiver = getAID(); // Send the message to myself
                msg.setSender(getAID());
                msg.addReceiver(receiver);
                msg.setLanguage(codec.getName());
                msg.setOntology(ontology.getName());
                // Fill the content
                Sell sell = new Sell();
                sell.setBuyer(getAID());
                sell.setItem(item);
                sell.setCardNumber("3475660018");
                manager.fillContent(msg, sell);
                send(msg);
            }
            catch(Exception e) {
                e.printStackTrace();
            }
        }
    }

    // SELLER handles requests from BUYER
    class RequestManager extends CyclicBehaviour {
        public RequestManager(Agent a) {
            super(a);
        }

        public void action() {
            ACLMessage msg = receive(MessageTemplate.MatchPerformative(ACLMessage.REQUEST));
            if (msg != null) {
                try {
                    System.out.println(getLocalName()+": REQUEST received");
                    ContentElement ce = manager.extractContent(msg);
                    if (ce instanceof Sell) {
                        Sell sell = (Sell) ce;
                        System.out.println("Buyer is:");
                        System.out.println(sell.getBuyer());
                        System.out.println("Item is:");
                        System.out.println(sell.getItem());
                        System.out.println("Card number is:");
                        System.out.println(sell.getCardNumber());
                    }
                    else {
                        System.out.println("Unknown action");
                    }
                }
                catch(Exception e) {
                    e.printStackTrace();
                }
            }
            else {
                block();
            }
        }
    }
}
|
package com.nerodesk.om.aws;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.jcabi.aspects.Tv;
import com.jcabi.s3.Bucket;
import com.jcabi.s3.Ocket;
import com.jcabi.s3.mock.MkBucket;
import com.nerodesk.om.Docs;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import nl.jqno.equalsverifier.EqualsVerifier;
import nl.jqno.equalsverifier.Warning;
import org.hamcrest.MatcherAssert;
import org.hamcrest.Matchers;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
/**
* Tests for {@link AwsDocs}.
*
* @author Krzysztof Krason (Krzysztof.Krason@gmail.com)
* @author Felipe Pina (felipe.pina@protonmail.com)
* @author Carlos Alexandro Becker (caarlos0@gmail.com)
* @version $Id$
* @since 0.3
*/
public final class AwsDocsTest {
    /**
     * Temporary folder.
     * @checkstyle VisibilityModifierCheck (3 lines)
     */
    @Rule
    public final transient TemporaryFolder folder = new TemporaryFolder();
    /**
     * Setup mocks.
     */
    @Before
    public void init() {
        MockitoAnnotations.initMocks(this);
    }
    /**
     * AwsDocs can obtain an AwsDoc.
     * @throws Exception in case of error.
     */
    @Test
    public void obtainsDoc() throws Exception {
        final Bucket bucket = this.mockBucket("any-name", "any-file");
        MatcherAssert.assertThat(
            new AwsDocs(bucket, "urn2").doc("doc1"),
            Matchers.notNullValue()
        );
    }
    /**
     * AwsDocs can fetch the total size of its documents from the
     * ocket metadata.
     * @throws Exception in case of error.
     */
    @Test
    public void fetchesSize() throws Exception {
        final long size = Tv.THOUSAND;
        final Bucket bucket = Mockito.mock(Bucket.class);
        final Ocket ocket = Mockito.mock(Ocket.class);
        Mockito.doReturn(ocket).when(bucket).ocket(Mockito.anyString());
        Mockito.doReturn(Collections.singleton("test"))
            .when(bucket).list(Mockito.anyString());
        final ObjectMetadata meta = new ObjectMetadata();
        meta.setContentLength(size);
        Mockito.doReturn(meta).when(ocket).meta();
        MatcherAssert.assertThat(
            new AwsDocs(bucket, "urn3").size(),
            Matchers.is(size)
        );
    }
    /**
     * AwsDocs conforms to equals and hashCode contract.
     */
    @Test
    public void conformsToEqualsHashCodeContract() {
        EqualsVerifier.forClass(AwsDocs.class)
            .suppress(Warning.TRANSIENT_FIELDS)
            .verify();
    }
    /**
     * AwsDocs can list all docs.
     * @throws IOException If something goes wrong.
     */
    @Test
    public void listsDocs() throws IOException {
        final String label = "lists";
        final String name = "lists-exists";
        final Bucket bucket = this.mockBucket(label, name);
        final List<String> expected = Arrays.asList(
            name,
            // @checkstyle MultipleStringLiteralsCheck (1 line)
            Paths.get("sub", "file").toString()
        );
        final List<String> names = new AwsDocs(bucket, label).names();
        Collections.sort(names);
        MatcherAssert.assertThat(names, Matchers.equalTo(expected));
    }
    /**
     * AwsDocs can find a doc.
     * @throws IOException If something goes wrong.
     */
    @Test
    public void findsDoc() throws IOException {
        final String label = "finds";
        final String exists = "finds-exists";
        final Bucket bucket = this.mockBucket(label, exists);
        final Docs docs = new AwsDocs(bucket, label);
        MatcherAssert.assertThat(
            docs.doc(exists).exists(),
            Matchers.equalTo(true)
        );
        MatcherAssert.assertThat(
            docs.doc("xyz").exists(),
            Matchers.equalTo(false)
        );
    }
    /**
     * Builds a mock Bucket.
     * @param name Bucket name.
     * @param exists Name of Ocket that should exist.
     * @return The mock bucket.
     * @throws IOException If something goes wrong.
     */
    private Bucket mockBucket(final String name, final String exists)
        throws IOException {
        final Path bucket = Files.createDirectories(
            Paths.get(this.folder.getRoot().getAbsolutePath(), name, name)
        );
        Files.createFile(bucket.resolve(exists));
        Files.createFile(
            Files.createDirectories(bucket.resolve("sub")).resolve("file")
        );
        return new MkBucket(this.folder.getRoot(), name);
    }
}
|
package com.rultor.profiles;
import com.jcabi.matchers.XhtmlMatchers;
import org.hamcrest.MatcherAssert;
import org.junit.Test;
/**
* Tests for ${@link YamlXML}.
*
* @author Yegor Bugayenko (yegor@tpc2.com)
* @version $Id$
* @since 1.0
*/
public final class YamlXMLTest {
    /**
     * YamlXML can parse.
     * @throws Exception In case of error.
     */
    @Test
    public void parsesYamlConfig() throws Exception {
        final String yaml = "a: test\nb: 'hello'\nc:\n - one\nd:\n f: e";
        MatcherAssert.assertThat(
            new YamlXML(yaml).get(),
            XhtmlMatchers.hasXPaths(
                "/p/entry[@key='a' and .='test']",
                "/p/entry[@key='b' and .='hello']",
                "/p/entry[@key='c']/item[.='one']",
                "/p/entry[@key='d']/entry[@key='f' and .='e']"
            )
        );
    }
    /**
     * YamlXML can parse a broken text.
     * @throws Exception In case of error.
     */
    @Test
    public void parsesYamlConfigWhenBroken() throws Exception {
        final String yaml = "a: alpha\nb:\nc:\n - beta";
        MatcherAssert.assertThat(
            new YamlXML(yaml).get(),
            XhtmlMatchers.hasXPaths(
                "/p/entry[@key='a' and .='alpha']",
                "/p/entry[@key='b' and .='']",
                "/p/entry[@key='c']/item[.='beta']"
            )
        );
    }
}
|
package com.tomgibara.storage;
import static java.util.Arrays.asList;
import junit.framework.TestCase;
public class StoreTest extends TestCase {

    /** A transformed view is a read-only, lazily-doubled projection of its source. */
    public void testTransformedBy() {
        Store<Integer> source = Stores.intsAndNull(1,2,3);
        assertTrue(source.isNullAllowed());
        Store<Integer> doubled = source.transformedBy(i -> 2 * i);
        assertEquals(source.count(), doubled.count());
        for (int index = 0; index < source.count(); index++) {
            assertEquals(source.get(index) * 2, doubled.get(index).intValue());
        }
        assertTrue(source.isMutable());
        assertFalse(doubled.isMutable());
        try {
            doubled.transpose(0, 1);
            fail();
        } catch (IllegalStateException e) {
            /* expected */
        }
        // the view reflects writes to the underlying store
        source.set(0, null);
        assertNull(doubled.get(0));
    }

    /** Resizing a nullable store pads with nulls and detaches from the original. */
    public void testNullableResizedCopy() {
        Store<Integer> source = Stores.intsAndNull(1,2,3);
        Store<Integer> grown = source.resizedCopy(5);
        assertTrue(grown.isMutable());
        assertTrue(grown.isNullAllowed());
        assertEquals(asList(1,2,3,null,null), grown.asList());
        Store<Integer> snapshot = source.immutableCopy();
        grown.set(0, 0);
        grown.set(3, 0);
        grown.set(4, 0);
        assertEquals(asList(0,2,3,0,0), grown.asList());
        // mutating the copy must not touch the source
        assertEquals(snapshot.asList(), source.asList());
        assertEquals(asList(1,2), source.resizedCopy(2).asList());
    }

    /** Resizing a non-nullable store pads with zeros and detaches from the original. */
    public void testResizedCopy() {
        Store<Integer> source = Stores.ints(1,2,3);
        Store<Integer> grown = source.resizedCopy(5);
        assertTrue(grown.isMutable());
        assertFalse(grown.isNullAllowed());
        assertEquals(asList(1,2,3,0,0), grown.asList());
        Store<Integer> snapshot = source.immutableCopy();
        grown.set(0, 4);
        grown.set(3, 4);
        grown.set(4, 4);
        assertEquals(asList(4,2,3,4,4), grown.asList());
        // mutating the copy must not touch the source
        assertEquals(snapshot.asList(), source.asList());
        assertEquals(asList(1,2), source.resizedCopy(2).asList());
    }
}
|
package com.wizzardo.tools.xml;
import org.junit.Assert;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
public class NodeTest {
@Test
public void parse() {
String s;
Node xml;
s = "I say: '${hello}'";
Assert.assertEquals("I say: '${hello}'", new XmlParser().parse(s).textOwn());
s = "<xml><xml>";
Assert.assertEquals("xml", new XmlParser().parse(s).name());
s = "<xml/>";
Assert.assertEquals("xml", new XmlParser().parse(s).name());
s = "<xml attr><xml>";
Assert.assertEquals(true, new XmlParser().parse(s).hasAttr("attr"));
s = "<xml attr ><xml>";
Assert.assertEquals(true, new XmlParser().parse(s).hasAttr("attr"));
s = "<xml attr />";
Assert.assertEquals(true, new XmlParser().parse(s).hasAttr("attr"));
s = "<xml attr/>";
Assert.assertEquals(true, new XmlParser().parse(s).hasAttr("attr"));
s = "<xml attr attr2/>";
Assert.assertEquals(true, new XmlParser().parse(s).hasAttr("attr"));
s = "<xml attr=\"qwerty\"/>";
Assert.assertEquals("qwerty", new XmlParser().parse(s).attr("attr"));
s = "<xml attr=\"qwerty\" attr2/>";
Assert.assertEquals("qwerty", new XmlParser().parse(s).attr("attr"));
s = "<xml attr2 attr=\"qwerty\"/>";
Assert.assertEquals("qwerty", new XmlParser().parse(s).attr("attr"));
s = "<xml><child></child></xml>";
Assert.assertEquals(1, new XmlParser().parse(s).size());
s = "<xml><child/></xml>";
Assert.assertEquals(1, new XmlParser().parse(s).size());
s = "<xml><child attr=\"qwerty\"/></xml>";
Assert.assertEquals(1, new XmlParser().parse(s).size());
Assert.assertEquals("qwerty", new XmlParser().parse(s).first().attr("attr"));
s = "<xml><child/><child/><child/>ololo</xml>";
Assert.assertEquals(4, new XmlParser().parse(s).size());
Assert.assertEquals("ololo", new XmlParser().parse(s).text());
Assert.assertEquals("ololo", new XmlParser().parse(s).textOwn());
s = "<xml><child/><child/><child>ololo</child></xml>";
Assert.assertEquals(3, new XmlParser().parse(s).size());
Assert.assertEquals("ololo", new XmlParser().parse(s).text());
Assert.assertEquals("", new XmlParser().parse(s).textOwn());
s = "<xml><child/><child/><child>ololo</child>lo</xml>";
Assert.assertEquals(4, new XmlParser().parse(s).size());
Assert.assertEquals("ololo lo", new XmlParser().parse(s).text());
Assert.assertEquals("lo", new XmlParser().parse(s).textOwn());
s = "<xml>\n\t\tololo\n\t\t</xml>";
Assert.assertEquals("ololo", new XmlParser().parse(s).text());
}
@Test
public void xml_comment_1() throws IOException {
String s = "<div><!-- <comment> --></div>";
Node div = new XmlParser().parse(s);
Assert.assertEquals(0, div.attributes().size());
Assert.assertEquals(1, div.children().size());
Assert.assertEquals(true, div.children().get(0).isComment());
Assert.assertEquals("<!-- <comment> -->", div.children().get(0).ownText());
}
@Test
public void xml_comment_2() throws IOException {
String s = "" +
"<div>\n" +
" <!--[if IE]>\n" +
" According to the conditional comment this is IE<br />\n" +
" <![endif]
"</div>\n";
Node div = new XmlParser().parse(s);
Assert.assertEquals(0, div.attributes().size());
Assert.assertEquals(1, div.children().size());
Assert.assertEquals(true, div.children().get(0).isComment());
Assert.assertEquals("" +
"<!-- [if IE]>\n" +
" According to the conditional comment this is IE<br />\n" +
" <![endif] -->", div.children().get(0).ownText());
}
@Test
public void html_1() throws IOException {
String s = "";
for (File f : new File("src/test/resources/xml").listFiles()) {
System.out.println("parsing: " + f);
new HtmlParser().parse(f);
}
}
@Test
public void html_2() throws IOException {
String s = "<div width=100px></div>";
Node root = new HtmlParser().parse(s);
Node div = root.children().get(0);
Assert.assertEquals(1, div.attributes().size());
Assert.assertEquals(0, div.children().size());
Assert.assertEquals("100px", div.attr("width"));
s = "<div width=100px height=50px></div>";
root = new HtmlParser().parse(s);
div = root.children().get(0);
Assert.assertEquals(2, div.attributes().size());
Assert.assertEquals(0, div.children().size());
Assert.assertEquals("100px", div.attr("width"));
Assert.assertEquals("50px", div.attr("height"));
}
@Test
public void gsp_1() throws IOException {
String s = "<div><g:textField name=\"${it.key}\" placeholder=\"${[].collect({it})}\"/></div>";
Node root = new GspParser().parse(s);
Node div = root.children().get(0);
Assert.assertEquals("div", div.name());
Assert.assertEquals(1, div.children().size());
Node textField = div.children().get(0);
Assert.assertEquals("g:textField", textField.name());
Assert.assertEquals(0, textField.children().size());
Assert.assertEquals(2, textField.attributes().size());
Assert.assertEquals("${it.key}", textField.attr("name"));
Assert.assertEquals("${[].collect({it})}", textField.attr("placeholder"));
}
@Test
public void gsp_2() throws IOException {
String s = "<div><g:textField name=\"${it.key}\" placeholder=\"${String.valueOf(it.getValue()).replace(\"\\\"\", \"\")}\"/></div>";
Node root = new GspParser().parse(s);
Node div = root.children().get(0);
Assert.assertEquals("div", div.name());
Assert.assertEquals(1, div.children().size());
Node textField = div.children().get(0);
Assert.assertEquals("g:textField", textField.name());
Assert.assertEquals(0, textField.children().size());
Assert.assertEquals(2, textField.attributes().size());
Assert.assertEquals("${it.key}", textField.attr("name"));
Assert.assertEquals("${String.valueOf(it.getValue()).replace(\"\\\"\", \"\")}", textField.attr("placeholder"));
}
@Test
public void gsp_3() throws IOException {
String s = "<div id=\"${id}\"><span>foo:</span>${foo}</div>";
Node root = new GspParser().parse(s);
Node div = root.children().get(0);
Assert.assertEquals("div", div.name());
Assert.assertEquals(2, div.children().size());
Assert.assertEquals(1, div.attributes().size());
Assert.assertEquals("${id}", div.attr("id"));
Node span = div.children().get(0);
Assert.assertEquals("span", span.name());
Assert.assertEquals(1, span.children().size());
Assert.assertEquals("foo:", span.text());
Node foo = div.children().get(1);
Assert.assertEquals("${foo}", foo.text());
}
}
|
package de.galan.commons.time;
import static de.galan.commons.test.Tests.*;
import static de.galan.commons.time.Dates.*;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.StrictAssertions.assertThat;
import static org.junit.Assert.*;
import java.time.Instant;
import java.time.ZonedDateTime;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import org.apache.commons.lang3.time.DateUtils;
import org.junit.Test;
import de.galan.commons.test.AbstractTestParent;
/**
* CUT DateDsl
*
* @author galan
*/
@Deprecated
public class DatesTest extends AbstractTestParent {
@Test
public void testNow() {
assertDateNear("10s", now());
assertDateNear("10s", from(now()).toDate());
}
@Test
public void testTomorrow() {
Date tx = DateUtils.addDays(new Date(), 1);
assertBetween(tx.getTime() - 1000, tx.getTime() + 1000, tomorrow().getTime());
}
@Test
public void testYesterday() {
Date tx = DateUtils.addDays(new Date(), -1);
assertBetween(tx.getTime() - 1000, tx.getTime() + 1000, yesterday().getTime());
}
@Test
public void testDate() {
Calendar cal = new GregorianCalendar();
cal.set(Calendar.YEAR, 2012);
cal.set(Calendar.MONTH, 4);
cal.set(Calendar.DAY_OF_MONTH, 31);
cal.set(Calendar.HOUR_OF_DAY, 17);
cal.set(Calendar.MINUTE, 51);
cal.set(Calendar.SECOND, 1);
cal.set(Calendar.MILLISECOND, 0);
Date dateString = date("2012-05-31 17:51:01");
Date dateInt = date(2012, 5, 31, 17, 51, 1);
assertEquals(cal.getTime().getTime(), dateString.getTime());
assertEquals(cal.getTime().getTime(), dateInt.getTime());
assertEquals(dateString.getTime(), dateInt.getTime());
}
@Test
public void toStringFormat() throws Exception {
String dateString = "2012-05-31 17:51:01";
assertEquals(dateString, from(date(dateString)).toString());
}
@Test
public void toStringCustomFormat() {
String dateString = "2012-05-31 17:51:01";
assertEquals("05/31/2012", from(date(dateString)).toString("MM/dd/yyyy"));
}
@Test
public void testFromTo() {
assertEquals("2012-05-31 17:51:01", from(date("2012-05-31 17:51:01")).toString());
assertDateNear("1s", from(now()).toDate());
}
@Test
public void testIn() {
assertEquals("2012-06-02 17:51:01", from(date("2012-05-31 17:51:01")).in(2, days()).toString());
assertEquals("2012-08-31 17:51:01", from(date("2012-05-31 17:51:01")).in(3, months()).toString());
assertEquals("2013-05-31 17:51:01", from(date("2012-05-31 17:51:01")).in(1, year()).toString());
String date = from(date("2012-05-31 17:51:01")).in(2, years()).in(3, months()).in(1, hour()).in(10, minutes()).in(20, seconds()).toString();
assertEquals("2014-08-31 19:01:21", date);
}
@Test
public void testBefore() {
assertEquals("2012-05-29 17:51:01", from(date("2012-05-31 17:51:01")).before(2, days()).toString());
assertEquals("2012-02-29 17:51:01", from(date("2012-05-31 17:51:01")).before(3, months()).toString());
assertEquals("2011-05-31 17:51:01", from(date("2012-05-31 17:51:01")).before(1, year()).toString());
String date = from(date("2012-05-31 17:51:01")).before(2, years()).before(3, months()).before(1, hour()).before(10, minutes()).before(20,
seconds()).toString();
assertEquals("2010-02-28 16:40:41", date);
}
@Test
public void testNext() {
assertEquals("2013-01-01 00:00:00", from(date("2012-05-31 17:51:01")).next(year()).toString());
assertEquals("2012-06-01 00:00:00", from(date("2012-05-31 17:51:01")).next(month()).toString());
assertEquals("2012-06-01 00:00:00", from(date("2012-05-31 17:51:01")).next(day()).toString());
assertEquals("2012-05-31 18:00:00", from(date("2012-05-31 17:51:01")).next(hour()).toString());
assertEquals("2012-05-31 17:52:00", from(date("2012-05-31 17:51:01")).next(minute()).toString());
assertEquals("2012-05-31 17:51:02", from(date("2012-05-31 17:51:01")).next(second()).toString());
}
@Test
public void testPrevious() {
assertEquals("2011-01-01 00:00:00", from(date("2012-05-31 17:51:01")).previous(year()).toString());
assertEquals("2012-04-01 00:00:00", from(date("2012-05-31 17:51:01")).previous(month()).toString());
assertEquals("2012-05-30 00:00:00", from(date("2012-05-31 17:51:01")).previous(day()).toString());
assertEquals("2012-05-31 16:00:00", from(date("2012-05-31 17:51:01")).previous(hour()).toString());
assertEquals("2012-05-31 17:50:00", from(date("2012-05-31 17:51:01")).previous(minute()).toString());
assertEquals("2012-05-31 17:51:00", from(date("2012-05-31 17:51:01")).previous(second()).toString());
}
@Test
public void testAt() {
assertEquals("2012-05-31 12:00:00", from(date("2012-05-31 17:51:01")).atNoon().toString());
assertEquals("2012-05-31 00:00:00", from(date("2012-05-31 17:51:01")).atMidnight().toString());
assertEquals("2012-05-31 22:13:09", from(date("2012-05-31 17:51:01")).at("22:13:09").toString());
assertEquals("2012-05-31 22:13:09", from(date("2012-05-31 17:51:01")).at(22, 13, 9).toString());
}
@Test
public void testNextWeekday() {
assertEquals("2012-06-07 17:51:01", from(date("2012-05-31 17:51:01")).next(thursday()).toString());
assertEquals("2012-06-01 17:51:01", from(date("2012-05-31 17:51:01")).next(friday()).toString());
assertEquals("2012-06-02 17:51:01", from(date("2012-05-31 17:51:01")).next(saturday()).toString());
assertEquals("2012-06-03 17:51:01", from(date("2012-05-31 17:51:01")).next(sunday()).toString());
assertEquals("2012-06-04 17:51:01", from(date("2012-05-31 17:51:01")).next(monday()).toString());
assertEquals("2012-06-05 17:51:01", from(date("2012-05-31 17:51:01")).next(tuesday()).toString());
assertEquals("2012-06-06 17:51:01", from(date("2012-05-31 17:51:01")).next(wednesday()).toString());
}
@Test
public void testPreviousWeekday() {
assertEquals("2012-05-24 17:51:01", from(date("2012-05-31 17:51:01")).previous(thursday()).toString());
assertEquals("2012-05-25 17:51:01", from(date("2012-05-31 17:51:01")).previous(friday()).toString());
assertEquals("2012-05-26 17:51:01", from(date("2012-05-31 17:51:01")).previous(saturday()).toString());
assertEquals("2012-05-27 17:51:01", from(date("2012-05-31 17:51:01")).previous(sunday()).toString());
assertEquals("2012-05-28 17:51:01", from(date("2012-05-31 17:51:01")).previous(monday()).toString());
assertEquals("2012-05-29 17:51:01", from(date("2012-05-31 17:51:01")).previous(tuesday()).toString());
assertEquals("2012-05-30 17:51:01", from(date("2012-05-31 17:51:01")).previous(wednesday()).toString());
}
@Test
public void fromDuskTillDawn() throws Exception {
    // 18:00 to 06:00 the next day spans exactly twelve hours.
    final Date dusk = date("2012-11-01 18:00:00");
    final Date dawn = date("2012-11-02 06:00:00");
    final long twelveHoursInMillis = Durations.dehumanize("12h").longValue();
    assertEquals(twelveHoursInMillis, from(dusk).till(dawn));
}
@Test
public void toIso8601Utc() throws Exception {
    // Parsing an ISO-8601 UTC timestamp and formatting it back must be lossless.
    final String iso = "2013-07-04T07:36:11Z";
    assertEquals(iso, from(dateIso(iso)).toIso8601Utc());
}
@Test
public void testDateIso() throws Exception {
    // dateIso() must agree with a Date built via java.time at the same UTC instant.
    final Date viaJavaTime =
            Date.from(ZonedDateTime.of(2013, 7, 4, 7, 36, 11, 0, Instants.ZONE_UTC).toInstant());
    assertEquals(dateIso("2013-07-04T07:36:11Z"), viaJavaTime);
}
@Test
public void dateLong() throws Exception {
    // The epoch-millis factory must agree with the ISO-8601 parser.
    final long epoch = 0L;
    final long someUtcInstant = 1372930571000L; // 2013-07-04T09:36:11Z
    assertEquals(date(epoch), dateIso("1970-01-01T00:00:00Z"));
    assertEquals(date(someUtcInstant), dateIso("2013-07-04T09:36:11Z"));
}
@Test
public void truncate() throws Exception {
    // truncate(millis()) must drop the sub-second part for both the Date and long views.
    final long secondsOnly = 1372930571000L;
    final Date withMillis = date(secondsOnly + 123L);
    assertThat(withMillis).isEqualToIgnoringMillis(dateIso("2013-07-04T09:36:11Z"));
    assertThat(withMillis).isNotEqualTo(dateIso("2013-07-04T09:36:11Z"));
    final Date truncatedDate = from(withMillis).truncate(millis()).toDate();
    final long truncatedLong = from(withMillis).truncate(millis()).toLong();
    assertThat(truncatedDate).isEqualTo(dateIso("2013-07-04T09:36:11Z"));
    assertThat(truncatedLong).isEqualTo(secondsOnly);
}
@Test
public void toLong() throws Exception {
    // toLong() must preserve epoch millis exactly, with or without a millis component.
    for (long epochMillis : new long[]{1372930571000L, 1372930571123L}) {
        assertThat(from(date(epochMillis)).toLong()).isEqualTo(epochMillis);
    }
}
@Test
public void toInstant() throws Exception {
    // toInstant() must map to the same point on the time line as Instant.ofEpochMilli.
    for (long epochMillis : new long[]{1372930571000L, 1372930571123L}) {
        assertThat(from(date(epochMillis)).toInstant()).isEqualTo(Instant.ofEpochMilli(epochMillis));
    }
}
}
|
package org.amc.myservlet.test;
import static org.junit.Assert.*;
import java.util.ArrayList;
import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;
import javax.persistence.Query;
import org.amc.dao.DAO;
import org.amc.model.User;
import org.amc.model.UserRoles;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
/**
 * Persistence tests for {@link User} and its roles, driven through the generic {@link DAO}.
 *
 * <p>The JPA wiring in {@link #setUp()} is currently commented out, so all
 * database-backed tests are {@code @Ignore}d; only {@link #testSetPassword()} runs.
 */
public class TestUser
{
    private EntityManager em;
    private EntityManagerFactory factory;

    @Before
    public void setUp() throws Exception
    {
        // Persistence wiring is disabled until a test datasource is available;
        // re-enable the block below (and the @Ignore'd tests) to run against a DB.
        // factory=Persistence.createEntityManagerFactory("myDataSource");
        // em=factory.createEntityManager();
        // //Clear the table
        // Query q=em.createNativeQuery("DELETE FROM users");
        // Query q1=em.createNativeQuery("DELETE FROM user_roles");
        // em.getTransaction().begin();
        // q.executeUpdate();
        // q1.executeUpdate();
        // em.getTransaction().commit();
    }

    @After
    public void tearDown() throws Exception
    {
        // setUp() no longer initialises these fields, so they are null here.
        // Guard against NPEs that would otherwise fail every test run,
        // including the non-ignored testSetPassword().
        if (em != null)
        {
            em.close();
        }
        if (factory != null)
        {
            factory.close();
        }
    }

    /**
     * @return a sample User with all basic fields populated
     */
    public User getTestUser()
    {
        User u=new User();
        u.setFullName("Adrian McLaughlin");
        u.setUserName("subwoofer359");
        u.setEmailAddress("subwoofer359@gmail.com");
        u.setPassword("orororo03");
        u.setActive(true);
        return u;
    }

    /**
     * Tests methods addUser(User u) and getUser(String id).
     */
    @Ignore
    @Test
    public void testAddUser()
    {
        User u=getTestUser();
        // UserRoles roles=new UserRoles();
        // roles.setRoleName("QC");
        // roles.setUser(u);
        // UserRoles roles2=new UserRoles();
        // roles2.setRoleName("Manager");
        // roles2.setUser(u);
        // List<UserRoles> listOfRoles=new ArrayList<UserRoles>();
        // listOfRoles.add(roles);
        // listOfRoles.add(roles2);
        // u.setRoles(listOfRoles);
        DAO<User> ud=new DAO<User>(factory,User.class);
        ud.addEntity(u);
        User tu=ud.getEntity(String.valueOf(u.getId()));
        assertTrue(tu.equals(u));
    }

    /** Persisted users must all come back from findEntities(). */
    @Ignore
    @Test
    public void testFindUsers()
    {
        DAO<User> ud=new DAO<User>(factory,User.class);
        User[] users={getTestUser(),getTestUser(),getTestUser(),getTestUser()};
        for(User u:users)
        {
            ud.addEntity(u);
        }
        List<User> list=ud.findEntities();
        assertEquals(users.length, list.size());
    }

    /** findEntities(field, value) must return only the matching user. */
    @Ignore
    @Test
    public void testFindUsersByValue()
    {
        String userName="Bunny";
        DAO<User> ud=new DAO<User>(factory,User.class);
        User u1=getTestUser();
        User u2=getTestUser();
        u2.setUserName(userName);
        User u3=getTestUser();
        User u4=getTestUser();
        ud.addEntity(u1);
        ud.addEntity(u2);
        ud.addEntity(u3);
        ud.addEntity(u4);
        List<User> list=ud.findEntities("userName", userName);
        assertEquals(list.size(), 1);
        User actualUser=list.get(0);
        assertEquals(actualUser.getUserName(),userName);
    }

    /** An updated field must be visible after updateEntity() and re-fetch. */
    @Ignore
    @Test
    public void testUpdateUser()
    {
        String emailAddress="chris@eircom.net";
        DAO<User> ud=new DAO<User>(factory,User.class);
        User u=getTestUser();
        ud.addEntity(u);
        u.setEmailAddress(emailAddress);
        ud.updateEntity(u);
        User tu=ud.getEntity(String.valueOf(u.getId()));
        assertEquals(tu.getEmailAddress(), emailAddress);
    }

    /** Deleting a user must also remove it (and, by cascade, its roles). */
    @Ignore
    @Test
    public void testRoles()
    {
        String userName="Bunny";
        String[] roles={"QC","MANAGER"};
        DAO<User> ud=new DAO<User>(factory,User.class);
        User u1=getTestUser();
        User u2=getTestUser();
        u2.setUserName(userName);
        UserRoles r1=new UserRoles();
        r1.setRoleName(roles[0]);
        r1.setUser(u1);
        UserRoles r2=new UserRoles();
        r2.setRoleName(roles[1]);
        r2.setUser(u1);
        UserRoles r3=new UserRoles();
        r3.setRoleName(roles[0]);
        r3.setUser(u2);
        List<UserRoles> u1_roles=new ArrayList<UserRoles>();
        List<UserRoles> u2_roles=new ArrayList<UserRoles>();
        u1_roles.add(r1);
        u1_roles.add(r2);
        u2_roles.add(r3);
        u1.setRoles(u1_roles);
        u2.setRoles(u2_roles);
        ud.addEntity(u1);
        ud.addEntity(u2);
        User t1=ud.getEntity(String.valueOf(u1.getId()));
        assertTrue(t1!=null);
        ud.deleteEntity(u1);
        t1=ud.getEntity(String.valueOf(u1.getId()));
        assertTrue(t1==null);
    }

    /** A user with roles must persist, be retrievable, and then be deletable. */
    @Ignore
    @Test
    public void testDeleteUser()
    {
        User u1=getTestUser();
        UserRoles role1=new UserRoles();
        role1.setRoleName("QC");
        role1.setUser(u1);
        UserRoles role2=new UserRoles();
        role2.setRoleName("MANAGER");
        role2.setUser(u1);
        List<UserRoles> rolesList=new ArrayList<UserRoles>();
        rolesList.add(role1);
        rolesList.add(role2);
        u1.setRoles(rolesList);
        DAO<User> ud=new DAO<User>(factory,User.class);
        ud.addEntity(u1);
        //Test user had persisted
        User ru1=ud.getEntity(String.valueOf(u1.getId()));
        assertTrue(ru1.equals(u1));
        //Delete user
        ud.deleteEntity(ru1);
        //Test user has been deleted
        ru1=ud.getEntity(String.valueOf(u1.getId()));
        assertNull(ru1);
    }

    /** Two users given the same plaintext password must store equal password values. */
    @Test
    public void testSetPassword()
    {
        User u =new User();
        User u2 =new User();
        String password="helloworld";
        u.setPassword(password);
        u2.setPassword(password);
        assertTrue(new String(u.getPassword()).equals(new String(u2.getPassword())));
    }
}
|
package org.lantern;
import static org.junit.Assert.*;
import static org.mockito.Matchers.*;
import static org.mockito.Mockito.*;
import java.io.IOException;
import java.util.Arrays;
import java.util.concurrent.Callable;
import java.util.concurrent.atomic.AtomicReference;
import org.junit.Test;
import org.lantern.event.Events;
import org.lantern.event.MessageEvent;
import org.lantern.oauth.OauthUtils;
import org.lantern.oauth.RefreshToken;
import org.lantern.proxy.FallbackProxy;
import org.lantern.state.Mode;
import org.lantern.state.Model;
import org.lantern.util.HttpClientFactory;
import com.google.common.eventbus.Subscribe;
public class S3ConfigFetcherTest {

    /** Captures the most recent MessageEvent published on the event bus. */
    private final AtomicReference<MessageEvent> messageRef =
        new AtomicReference<MessageEvent>();

    @Test
    public void testStopAndStart() throws Exception {
        final HttpClientFactory clientFactory =
            TestingUtils.newHttClientFactory();
        TestingUtils.doWithGetModeProxy(new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                final Model model = TestingUtils.newModel();
                model.getSettings().setMode(Mode.give);
                final OauthUtils oauth = new OauthUtils(clientFactory,
                        model, new RefreshToken(model));
                final S3ConfigFetcher configFetcher = new S3ConfigFetcher(model,
                        oauth);
                // A started fetcher must populate the (cleared) config...
                model.setS3Config(null);
                configFetcher.init();
                configFetcher.start();
                assertNotNull(model.getS3Config());
                // ...stopping must leave a cleared config untouched...
                model.setS3Config(null);
                configFetcher.stop();
                assertNull(model.getS3Config());
                // ...and the fetcher must be restartable after a stop.
                configFetcher.init();
                configFetcher.start();
                assertNotNull(model.getS3Config());
                return null;
            }
        });
    }

    @Test
    public void testWithExceptions() throws Exception {
        Events.register(this);
        final Model model = new Model();
        // Every fetch attempt fails with an IOException.
        final OauthUtils oauth = mock(OauthUtils.class);
        when(oauth.getRequest(any(String.class))).thenThrow(new IOException());
        final S3ConfigFetcher configFetcher = new S3ConfigFetcher(model, oauth);
        assertEquals(1, model.getS3Config().getAllFallbacks().size());
        model.getS3Config().setFallbacks(Arrays.asList(new FallbackProxy()));
        assertNull(messageRef.get());
        configFetcher.init();
        assertEquals(2, model.getS3Config().getAllFallbacks().size());
        Thread.sleep(200);
        // We want to make sure the message is not sent here, as a single
        // failure to download shouldn't result in this message.
        assertNull(messageRef.get());
    }

    @Subscribe
    public void onMessage(final MessageEvent event) {
        messageRef.set(event);
    }
}
|
package seedu.address.ui;
import static org.junit.Assert.assertEquals;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.loadui.testfx.GuiTest;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
import javafx.scene.Parent;
import javafx.scene.control.TextField;
import javafx.scene.input.KeyCode;
import seedu.address.commons.core.EventsCenter;
import seedu.address.commons.events.ui.IncorrectCommandAttemptedEvent;
import seedu.address.logic.Logic;
import seedu.address.logic.commands.CommandResult;
import seedu.address.testutil.GuiTests;
/**
* Unit Tests for CommandBox.
*/
@Category({GuiTests.class})
public class CommandBoxTest extends GuiTest {

    @Mock
    private Logic logic;
    @Mock
    private ResultDisplay resultDisplay;

    private CommandBox commandBox;
    private TextField textField;

    @Override
    protected Parent getRootNode() {
        MockitoAnnotations.initMocks(this);
        commandBox = new CommandBox(resultDisplay, logic);
        return commandBox.getRoot();
    }

    @Before
    public void setupNodes() {
        textField = find("#commandTextField");
    }

    @Test
    public void commandInputChanged_callsLogicExecute() {
        final String command = "some command";
        final CommandResult commandResult = new CommandResult("some result");
        enterCommand(command, commandResult);
        // Executing a command must forward it to Logic, post the feedback,
        // and clear the input field.
        Mockito.verify(logic).execute(command);
        Mockito.verify(resultDisplay).postMessage(commandResult.feedbackToUser);
        assertEquals("", textField.getText());
    }

    @Test
    public void incorrectCommand_restoresCommandText() {
        final String command = "some command";
        final CommandResult commandResult = new CommandResult("some result");
        enterCommand(command, commandResult);
        assertEquals("", textField.getText());
        // An IncorrectCommandAttemptedEvent must restore the previous input.
        EventsCenter.getInstance().post(new IncorrectCommandAttemptedEvent(null));
        assertEquals(command, textField.getText());
    }

    /** Stubs Logic for {@code command}, types it into the box and presses ENTER. */
    private void enterCommand(String command, CommandResult result) {
        Mockito.when(logic.execute(command)).thenReturn(result);
        textField.setText(command);
        clickOn(textField).push(KeyCode.ENTER);
    }
}
|
package org.umlg.sqlg.strategy;
import com.google.common.base.Preconditions;
import org.apache.commons.lang3.time.StopWatch;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.tinkerpop.gremlin.process.traversal.Path;
import org.apache.tinkerpop.gremlin.process.traversal.Traversal;
import org.apache.tinkerpop.gremlin.process.traversal.Traverser;
import org.apache.tinkerpop.gremlin.process.traversal.step.TraversalParent;
import org.apache.tinkerpop.gremlin.process.traversal.step.map.GraphStep;
import org.apache.tinkerpop.gremlin.process.traversal.step.util.MutablePath;
import org.apache.tinkerpop.gremlin.process.traversal.traverser.B_LP_O_P_S_SE_SL_Traverser;
import org.apache.tinkerpop.gremlin.process.traversal.traverser.B_LP_O_P_S_SE_SL_TraverserGenerator;
import org.apache.tinkerpop.gremlin.process.traversal.traverser.TraverserRequirement;
import org.apache.tinkerpop.gremlin.process.traversal.util.FastNoSuchElementException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.umlg.sqlg.process.EmitOrderAndRangeHelper;
import org.umlg.sqlg.sql.parse.ReplacedStep;
import org.umlg.sqlg.sql.parse.ReplacedStepTree;
import org.umlg.sqlg.sql.parse.SchemaTableTree;
import org.umlg.sqlg.structure.SqlgCompiledResultIterator;
import org.umlg.sqlg.structure.SqlgElement;
import org.umlg.sqlg.structure.SqlgGraph;
import java.util.*;
/**
 * A {@link GraphStep} replacement that compiles a chain of absorbed traversal steps
 * ({@link ReplacedStep}s) into SQL, executes it, and replays the result rows as
 * TinkerPop traversers. Results are eagerly loaded so that order()/range()
 * semantics can be applied via {@link EmitOrderAndRangeHelper}.
 */
public class SqlgGraphStepCompiled<S, E extends SqlgElement> extends GraphStep implements SqlgStep, TraversalParent {

    // static final: one logger per class rather than per step instance; the
    // Class overload is preferred over passing getName().
    private static final Logger logger = LoggerFactory.getLogger(SqlgGraphStepCompiled.class);

    // Steps of the original traversal that this compiled step has absorbed.
    private List<ReplacedStep<?, ?>> replacedSteps = new ArrayList<>();
    // Tree view of the replaced steps, consumed by the gremlin-to-SQL parser.
    private ReplacedStepTree replacedStepTree;
    private SqlgGraph sqlgGraph;
    // Cache of SQL produced by parseForStrategy(), keyed by root schema-table tree.
    private Map<SchemaTableTree, List<Pair<LinkedList<SchemaTableTree>, String>>> parsedForStrategySql = new HashMap<>();
    // The emit currently being assembled by flattenRawIterator().
    private Emit<E> toEmit = null;
    // Raw rows from the database: one List<Emit> per traversal path.
    private Iterator<List<Emit<E>>> elementIter;
    // Fully materialised results, sorted/ranged before being replayed.
    private List<Emit<E>> eagerLoadedResults = new ArrayList<>();
    private Iterator<Emit<E>> eagerLoadedResultsIter;
    // Head traverser from this.starts when this step is not the first step.
    private Traverser.Admin<S> previousHead;

    SqlgGraphStepCompiled(final SqlgGraph sqlgGraph, final Traversal.Admin traversal, final Class<E> returnClass, final boolean isStart, final Object... ids) {
        super(traversal, returnClass, isStart, ids);
        this.sqlgGraph = sqlgGraph;
    }

    /**
     * Drains the eagerly-loaded results; when exhausted, pulls the next start
     * traverser (or finishes, if this is a start step), queries the database
     * and eagerly loads the next batch.
     */
    @Override
    protected Traverser.Admin<E> processNextStart() {
        while (true) {
            if (this.eagerLoadedResultsIter != null && this.eagerLoadedResultsIter.hasNext()) {
                Traverser.Admin<E> traverser = null;
                Emit<E> emit = this.eagerLoadedResultsIter.next();
                boolean first = true;
                // Rebuild the traverser by splitting once per path element so
                // the path and labels are reconstructed in order.
                Iterator<Set<String>> labelIter = emit.getPath().labels().iterator();
                for (Object o : emit.getPath().objects()) {
                    E e = (E) o;
                    Set<String> labels = labelIter.next();
                    this.labels = labels;
                    if (!isStart && previousHead != null && traverser == null) {
                        // Continue from the incoming traverser's path.
                        traverser = previousHead.split(e, this);
                    } else if (first) {
                        first = false;
                        traverser = B_LP_O_P_S_SE_SL_TraverserGenerator.instance().generate((S) e, this, 1L);
                    } else {
                        traverser = ((B_LP_O_P_S_SE_SL_Traverser) traverser).split(e, this);
                    }
                }
                return traverser;
            } else {
                if (!this.isStart) {
                    this.previousHead = this.starts.next();
                } else {
                    // A start step executes its query exactly once.
                    if (this.done) {
                        throw FastNoSuchElementException.instance();
                    }
                    this.done = true;
                }
                this.elementIter = elements();
                eagerLoad();
                // Eager loading is required so order()/range() can be applied
                // across the whole result set before traversers are emitted.
                EmitOrderAndRangeHelper emitOrderAndRangeHelper = new EmitOrderAndRangeHelper<>(this.eagerLoadedResults, this.replacedSteps);
                emitOrderAndRangeHelper.sortAndApplyRange();
                this.eagerLoadedResultsIter = this.eagerLoadedResults.iterator();
            }
        }
    }

    /**
     * Folds the next raw row into a single Emit carrying the full path and its
     * comparators, storing it in {@code this.toEmit}.
     *
     * @return true if an emit was produced (or a previous one is still pending)
     */
    private boolean flattenRawIterator() {
        if (this.elementIter.hasNext()) {
            List<Emit<E>> emits = this.elementIter.next();
            List<SqlgComparatorHolder> emitComparators = new ArrayList<>();
            Path currentPath = MutablePath.make();
            for (Emit<E> emit : emits) {
                this.toEmit = emit;
                // Fake emits are placeholders with no element; skip them when
                // extending the path.
                if (!emit.isFake()) {
                    currentPath = currentPath.extend(emit.getElement(), emit.getLabels());
                    emitComparators.add(this.toEmit.getSqlgComparatorHolder());
                }
            }
            if (this.toEmit != null) {
                this.toEmit.setPath(currentPath);
                this.toEmit.setSqlgComparatorHolders(emitComparators);
            }
        }
        return this.toEmit != null;
    }

    /** Materialises the whole raw iterator into {@code eagerLoadedResults}. */
    private void eagerLoad() {
        this.eagerLoadedResults.clear();
        while (flattenRawIterator()) {
            this.eagerLoadedResults.add(this.toEmit);
            // An emit inside repeat() that has not yet been replayed is added
            // twice, once for the repeat emission and once for the final result.
            if (this.toEmit.isRepeat() && !this.toEmit.isRepeated()) {
                this.toEmit.setRepeated(true);
                this.eagerLoadedResults.add(this.toEmit);
            }
            this.toEmit = null;
        }
    }

    @Override
    public void reset() {
        super.reset();
        this.previousHead = null;
        this.eagerLoadedResults.clear();
    }

    @Override
    public Set<TraverserRequirement> getRequirements() {
        return this.getSelfAndChildRequirements(TraverserRequirement.PATH, TraverserRequirement.SIDE_EFFECTS);
    }

    /**
     * Compiles the replaced-step tree to SQL and executes it.
     *
     * @return an iterator over result rows, one List&lt;Emit&gt; per path
     * @throws IllegalStateException if a streaming batch is in progress
     */
    private Iterator<List<Emit<E>>> elements() {
        this.sqlgGraph.tx().readWrite();
        if (this.sqlgGraph.tx().getBatchManager().isStreaming()) {
            throw new IllegalStateException("streaming is in progress, first flush or commit before querying.");
        }
        StopWatch stopWatch = new StopWatch();
        stopWatch.start();
        Preconditions.checkState(this.replacedSteps.size() > 0, "There must be at least one replacedStep");
        Preconditions.checkState(this.replacedSteps.get(0).isGraphStep(), "The first step must a SqlgGraphStep");
        Set<SchemaTableTree> rootSchemaTableTrees = this.sqlgGraph.getGremlinParser().parse(this.replacedStepTree);
        SqlgCompiledResultIterator<List<Emit<E>>> resultIterator = new SqlgCompiledResultIterator<>(this.sqlgGraph, rootSchemaTableTrees);
        stopWatch.stop();
        if (logger.isDebugEnabled()) {
            logger.debug("SqlgGraphStepCompiled finished, time taken {}", stopWatch.toString());
        }
        return resultIterator;
    }

    @Override
    public List<ReplacedStep<?, ?>> getReplacedSteps() {
        return this.replacedSteps;
    }

    @Override
    public ReplacedStepTree.TreeNode addReplacedStep(ReplacedStep<?, ?> replacedStep) {
        //depth is + 1 because there is always a root node who's depth is 0
        replacedStep.setDepth(this.replacedSteps.size());
        this.replacedSteps.add(replacedStep);
        //New way of interpreting steps
        if (this.replacedStepTree == null) {
            //the first root node
            this.replacedStepTree = new ReplacedStepTree(replacedStep);
        } else {
            this.replacedStepTree.addReplacedStep(replacedStep);
        }
        return this.replacedStepTree.getCurrentTreeNodeNode();
    }

    /**
     * Pre-compiles the SQL for every root schema-table tree and caches it in
     * {@code parsedForStrategySql} so strategies can inspect the query shape.
     */
    @Override
    public void parseForStrategy() {
        this.parsedForStrategySql.clear();
        Preconditions.checkState(this.replacedSteps.size() > 0, "There must be at least one replacedStep");
        Preconditions.checkState(this.replacedSteps.get(0).isGraphStep(), "The first step must a SqlgGraphStep");
        Set<SchemaTableTree> rootSchemaTableTrees = this.sqlgGraph.getGremlinParser().parseForStrategy(this.replacedSteps);
        for (SchemaTableTree rootSchemaTableTree : rootSchemaTableTrees) {
            try {
                //TODO this really sucks, constructsql should not query, but alas it does for P.within and temp table jol
                if (this.sqlgGraph.tx().isOpen() && this.sqlgGraph.tx().getBatchManager().isStreaming()) {
                    throw new IllegalStateException("streaming is in progress, first flush or commit before querying.");
                }
                List<Pair<LinkedList<SchemaTableTree>, String>> sqlStatements = rootSchemaTableTree.constructSql();
                this.parsedForStrategySql.put(rootSchemaTableTree, sqlStatements);
            } finally {
                rootSchemaTableTree.resetColumnAliasMaps();
            }
        }
    }

    /** @return true if executing this step requires more than one SQL query */
    public boolean isForMultipleQueries() {
        // anyMatch short-circuits, unlike the previous filter(...).count() > 0.
        return this.parsedForStrategySql.size() > 1
                || this.parsedForStrategySql.values().stream().anyMatch(l -> l.size() > 1);
    }

    @Override
    public int hashCode() {
        int result = super.hashCode() ^ this.returnClass.hashCode();
        for (final Object id : this.ids) {
            result ^= id.hashCode();
        }
        return result;
    }
}
|
package floobits.common;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonSyntaxException;
import floobits.common.interfaces.IContext;
import floobits.utilities.Flog;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Type;
import java.util.HashMap;
/**
 * Reads and writes the user's {@code ~/.floorc.json} configuration file.
 */
public class Settings {
    public static String floorcJsonPath = FilenameUtils.concat(System.getProperty("user.home"), ".floorc.json");

    /**
     * Loads and parses {@code ~/.floorc.json}.
     *
     * @return the parsed config; an empty {@link FloorcJson} if the file is unreadable
     * @throws Exception if the file exists but contains invalid JSON
     */
    public static FloorcJson get() throws Exception {
        File f = new File(floorcJsonPath);
        String string;
        try {
            string = FileUtils.readFileToString(f, "UTF-8");
        } catch (IOException e) {
            // Missing/unreadable file is not an error: start with empty settings.
            return new FloorcJson();
        }
        try {
            // No cast needed: the Class<T> overload of fromJson is type-safe.
            return new Gson().fromJson(string, FloorcJson.class);
        } catch (JsonSyntaxException e) {
            throw new Exception("Invalid JSON.");
        }
    }

    /**
     * Serialises {@code floorcJson} to {@code ~/.floorc.json}, creating the
     * file if necessary. Failures are reported through {@code context}.
     */
    public static void write(IContext context, FloorcJson floorcJson) {
        File file = new File(floorcJsonPath);
        if (!file.exists()) {
            boolean newFile;
            try {
                newFile = file.createNewFile();
            } catch (IOException e) {
                context.errorMessage("Can't write new ~/.floorc.json");
                return;
            }
            if (!newFile) {
                context.errorMessage("Can't write new ~/.floorc.json");
                return;
            }
        }
        try {
            // Write with UTF-8 explicitly: get() reads with UTF-8, and the
            // charset-less overload would use the platform default.
            FileUtils.write(file, new GsonBuilder().setPrettyPrinting().create().toJson(floorcJson), "UTF-8");
        } catch (IOException e) {
            Flog.warn(e);
            context.errorMessage("Can't write new ~/.floorc.json");
        }
    }

    /** @return true if the per-host settings contain a secret plus a username or api_key */
    public static Boolean isAuthComplete(HashMap<String, String> settings) {
        return (settings.get("secret") != null && (settings.get("username") != null || settings.get("api_key") != null));
    }

    /** @return true if at least one configured host has complete credentials */
    public static Boolean canFloobits() {
        HashMap<String, HashMap<String, String>> auth = null;
        try {
            auth = get().auth;
        } catch (Throwable e) {
            return false;
        }
        if (auth == null) {
            return false;
        }
        for (HashMap<String, String> hostAuth : auth.values()) {
            if (isAuthComplete(hostAuth)) {
                return true;
            }
        }
        return false;
    }
}
|
package todomore.android.metawidget;
import org.metawidget.android.widget.widgetprocessor.binding.simple.Converter;
import com.darwinsys.todo.model.Date;
import android.view.View;
/**
* MetaWidget converter for our local Date class.
*/
public class DateConverter implements Converter<Date> {

    public DateConverter() {
        // Debug println removed: constructor needs no side effects
        // (the other trace printlns in this class are already commented out).
    }

    /**
     * Convert from a Date to a String for display.
     *
     * @return the Date's string form, or null if the value is absent
     */
    @Override
    public Object convertForView(View widget, Date value) {
        // System.out.println("DateConverter.convertForView()");
        if (value == null) {
            return null;
        }
        return value.toString();
    }

    /**
     * Convert from a String in the View to a Date object.
     *
     * @return the parsed Date, or null for an absent/empty value
     * @throws IllegalArgumentException if the value is not a String
     */
    @Override
    public Date convertFromView(View widget, Object value, Class<?> intoClass) {
        // System.out.println("DateConverter.convertFromView()");
        if (value == null) {
            // Mirror convertForView: absent input maps to a null Date.
            // (Previously this fell through and NPE'd on value.getClass().)
            return null;
        }
        if (value instanceof String) {
            if (((String) value).isEmpty()) {
                return null;
            }
            return new Date(value.toString());
        }
        throw new IllegalArgumentException("Can't parse " + value + " of type " + value.getClass().getName());
    }
}
|
package org.commcare.adapters;
import android.content.Context;
import android.database.DataSetObserver;
import android.graphics.Bitmap;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import org.commcare.CommCareApplication;
import org.commcare.activities.CommCareActivity;
import org.commcare.core.process.CommCareInstanceInitializer;
import org.commcare.dalvik.R;
import org.commcare.logging.AndroidLogger;
import org.commcare.logging.XPathErrorLogger;
import org.commcare.models.AndroidSessionWrapper;
import org.commcare.preferences.DeveloperPreferences;
import org.commcare.suite.model.EntityDatum;
import org.commcare.suite.model.Entry;
import org.commcare.suite.model.Menu;
import org.commcare.suite.model.MenuDisplayable;
import org.commcare.suite.model.MenuLoader;
import org.commcare.suite.model.SessionDatum;
import org.commcare.suite.model.Suite;
import org.commcare.util.CommCarePlatform;
import org.commcare.util.LoggerInterface;
import org.commcare.utils.MediaUtil;
import org.commcare.views.UserfacingErrorHandling;
import org.commcare.views.media.AudioPlaybackButton;
import org.commcare.views.media.ViewId;
import org.javarosa.core.model.condition.EvaluationContext;
import org.javarosa.core.reference.InvalidReferenceException;
import org.javarosa.core.reference.ReferenceManager;
import org.javarosa.core.services.Logger;
import org.javarosa.core.services.locale.Localization;
import org.javarosa.core.services.locale.Localizer;
import org.javarosa.xpath.XPathException;
import org.javarosa.xpath.XPathTypeMismatchException;
import org.javarosa.xpath.expr.FunctionUtils;
import org.javarosa.xpath.expr.XPathExpression;
import org.javarosa.xpath.parser.XPathSyntaxException;
import java.io.File;
import java.util.Hashtable;
import java.util.Vector;
/**
* Load module menu items
*
* @author wspride
*/
public class MenuAdapter extends BaseAdapter {
protected final AndroidSessionWrapper asw;
private Exception loadError;
private String errorMessage = "";
final Context context;
final MenuDisplayable[] displayableData;
class MenuLogger implements LoggerInterface {
@Override
public void logError(String message, XPathException cause) {
XPathErrorLogger.INSTANCE.logErrorToCurrentApp(cause.getSource(), message);
Logger.log(AndroidLogger.TYPE_ERROR_CONFIG_STRUCTURE, message);
}
@Override
public void logError(String message) {
XPathErrorLogger.INSTANCE.logErrorToCurrentApp(message);
Logger.log(AndroidLogger.TYPE_ERROR_CONFIG_STRUCTURE, message);
}
}
public MenuAdapter(Context context, CommCarePlatform platform, String menuID) {
this.context = context;
asw = CommCareApplication.instance().getCurrentSessionWrapper();
MenuLoader menuLoader = new MenuLoader(platform, asw, menuID, new MenuLogger());
this.displayableData = menuLoader.getMenus();
this.errorMessage = menuLoader.getErrorMessage();
this.loadError = menuLoader.getLoadException();
}
public void showAnyLoadErrors(CommCareActivity activity) {
if (loadError != null) {
UserfacingErrorHandling.createErrorDialog(activity, errorMessage, true);
}
}
@Override
public boolean areAllItemsEnabled() {
return true;
}
@Override
public boolean isEnabled(int arg0) {
return true;
}
@Override
public int getCount() {
return displayableData.length;
}
@Override
public Object getItem(int i) {
return displayableData[i];
}
@Override
public long getItemId(int i) {
Object tempItem = displayableData[i];
if (tempItem instanceof Menu) {
return ((Menu)tempItem).getId().hashCode();
} else {
return ((Entry)tempItem).getCommandId().hashCode();
}
}
@Override
public int getItemViewType(int i) {
return 0;
}
enum NavIconState {
NONE, NEXT, JUMP
}
@Override
public View getView(int i, View menuListItem, ViewGroup vg) {
MenuDisplayable menuDisplayable = displayableData[i];
if (menuListItem == null) {
// inflate it and do not attach to parent, or we will get the 'addView not supported' exception
menuListItem = LayoutInflater.from(context).inflate(R.layout.menu_list_item_modern, vg, false);
}
TextView rowText = (TextView)menuListItem.findViewById(R.id.row_txt);
setupTextView(rowText, menuDisplayable);
AudioPlaybackButton audioPlaybackButton = (AudioPlaybackButton)menuListItem.findViewById(R.id.row_soundicon);
setupAudioButton(i, audioPlaybackButton, menuDisplayable);
// set up the image, if available
ImageView mIconView = (ImageView)menuListItem.findViewById(R.id.row_img);
setupImageView(mIconView, menuDisplayable);
setupBadgeView(menuListItem, menuDisplayable);
return menuListItem;
}
private void setupAudioButton(int rowId, AudioPlaybackButton audioPlaybackButton, MenuDisplayable menuDisplayable) {
final String audioURI = menuDisplayable.getAudioURI();
String audioFilename = "";
if (audioURI != null && !audioURI.equals("")) {
try {
audioFilename = ReferenceManager.instance().DeriveReference(audioURI).getLocalURI();
} catch (InvalidReferenceException e) {
Log.e("AVTLayout", "Invalid reference exception");
e.printStackTrace();
}
}
File audioFile = new File(audioFilename);
// First set up the audio button
ViewId viewId = ViewId.buildListViewId(rowId);
if (!"".equals(audioFilename) && audioFile.exists()) {
audioPlaybackButton.modifyButtonForNewView(viewId, audioURI, true);
} else {
if (audioPlaybackButton != null) {
audioPlaybackButton.modifyButtonForNewView(viewId,audioURI, false);
((LinearLayout)audioPlaybackButton.getParent()).removeView(audioPlaybackButton);
}
}
}
public void setupTextView(TextView textView, MenuDisplayable menuDisplayable) {
String mQuestionText = menuDisplayable.getDisplayText();
//Final change, remove any numeric context requests. J2ME uses these to
//help with numeric navigation.
if (mQuestionText != null) {
mQuestionText = Localizer.processArguments(mQuestionText, new String[]{""}).trim();
}
textView.setText(mQuestionText);
}
public void setupImageView(ImageView mIconView, MenuDisplayable menuDisplayable) {
if (mIconView != null) {
int iconDimension = (int)context.getResources().getDimension(R.dimen.list_icon_bounding_dimen);
Bitmap image = MediaUtil.inflateDisplayImage(context, menuDisplayable.getImageURI(),
iconDimension, iconDimension);
if (image != null) {
mIconView.setImageBitmap(image);
mIconView.setAdjustViewBounds(true);
} else {
setupDefaultIcon(mIconView, getIconState(menuDisplayable));
}
}
}
protected void setupBadgeView(View menuListItem, MenuDisplayable menuDisplayable) {
View badgeView = menuListItem.findViewById(R.id.badge_view);
String badgeText = menuDisplayable.getTextForBadge(
asw.getEvaluationContext(menuDisplayable.getCommandID()));
if (badgeText != null && !"".equals(badgeText) && !"0".equals(badgeText)) {
if (badgeText.length() > 2) {
// A badge can only fit up to 2 characters
badgeText = badgeText.substring(0, 2);
}
TextView badgeTextView = (TextView)menuListItem.findViewById(R.id.badge_text);
badgeTextView.setText(badgeText);
badgeView.setVisibility(View.VISIBLE);
} else {
badgeView.setVisibility(View.GONE);
}
}
private NavIconState getIconState(MenuDisplayable menuDisplayable) {
NavIconState iconChoice = NavIconState.NEXT;
//figure out some icons
if (menuDisplayable instanceof Entry) {
SessionDatum datum = asw.getSession().getNeededDatum((Entry)menuDisplayable);
if (datum == null || !(datum instanceof EntityDatum)) {
iconChoice = NavIconState.JUMP;
}
}
if (!DeveloperPreferences.isNewNavEnabled()) {
iconChoice = NavIconState.NONE;
}
return iconChoice;
}
protected void setupDefaultIcon(ImageView mIconView, NavIconState iconChoice) {
if (mIconView != null) {
switch (iconChoice) {
case NEXT:
mIconView.setImageResource(R.drawable.avatar_module);
break;
case JUMP:
mIconView.setImageResource(R.drawable.avatar_form);
break;
case NONE:
mIconView.setVisibility(View.GONE);
break;
}
}
}
@Override
public int getViewTypeCount() {
return 1;
}
@Override
public boolean hasStableIds() {
return true;
}
@Override
public boolean isEmpty() {
return false;
}
@Override
public void registerDataSetObserver(DataSetObserver arg0) {
}
@Override
public void unregisterDataSetObserver(DataSetObserver arg0) {
}
}
|
/**
 * Immutable value holder pairing a coordinate with the number of steps taken
 * to reach it.
 */
public class Journey
{
    // final: the class exposes no setters, so make immutability explicit.
    private final Coordinate _coord;
    private final int _steps;

    public Journey (Coordinate coord, int steps)
    {
        _coord = coord;
        _steps = steps;
    }
}
|
package com.whatistics.backend.mail;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.mail.BodyPart;
import javax.mail.Message;
import javax.mail.Multipart;
import javax.mail.Part;
import java.io.*;
import java.util.Map;
import java.util.TreeMap;
import java.util.regex.Pattern;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
/**
* @author robert
*/
public class MailUtilities {
    private static final Logger logger = LoggerFactory.getLogger(MailUtilities.class);
    private static final Pattern txtPattern = Pattern.compile("(.+?)(\\.txt)$");
    private static final Pattern zipPattern = Pattern.compile("(.+?)(\\.zip)$");

    /**
     * Returns the message's attachments with every .zip attachment unpacked
     * and everything that is not a .txt file removed.
     */
    public static TreeMap<String, InputStream> getCleanAttachments(Message message) {
        TreeMap<String, InputStream> attachments = MailUtilities.getAttachments(message);
        if (attachments == null) {
            // getAttachments returns null on unexpected content or errors;
            // treat that as "no attachments" rather than NPE below.
            return new TreeMap<String, InputStream>();
        }
        // Collect unzipped entries separately: putting into 'attachments'
        // while iterating its own entrySet throws ConcurrentModificationException.
        TreeMap<String, InputStream> extracted = new TreeMap<>();
        // unzip potential .zip files
        for (Map.Entry<String, InputStream> entry : attachments.entrySet()) {
            if (zipPattern.matcher(entry.getKey()).matches()) {
                ZipInputStream zis = new ZipInputStream(entry.getValue());
                ZipEntry zipEntry;
                try {
                    while ((zipEntry = zis.getNextEntry()) != null) {
                        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
                        // Read until read() signals EOF. The previous
                        // available()-based loop wrote the -1 sentinel,
                        // appending a spurious 0xFF byte to every entry.
                        // todo: don't do in memory
                        byte[] buffer = new byte[8192];
                        int read;
                        while ((read = zis.read(buffer)) != -1) {
                            outputStream.write(buffer, 0, read);
                        }
                        extracted.put(zipEntry.getName(), new ByteArrayInputStream(outputStream.toByteArray()));
                    }
                } catch (IOException e) {
                    logger.error("Failed reading .zip file", e);
                }
            }
        }
        attachments.putAll(extracted);
        // remove all non .txt or .zip messages
        attachments.entrySet().removeIf(entry -> !txtPattern.matcher(entry.getKey()).matches());
        return attachments;
    }

    /**
     * Collects all attachments of the message, keyed by filename.
     *
     * @return the attachments; an empty map for plain-text messages; null on error
     */
    public static TreeMap<String, InputStream> getAttachments(Message message) {
        Object content;
        try {
            content = message.getContent();
            if (content instanceof String) {
                return new TreeMap<String, InputStream>();
            }
            if (content instanceof Multipart) {
                Multipart multipart = (Multipart) content;
                TreeMap<String, InputStream> result = new TreeMap<>();
                for (int i = 0; i < multipart.getCount(); i++) {
                    result.putAll(getAttachments(multipart.getBodyPart(i)));
                }
                return result;
            }
            return null;
        } catch (Exception e) {
            logger.error("Failed to read message content", e);
            return null;
        }
    }

    /** Recursively collects attachments from a body part and its nested multiparts. */
    private static TreeMap<String, InputStream> getAttachments(BodyPart part) throws Exception {
        TreeMap<String, InputStream> result = new TreeMap<>();
        Object content = part.getContent();
        if (content instanceof InputStream || content instanceof String) {
            if (Part.ATTACHMENT.equalsIgnoreCase(part.getDisposition()) || StringUtils.isNotBlank(part.getFileName())) {
                result.put(part.getFileName(), part.getInputStream());
                return result;
            } else {
                return new TreeMap<String, InputStream>();
            }
        }
        if (content instanceof Multipart) {
            Multipart multipart = (Multipart) content;
            for (int i = 0; i < multipart.getCount(); i++) {
                BodyPart bodyPart = multipart.getBodyPart(i);
                result.putAll(getAttachments(bodyPart));
            }
        }
        return result;
    }

    /** @return true if the message carries exactly one clean (.txt) attachment */
    static boolean isValid(Message message) {
        Map<String, InputStream> attachments = getCleanAttachments(message);
        if (attachments.size() == 1) {
            return true;
        } else if (attachments.size() == 0) {
            logger.info("Number of attachments is 0");
        }
        return false;
    }
}
|
package org.exist.storage;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.StreamTokenizer;
import java.util.Observable;
import java.util.TreeSet;
import org.apache.log4j.Logger;
import org.exist.collections.Collection;
import org.exist.dom.AttrImpl;
import org.exist.dom.DocumentImpl;
import org.exist.dom.DocumentSet;
import org.exist.dom.NodeImpl;
import org.exist.dom.NodeSet;
import org.exist.dom.TextImpl;
import org.exist.security.PermissionDeniedException;
import org.exist.security.User;
import org.exist.storage.analysis.SimpleTokenizer;
import org.exist.storage.analysis.Tokenizer;
import org.exist.util.Configuration;
import org.exist.util.Occurrences;
import org.exist.util.PorterStemmer;
import org.exist.xquery.TerminatedException;
import org.exist.xquery.XQueryContext;
/**
* This is the base class for all classes providing access to the fulltext index.
*
* The class has methods to add text and attribute nodes to the fulltext index,
* or to search for nodes matching selected search terms.
*
* @author wolf
*/
public abstract class TextSearchEngine extends Observable {
protected final static Logger LOG =
Logger.getLogger(TextSearchEngine.class);
protected TreeSet stoplist = new TreeSet();
protected DBBroker broker = null;
protected Tokenizer tokenizer;
protected Configuration config;
protected boolean indexNumbers = false, stem = false;
protected PorterStemmer stemmer = null;
/**
* Construct a new instance and configure it.
*
* @param broker
* @param conf
*/
public TextSearchEngine(DBBroker broker, Configuration conf) {
this.broker = broker;
this.config = conf;
String stopword, tokenizerClass;
Boolean num, stemming;
if ((num = (Boolean) config.getProperty("indexer.indexNumbers"))
!= null)
indexNumbers = num.booleanValue();
if ((stemming = (Boolean) config.getProperty("indexer.stem")) != null)
stem = stemming.booleanValue();
if ((tokenizerClass = (String) config.getProperty("indexer.tokenizer"))
!= null) {
try {
Class tokClass = Class.forName(tokenizerClass);
tokenizer = (Tokenizer) tokClass.newInstance();
LOG.debug("using tokenizer: " + tokenizerClass);
} catch (ClassNotFoundException e) {
LOG.debug(e);
} catch (InstantiationException e) {
LOG.debug(e);
} catch (IllegalAccessException e) {
LOG.debug(e);
}
}
if (tokenizer == null) {
LOG.debug("using simple tokenizer");
tokenizer = new SimpleTokenizer();
}
if (stem)
stemmer = new PorterStemmer();
tokenizer.setStemming(stem);
if ((stopword = (String) config.getProperty("stopwords")) != null) {
try {
FileReader in = new FileReader(stopword);
StreamTokenizer tok = new StreamTokenizer(in);
int next = tok.nextToken();
while (next != StreamTokenizer.TT_EOF) {
if (next != StreamTokenizer.TT_WORD)
continue;
stoplist.add(tok.sval);
next = tok.nextToken();
}
} catch (FileNotFoundException e) {
LOG.debug(e);
} catch (IOException e) {
LOG.debug(e);
}
}
}
/**
* Returns the Tokenizer used for tokenizing strings into
* words.
*
* @return
*/
public Tokenizer getTokenizer() {
return tokenizer;
}
/**
* Tokenize and index the given text node.
*
* @param idx
* @param text
*/
public abstract void storeText(IndexPaths idx, TextImpl text, boolean onetoken);
/**
* Tokenize and index the given attribute node.
*
* @param idx
* @param text
*/
public abstract void storeAttribute(IndexPaths idx, AttrImpl text);
public abstract void flush();
public abstract void close();
/**
* For each of the given search terms and each of the documents in the
* document set, return a node-set of matching nodes.
*
* This method uses MATCH_EXACT for comparing search terms.
*
* @param doc
* @param expr
* @return
*/
public NodeSet getNodesContaining(XQueryContext context, DocumentSet docs, NodeSet contextSet,
String expr) throws TerminatedException {
return getNodesContaining(context, docs, contextSet, expr, DBBroker.MATCH_EXACT);
}
/**
* For each of the given search terms and each of the documents in the
* document set, return a node-set of matching nodes.
*
* The type-argument indicates if search terms should be compared using
* a regular expression. Valid values are DBBroker.MATCH_EXACT or
* DBBroker.MATCH_REGEXP.
*
* @param doc
* @param expr
* @return
*/
public abstract NodeSet getNodesContaining(XQueryContext context, DocumentSet docs,
NodeSet contextSet, String expr, int type) throws TerminatedException;
public abstract NodeSet getNodes(XQueryContext context, DocumentSet docs, NodeSet contextSet,
TermMatcher matcher, CharSequence startTerm) throws TerminatedException;
public abstract Occurrences[] scanIndexTerms(
User user,
Collection collection,
String start,
String end,
boolean inclusive) throws PermissionDeniedException;
public abstract String[] getIndexTerms(DocumentSet docs, TermMatcher matcher);
/**
* Remove index entries for an entire collection.
*
* @param collection
*/
public abstract void removeCollection(Collection collection);
/**
* Remove all index entries for the given document.
*
* @param doc
*/
public abstract void removeDocument(DocumentImpl doc);
/**
* Reindex a document or node.
*
* If node is null, all levels of the document tree starting with
* DocumentImpl.reindexRequired() will be reindexed.
*
* @param oldDoc
* @param node
*/
public abstract void reindex(DocumentImpl oldDoc, NodeImpl node);
}
|
package be.ibridge.kettle.trans.step.dimensionlookup;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.Hashtable;
import org.eclipse.swt.widgets.Shell;
import org.w3c.dom.Node;
import be.ibridge.kettle.core.CheckResult;
import be.ibridge.kettle.core.Const;
import be.ibridge.kettle.core.LogWriter;
import be.ibridge.kettle.core.Row;
import be.ibridge.kettle.core.SQLStatement;
import be.ibridge.kettle.core.XMLHandler;
import be.ibridge.kettle.core.database.Database;
import be.ibridge.kettle.core.database.DatabaseMeta;
import be.ibridge.kettle.core.exception.KettleDatabaseException;
import be.ibridge.kettle.core.exception.KettleException;
import be.ibridge.kettle.core.exception.KettleStepException;
import be.ibridge.kettle.core.exception.KettleXMLException;
import be.ibridge.kettle.core.value.Value;
import be.ibridge.kettle.repository.Repository;
import be.ibridge.kettle.trans.DatabaseImpact;
import be.ibridge.kettle.trans.Trans;
import be.ibridge.kettle.trans.TransMeta;
import be.ibridge.kettle.trans.step.BaseStepMeta;
import be.ibridge.kettle.trans.step.StepDataInterface;
import be.ibridge.kettle.trans.step.StepDialogInterface;
import be.ibridge.kettle.trans.step.StepInterface;
import be.ibridge.kettle.trans.step.StepMeta;
import be.ibridge.kettle.trans.step.StepMetaInterface;
import be.ibridge.kettle.trans.step.dimensionlookup.Messages;
/**
 * Meta-data for the "Dimension lookup/update" step: looks up (and optionally
 * maintains) rows in a slowly-changing dimension table, returning the technical
 * (surrogate) key for each input row.
 */
public class DimensionLookupMeta extends BaseStepMeta implements StepMetaInterface
{
	// How a dimension field reacts to a changed value when updating:
	public final static int TYPE_UPDATE_DIM_INSERT = 0;
	public final static int TYPE_UPDATE_DIM_UPDATE = 1;
	public final static int TYPE_UPDATE_DIM_PUNCHTHROUGH = 2;
	// Localized descriptions for the update types above (indexed by type constant).
	public final static String typeDesc[] = { Messages.getString("DimensionLookupMeta.TypeDesc.Insert"), Messages.getString("DimensionLookupMeta.TypeDesc.Update"), Messages.getString("DimensionLookupMeta.TypeDesc.PunchThrough") }; //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
	// Value-type descriptions used in lookup-only mode.
	public final static String typeDescLookup[] = Value.getTypes();
	/** The lookup table*/
	private String tableName;
	/** The database connection */
	private DatabaseMeta databaseMeta;
	/** Update the dimension or just lookup? */
	private boolean update;
	/** Fields used to look up a value in the dimension */
	private String keyStream[];
	/** Fields in the dimension to use for lookup */
	private String keyLookup[];
	/** The field to use for date range lookup in the dimension */
	private String dateField;
	/** The 'from' field of the date range in the dimension */
	private String dateFrom;
	/** The 'to' field of the date range in the dimension */
	private String dateTo;
	/** Fields containing the values in the input stream to update the dimension with */
	private String fieldStream[];
	/** Fields in the dimension to update or retrieve */
	private String fieldLookup[];
	/** The type of update to perform on the fields: insert, update, punch-through */
	private int fieldUpdate[];
	/** Name of the technical key (surrogate key) field to return from the dimension */
	private String keyField;
	/** New name of the technical key field */
	private String keyRename;
	/** Use auto increment field as TK */
	private boolean autoIncrement;
	/** The name of the version field */
	private String versionField;
	/** Sequence name to get the sequence from */
	private String sequenceName;
	/** Default value in case nothing was found */
	private Value notFound;
	/** The number of rows between commits */
	private int commitSize;
	/** The year to use as minus infinity in the dimensions date range */
	private int minYear;
	/** The year to use as plus infinity in the dimensions date range */
	private int maxYear;
	/** Which method to use for the creation of the tech key */
	private String techKeyCreation = null;
	// Values for techKeyCreation:
	public static String CREATION_METHOD_AUTOINC = "autoinc";
	public static String CREATION_METHOD_SEQUENCE = "sequence";
	public static String CREATION_METHOD_TABLEMAX = "tablemax";
	public DimensionLookupMeta()
	{
		super(); // allocate BaseStepMeta
	}
	/**
	 * @return Returns the name of the dimension lookup table.
	 */
	public String getTableName()
	{
		return tableName;
	}
	/**
	 * @param tablename
	 *            The name of the dimension lookup table to set.
	 */
	public void setTableName(String tablename)
	{
		this.tableName = tablename;
	}
	/**
	 * @return Returns the database connection meta-data.
	 */
	public DatabaseMeta getDatabaseMeta()
	{
		return databaseMeta;
	}
	/**
	 * @param database
	 *            The database connection meta-data to set.
	 */
	public void setDatabaseMeta(DatabaseMeta database)
	{
		this.databaseMeta = database;
	}
	/**
	 * @return true when the step updates the dimension, false for lookup-only.
	 */
	public boolean isUpdate()
	{
		return update;
	}
	/**
	 * @param update
	 *            true to update the dimension, false for lookup-only.
	 */
	public void setUpdate(boolean update)
	{
		this.update = update;
	}
	/**
	 * @return Returns the autoIncrement flag (use auto-increment field as technical key).
	 */
	public boolean isAutoIncrement()
	{
		return autoIncrement;
	}
	/**
	 * @param autoIncrement The autoIncrement flag to set.
	 */
	public void setAutoIncrement(boolean autoIncrement)
	{
		this.autoIncrement = autoIncrement;
	}
	/**
	 * Set the way how the technical key field should be created.
	 *
	 * @param techKeyCreation which method to use for the creation
	 * of the technical key: one of the CREATION_METHOD_* constants.
	 */
	public void setTechKeyCreation(String techKeyCreation)
	{
		this.techKeyCreation = techKeyCreation;
	}
	/**
	 * Get the way how the technical key field should be created.
	 *
	 * @return creation way for the technical key (a CREATION_METHOD_* constant).
	 */
	public String getTechKeyCreation()
	{
		return this.techKeyCreation;
	}
	/**
	 * @return Returns the commitSize (number of rows between commits).
	 */
	public int getCommitSize()
	{
		return commitSize;
	}
	/**
	 * @param commitSize The commitSize to set.
	 */
	public void setCommitSize(int commitSize)
	{
		this.commitSize = commitSize;
	}
	/**
	 * @return Returns the dateField (stream field used for the date range lookup).
	 */
	public String getDateField()
	{
		return dateField;
	}
	/**
	 * @param dateField The dateField to set.
	 */
	public void setDateField(String dateField)
	{
		this.dateField = dateField;
	}
	/**
	 * @return Returns the dateFrom (start-of-range column in the dimension).
	 */
	public String getDateFrom()
	{
		return dateFrom;
	}
	/**
	 * @param dateFrom The dateFrom to set.
	 */
	public void setDateFrom(String dateFrom)
	{
		this.dateFrom = dateFrom;
	}
	/**
	 * @return Returns the dateTo (end-of-range column in the dimension).
	 */
	public String getDateTo()
	{
		return dateTo;
	}
	/**
	 * @param dateTo The dateTo to set.
	 */
	public void setDateTo(String dateTo)
	{
		this.dateTo = dateTo;
	}
	/**
	 * @return Fields in the dimension to update or retrieve.
	 */
	public String[] getFieldLookup()
	{
		return fieldLookup;
	}
	/**
	 * @param fieldLookup sets the fields in the dimension to update or retrieve.
	 */
	public void setFieldLookup(String[] fieldLookup)
	{
		this.fieldLookup = fieldLookup;
	}
	/**
	 * @return Fields containing the values in the input stream to update the dimension with.
	 */
	public String[] getFieldStream()
	{
		return fieldStream;
	}
	/**
	 * @param fieldStream The fields containing the values in the input stream to update the dimension with.
	 */
	public void setFieldStream(String[] fieldStream)
	{
		this.fieldStream = fieldStream;
	}
	/**
	 * @return Returns the fieldUpdate array (one TYPE_UPDATE_DIM_* per lookup field).
	 */
	public int[] getFieldUpdate()
	{
		return fieldUpdate;
	}
	/**
	 * @param fieldUpdate The fieldUpdate array to set.
	 */
	public void setFieldUpdate(int[] fieldUpdate)
	{
		this.fieldUpdate = fieldUpdate;
	}
	/**
	 * @return Returns the keyField (technical/surrogate key column name).
	 */
	public String getKeyField()
	{
		return keyField;
	}
	/**
	 * @param keyField The keyField to set.
	 */
	public void setKeyField(String keyField)
	{
		this.keyField = keyField;
	}
	/**
	 * @return Returns the keyLookup (natural key columns in the dimension).
	 */
	public String[] getKeyLookup()
	{
		return keyLookup;
	}
	/**
	 * @param keyLookup The keyLookup to set.
	 */
	public void setKeyLookup(String[] keyLookup)
	{
		this.keyLookup = keyLookup;
	}
	/**
	 * @return Returns the keyRename (new name for the technical key field, may be empty).
	 */
	public String getKeyRename()
	{
		return keyRename;
	}
	/**
	 * @param keyRename The keyRename to set.
	 */
	public void setKeyRename(String keyRename)
	{
		this.keyRename = keyRename;
	}
	/**
	 * @return Returns the keyStream (natural key fields in the input stream).
	 */
	public String[] getKeyStream()
	{
		return keyStream;
	}
	/**
	 * @param keyStream The keyStream to set.
	 */
	public void setKeyStream(String[] keyStream)
	{
		this.keyStream = keyStream;
	}
	/**
	 * @return Returns the maxYear (used as "plus infinity" in the date range).
	 */
	public int getMaxYear()
	{
		return maxYear;
	}
	/**
	 * @param maxYear The maxYear to set.
	 */
	public void setMaxYear(int maxYear)
	{
		this.maxYear = maxYear;
	}
	/**
	 * @return Returns the minYear (used as "minus infinity" in the date range).
	 */
	public int getMinYear()
	{
		return minYear;
	}
	/**
	 * @param minYear The minYear to set.
	 */
	public void setMinYear(int minYear)
	{
		this.minYear = minYear;
	}
	/**
	 * @return Returns the notFound default value.
	 */
	public Value getNotFound()
	{
		return notFound;
	}
	/**
	 * @param notFound The notFound default value to set.
	 */
	public void setNotFound(Value notFound)
	{
		this.notFound = notFound;
	}
	/**
	 * @return Returns the sequenceName used to generate technical keys.
	 */
	public String getSequenceName()
	{
		return sequenceName;
	}
	/**
	 * @param sequenceName The sequenceName to set.
	 */
	public void setSequenceName(String sequenceName)
	{
		this.sequenceName = sequenceName;
	}
	/**
	 * @return Returns the versionField (dimension column holding the row version).
	 */
	public String getVersionField()
	{
		return versionField;
	}
	/**
	 * @param versionField The versionField to set.
	 */
	public void setVersionField(String versionField)
	{
		this.versionField = versionField;
	}
	/**
	 * Load the step settings from the given XML node.
	 * Delegates to {@link #readData(Node, ArrayList)}; 'counters' is unused here.
	 */
	public void loadXML(Node stepnode, ArrayList databases, Hashtable counters) throws KettleXMLException
	{
		readData(stepnode, databases);
	}
	/**
	 * Allocate the key and field arrays.
	 *
	 * @param nrkeys   number of natural-key entries
	 * @param nrfields number of dimension field entries
	 */
	public void allocate(int nrkeys, int nrfields)
	{
		keyStream = new String[nrkeys];
		keyLookup = new String[nrkeys];
		fieldStream = new String[nrfields];
		fieldLookup = new String[nrfields];
		fieldUpdate = new int[nrfields];
	}
public Object clone()
{
DimensionLookupMeta retval = (DimensionLookupMeta) super.clone();
int nrkeys = keyStream.length;
int nrfields = fieldStream.length;
retval.allocate(nrkeys, nrfields);
for (int i = 0; i < nrkeys; i++)
{
retval.keyStream[i] = keyStream[i];
retval.keyLookup[i] = keyLookup[i];
}
for (int i = 0; i < nrfields; i++)
{
retval.fieldStream[i] = fieldStream[i];
retval.fieldLookup[i] = fieldLookup[i];
retval.fieldUpdate[i] = fieldUpdate[i];
}
return retval;
}
public final static int getUpdateType(boolean upd, String ty)
{
if (upd)
{
for (int i = 0; i < typeDesc.length; i++)
{
if (typeDesc[i].equalsIgnoreCase(ty))
return i;
}
if ("Y".equalsIgnoreCase(ty)) //$NON-NLS-1$
return TYPE_UPDATE_DIM_PUNCHTHROUGH;
return TYPE_UPDATE_DIM_INSERT; // INSERT is the default: don't lose information.
}
else
{
int retval = Value.getType(ty);
if (retval == Value.VALUE_TYPE_NONE)
retval = Value.VALUE_TYPE_STRING;
return retval;
}
}
public final static String getUpdateType(boolean upd, int t)
{
if (!upd)
return Value.getTypeDesc(t);
else
return typeDesc[t];
}
	/**
	 * Populate this object from the step's XML node.
	 *
	 * @param stepnode  the XML node describing the step
	 * @param databases the available database connections, matched by name
	 * @throws KettleXMLException when any part of the XML cannot be read
	 */
	private void readData(Node stepnode, ArrayList databases) throws KettleXMLException
	{
		try
		{
			String upd;
			int nrkeys, nrfields;
			String commit;
			tableName = XMLHandler.getTagValue(stepnode, "table"); //$NON-NLS-1$
			String con = XMLHandler.getTagValue(stepnode, "connection"); //$NON-NLS-1$
			databaseMeta = Const.findDatabase(databases, con);
			commit = XMLHandler.getTagValue(stepnode, "commit"); //$NON-NLS-1$
			commitSize = Const.toInt(commit, 0);
			upd = XMLHandler.getTagValue(stepnode, "update"); //$NON-NLS-1$
			// NOTE(review): if the <update> tag is absent this NPEs into the catch
			// block below and surfaces as a KettleXMLException — confirm intended.
			if (upd.equalsIgnoreCase("Y")) //$NON-NLS-1$
				update = true;
			else
				update = false;
			Node fields = XMLHandler.getSubNode(stepnode, "fields"); //$NON-NLS-1$
			nrkeys = XMLHandler.countNodes(fields, "key"); //$NON-NLS-1$
			nrfields = XMLHandler.countNodes(fields, "field"); //$NON-NLS-1$
			allocate(nrkeys, nrfields);
			// Read keys to dimension
			for (int i = 0; i < nrkeys; i++)
			{
				Node knode = XMLHandler.getSubNodeByNr(fields, "key", i); //$NON-NLS-1$
				keyStream[i] = XMLHandler.getTagValue(knode, "name"); //$NON-NLS-1$
				keyLookup[i] = XMLHandler.getTagValue(knode, "lookup"); //$NON-NLS-1$
			}
			// Only one date is supported
			// No datefield: use system date...
			Node dnode = XMLHandler.getSubNode(fields, "date"); //$NON-NLS-1$
			dateField = XMLHandler.getTagValue(dnode, "name"); //$NON-NLS-1$
			dateFrom = XMLHandler.getTagValue(dnode, "from"); //$NON-NLS-1$
			dateTo = XMLHandler.getTagValue(dnode, "to"); //$NON-NLS-1$
			// Read the dimension fields and how each reacts to updates.
			for (int i = 0; i < nrfields; i++)
			{
				Node fnode = XMLHandler.getSubNodeByNr(fields, "field", i); //$NON-NLS-1$
				fieldStream[i] = XMLHandler.getTagValue(fnode, "name"); //$NON-NLS-1$
				fieldLookup[i] = XMLHandler.getTagValue(fnode, "lookup"); //$NON-NLS-1$
				upd = XMLHandler.getTagValue(fnode, "update"); //$NON-NLS-1$
				fieldUpdate[i] = getUpdateType(update, upd);
			}
			if (update)
			{
				// If this is empty: use auto-increment field!
				sequenceName = XMLHandler.getTagValue(stepnode, "sequence"); //$NON-NLS-1$
			}
			maxYear = Const.toInt(XMLHandler.getTagValue(stepnode, "max_year"), Const.MAX_YEAR); //$NON-NLS-1$
			minYear = Const.toInt(XMLHandler.getTagValue(stepnode, "min_year"), Const.MIN_YEAR); //$NON-NLS-1$
			keyField = XMLHandler.getTagValue(fields, "return", "name"); //$NON-NLS-1$ //$NON-NLS-2$
			keyRename = XMLHandler.getTagValue(fields, "return", "rename"); //$NON-NLS-1$ //$NON-NLS-2$
			// Auto-increment defaults to true unless explicitly "N".
			autoIncrement = !"N".equalsIgnoreCase(XMLHandler.getTagValue(fields, "return", "use_autoinc")); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
			versionField = XMLHandler.getTagValue(fields, "return", "version"); //$NON-NLS-1$ //$NON-NLS-2$
			setTechKeyCreation(XMLHandler.getTagValue(fields, "return", "creation_method")); //$NON-NLS-1$
		}
		catch (Exception e)
		{
			throw new KettleXMLException(Messages.getString("DimensionLookupMeta.Exception.UnableToLoadStepInfoFromXML"), e); //$NON-NLS-1$
		}
	}
	/**
	 * Reset all settings to their defaults (update mode, empty key/field lists,
	 * standard date range columns and year bounds).
	 */
	public void setDefault()
	{
		int nrkeys, nrfields;
		tableName = Messages.getString("DimensionLookupMeta.DefualtTableName"); //$NON-NLS-1$
		databaseMeta = null;
		commitSize = 0;
		update = true;
		nrkeys = 0;
		nrfields = 0;
		allocate(nrkeys, nrfields);
		// Read keys to dimension
		// NOTE: with nrkeys == 0 these loops do nothing; they are kept as a
		// template for seeding example entries.
		for (int i = 0; i < nrkeys; i++)
		{
			keyStream[i] = "key" + i; //$NON-NLS-1$
			keyLookup[i] = "keylookup" + i; //$NON-NLS-1$
		}
		for (int i = 0; i < nrfields; i++)
		{
			fieldStream[i] = "field" + i; //$NON-NLS-1$
			fieldLookup[i] = "lookup" + i; //$NON-NLS-1$
			fieldUpdate[i] = DimensionLookupMeta.TYPE_UPDATE_DIM_INSERT;
		}
		// Only one date is supported
		// No datefield: use system date...
		dateField = ""; //$NON-NLS-1$
		dateFrom = "date_from"; //$NON-NLS-1$
		dateTo = "date_to"; //$NON-NLS-1$
		minYear = Const.MIN_YEAR;
		maxYear = Const.MAX_YEAR;
		keyField = ""; //$NON-NLS-1$
		keyRename = ""; //$NON-NLS-1$
		autoIncrement = false;
		versionField = "version"; //$NON-NLS-1$
	}
	/**
	 * Describe the fields this step adds to the row: always the technical key
	 * (as an integer, possibly renamed), and in lookup-only mode also the
	 * requested dimension fields, whose types are read from the database table.
	 *
	 * @param r    the incoming row meta-data (null starts a fresh row)
	 * @param name the step name, recorded as origin of the added values
	 * @param info unused here
	 * @return the row meta-data including this step's output fields
	 * @throws KettleStepException when a return field cannot be resolved against the table
	 */
	public Row getFields(Row r, String name, Row info) throws KettleStepException
	{
		LogWriter log = LogWriter.getInstance();
		Row row;
		if (r == null)
			row = new Row(); // give back values
		else
			row = r; // add to the existing row of values...
		// The technical key is always returned, optionally under its renamed name.
		Value v = new Value(keyField, Value.VALUE_TYPE_INTEGER);
		if (keyRename != null && keyRename.length() > 0)
			v.setName(keyRename);
		v.setLength(9, 0);
		v.setOrigin(name);
		row.addValue(v);
		// retrieve extra fields on lookup?
		// Don't bother if there are no return values specified.
		if (!update && fieldLookup.length>0)
		{
			try
			{
				// Get the rows from the table...
				if (databaseMeta!=null)
				{
					// NOTE(review): the Database handle is never explicitly
					// disconnected here — confirm whether that leaks a connection.
					Database db = new Database(databaseMeta);
					Row extraFields = db.getTableFields(tableName);
					for (int i = 0; i < fieldLookup.length; i++)
					{
						v = extraFields.searchValue(fieldLookup[i]);
						if (v==null)
						{
							String message = Messages.getString("DimensionLookupMeta.Exception.UnableToFindReturnField",fieldLookup[i]); //$NON-NLS-1$ //$NON-NLS-2$
							log.logError(toString(), message);
							throw new KettleStepException(message);
						}
						// If the field needs to be renamed, rename
						if (fieldStream[i] != null && fieldStream[i].length() > 0)
						{
							v.setName(fieldStream[i]);
						}
						v.setOrigin(name);
						row.addValue(v);
					}
				}
				else
				{
					String message = Messages.getString("DimensionLookupMeta.Exception.UnableToRetrieveDataTypeOfReturnField"); //$NON-NLS-1$
					log.logError(toString(), message);
					throw new KettleStepException(message);
				}
			}
			catch(Exception e)
			{
				String message = Messages.getString("DimensionLookupMeta.Exception.UnableToRetrieveDataTypeOfReturnField2"); //$NON-NLS-1$
				log.logError(toString(), message);
				throw new KettleStepException(message, e);
			}
		}
		return row;
	}
	/**
	 * Serialize this step's settings to the XML fragment stored in the
	 * transformation file. The structure mirrors what readData() expects.
	 *
	 * @return the XML representation of this step's settings
	 */
	public String getXML()
	{
		StringBuffer retval = new StringBuffer(512);
		retval.append("      ").append(XMLHandler.addTagValue("table", tableName)); //$NON-NLS-1$ //$NON-NLS-2$
		retval.append("      ").append(XMLHandler.addTagValue("connection", databaseMeta == null ? "" : databaseMeta.getName())); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
		retval.append("      ").append(XMLHandler.addTagValue("commit", commitSize)); //$NON-NLS-1$ //$NON-NLS-2$
		retval.append("      ").append(XMLHandler.addTagValue("update", update)); //$NON-NLS-1$ //$NON-NLS-2$
		retval.append("      <fields>").append(Const.CR); //$NON-NLS-1$
		// One <key> element per natural-key mapping.
		for (int i = 0; i < keyStream.length; i++)
		{
			retval.append("        <key>").append(Const.CR); //$NON-NLS-1$
			retval.append("          ").append(XMLHandler.addTagValue("name", keyStream[i])); //$NON-NLS-1$ //$NON-NLS-2$
			retval.append("          ").append(XMLHandler.addTagValue("lookup", keyLookup[i])); //$NON-NLS-1$ //$NON-NLS-2$
			retval.append("        </key>" + Const.CR); //$NON-NLS-1$
		}
		retval.append("        <date>").append(Const.CR); //$NON-NLS-1$
		retval.append("          ").append(XMLHandler.addTagValue("name", dateField)); //$NON-NLS-1$ //$NON-NLS-2$
		retval.append("          ").append(XMLHandler.addTagValue("from", dateFrom)); //$NON-NLS-1$ //$NON-NLS-2$
		retval.append("          ").append(XMLHandler.addTagValue("to", dateTo)); //$NON-NLS-1$ //$NON-NLS-2$
		retval.append("        </date>").append(Const.CR); //$NON-NLS-1$
		// One <field> element per dimension field (null names are skipped).
		if (fieldStream != null)
			for (int i = 0; i < fieldStream.length; i++)
			{
				if (fieldStream[i] != null)
				{
					retval.append("        <field>").append(Const.CR); //$NON-NLS-1$
					retval.append("          ").append(XMLHandler.addTagValue("name", fieldStream[i])); //$NON-NLS-1$ //$NON-NLS-2$
					retval.append("          ").append(XMLHandler.addTagValue("lookup", fieldLookup[i])); //$NON-NLS-1$ //$NON-NLS-2$
					retval.append("          ").append(XMLHandler.addTagValue("update", getUpdateType(update, fieldUpdate[i]))); //$NON-NLS-1$ //$NON-NLS-2$
					retval.append("        </field>").append(Const.CR); //$NON-NLS-1$
				}
			}
		retval.append("        <return>").append(Const.CR); //$NON-NLS-1$
		retval.append("          ").append(XMLHandler.addTagValue("name", keyField)); //$NON-NLS-1$ //$NON-NLS-2$
		retval.append("          ").append(XMLHandler.addTagValue("rename", keyRename)); //$NON-NLS-1$ //$NON-NLS-2$
		retval.append("          ").append(XMLHandler.addTagValue("creation_method", techKeyCreation)); //$NON-NLS-1$ //$NON-NLS-2$
		retval.append("          ").append(XMLHandler.addTagValue("use_autoinc", autoIncrement)); //$NON-NLS-1$ //$NON-NLS-2$
		retval.append("          ").append(XMLHandler.addTagValue("version", versionField)); //$NON-NLS-1$ //$NON-NLS-2$
		retval.append("        </return>").append(Const.CR); //$NON-NLS-1$
		retval.append("      </fields>").append(Const.CR); //$NON-NLS-1$
		// If sequence is empty: use auto-increment field!
		retval.append("      ").append(XMLHandler.addTagValue("sequence", sequenceName)); //$NON-NLS-1$ //$NON-NLS-2$
		retval.append("      ").append(XMLHandler.addTagValue("min_year", minYear)); //$NON-NLS-1$ //$NON-NLS-2$
		retval.append("      ").append(XMLHandler.addTagValue("max_year", maxYear)); //$NON-NLS-1$ //$NON-NLS-2$
		return retval.toString();
	}
	/**
	 * Load the step settings from the repository.
	 *
	 * @param rep       the repository to read from
	 * @param id_step   the step id in the repository
	 * @param databases available database connections, matched by id
	 * @param counters  unused here
	 * @throws KettleException when reading any attribute fails
	 */
	public void readRep(Repository rep, long id_step, ArrayList databases, Hashtable counters) throws KettleException
	{
		try
		{
			long id_connection = rep.getStepAttributeInteger(id_step, "id_connection"); //$NON-NLS-1$
			databaseMeta = Const.findDatabase(databases, id_connection);
			tableName = rep.getStepAttributeString(id_step, "table"); //$NON-NLS-1$
			commitSize = (int) rep.getStepAttributeInteger(id_step, "commit"); //$NON-NLS-1$
			update = rep.getStepAttributeBoolean(id_step, "update"); //$NON-NLS-1$
			int nrkeys = rep.countNrStepAttributes(id_step, "lookup_key_name"); //$NON-NLS-1$
			int nrfields = rep.countNrStepAttributes(id_step, "field_name"); //$NON-NLS-1$
			allocate(nrkeys, nrfields);
			// Natural-key mappings.
			for (int i = 0; i < nrkeys; i++)
			{
				keyStream[i] = rep.getStepAttributeString(id_step, i, "lookup_key_name"); //$NON-NLS-1$
				keyLookup[i] = rep.getStepAttributeString(id_step, i, "lookup_key_field"); //$NON-NLS-1$
			}
			dateField = rep.getStepAttributeString(id_step, "date_name"); //$NON-NLS-1$
			dateFrom = rep.getStepAttributeString(id_step, "date_from"); //$NON-NLS-1$
			dateTo = rep.getStepAttributeString(id_step, "date_to"); //$NON-NLS-1$
			// Dimension fields and their update types.
			for (int i = 0; i < nrfields; i++)
			{
				fieldStream[i] = rep.getStepAttributeString(id_step, i, "field_name"); //$NON-NLS-1$
				fieldLookup[i] = rep.getStepAttributeString(id_step, i, "field_lookup"); //$NON-NLS-1$
				fieldUpdate[i] = getUpdateType(update, rep.getStepAttributeString(id_step, i, "field_update")); //$NON-NLS-1$
			}
			keyField = rep.getStepAttributeString(id_step, "return_name"); //$NON-NLS-1$
			keyRename = rep.getStepAttributeString(id_step, "return_rename"); //$NON-NLS-1$
			autoIncrement = rep.getStepAttributeBoolean(id_step, "use_autoinc"); //$NON-NLS-1$
			versionField = rep.getStepAttributeString(id_step, "version_field"); //$NON-NLS-1$
			techKeyCreation = rep.getStepAttributeString(id_step, "creation_method"); //$NON-NLS-1$
			sequenceName = rep.getStepAttributeString(id_step, "sequence"); //$NON-NLS-1$
			minYear = (int) rep.getStepAttributeInteger(id_step, "min_year"); //$NON-NLS-1$
			maxYear = (int) rep.getStepAttributeInteger(id_step, "max_year"); //$NON-NLS-1$
		}
		catch (Exception e)
		{
			throw new KettleException(Messages.getString("DimensionLookupMeta.Exception.UnexpectedErrorReadingStepInfoFromRepository"), e); //$NON-NLS-1$
		}
	}
	/**
	 * Save the step settings to the repository. Mirrors readRep().
	 *
	 * @param rep               the repository to write to
	 * @param id_transformation the transformation id
	 * @param id_step           the step id
	 * @throws KettleException when writing any attribute fails
	 */
	public void saveRep(Repository rep, long id_transformation, long id_step) throws KettleException
	{
		try
		{
			rep.saveStepAttribute(id_transformation, id_step, "table", tableName); //$NON-NLS-1$
			rep
				.saveStepAttribute(id_transformation, id_step, "id_connection", databaseMeta == null ? -1 : databaseMeta //$NON-NLS-1$
					.getID());
			rep.saveStepAttribute(id_transformation, id_step, "commit", commitSize); //$NON-NLS-1$
			rep.saveStepAttribute(id_transformation, id_step, "update", update); //$NON-NLS-1$
			// Natural-key mappings.
			for (int i = 0; i < keyStream.length; i++)
			{
				rep.saveStepAttribute(id_transformation, id_step, i, "lookup_key_name", keyStream[i]); //$NON-NLS-1$
				rep.saveStepAttribute(id_transformation, id_step, i, "lookup_key_field", keyLookup[i]); //$NON-NLS-1$
			}
			rep.saveStepAttribute(id_transformation, id_step, "date_name", dateField); //$NON-NLS-1$
			rep.saveStepAttribute(id_transformation, id_step, "date_from", dateFrom); //$NON-NLS-1$
			rep.saveStepAttribute(id_transformation, id_step, "date_to", dateTo); //$NON-NLS-1$
			// Dimension fields and their update types (null names are skipped).
			if (fieldStream != null)
				for (int i = 0; i < fieldStream.length; i++)
				{
					if (fieldStream[i] != null)
					{
						rep.saveStepAttribute(id_transformation, id_step, i, "field_name", fieldStream[i]); //$NON-NLS-1$
						rep.saveStepAttribute(id_transformation, id_step, i, "field_lookup", fieldLookup[i]); //$NON-NLS-1$
						rep.saveStepAttribute(id_transformation, id_step, i, "field_update", //$NON-NLS-1$
							getUpdateType(update, fieldUpdate[i]));
					}
				}
			rep.saveStepAttribute(id_transformation, id_step, "return_name", keyField); //$NON-NLS-1$
			rep.saveStepAttribute(id_transformation, id_step, "return_rename", keyRename); //$NON-NLS-1$
			rep.saveStepAttribute(id_transformation, id_step, "creation_method", techKeyCreation); //$NON-NLS-1$
			// For the moment still save 'use_autoinc' for backwards compatibility (Sven Boden).
			rep.saveStepAttribute(id_transformation, id_step, "use_autoinc", autoIncrement); //$NON-NLS-1$
			rep.saveStepAttribute(id_transformation, id_step, "version_field", versionField); //$NON-NLS-1$
			rep.saveStepAttribute(id_transformation, id_step, "sequence", sequenceName); //$NON-NLS-1$
			rep.saveStepAttribute(id_transformation, id_step, "min_year", minYear); //$NON-NLS-1$
			rep.saveStepAttribute(id_transformation, id_step, "max_year", maxYear); //$NON-NLS-1$
			// Also, save the step-database relationship!
			if (databaseMeta != null)
				rep.insertStepDatabase(id_transformation, id_step, databaseMeta.getID());
		}
		catch (KettleDatabaseException dbe)
		{
			throw new KettleException(Messages.getString("DimensionLookupMeta.Exception.UnableToLoadDimensionLookupInfoFromRepository"), dbe); //$NON-NLS-1$
		}
	}
public Date getMinDate()
{
Calendar mincal = Calendar.getInstance();
mincal.set(Calendar.YEAR, minYear);
mincal.set(Calendar.MONTH, 0);
mincal.set(Calendar.DAY_OF_MONTH, 1);
mincal.set(Calendar.HOUR_OF_DAY, 0);
mincal.set(Calendar.MINUTE, 0);
mincal.set(Calendar.SECOND, 0);
mincal.set(Calendar.MILLISECOND, 0);
return mincal.getTime();
}
public Date getMaxDate()
{
Calendar mincal = Calendar.getInstance();
mincal.set(Calendar.YEAR, maxYear);
mincal.set(Calendar.MONTH, 11);
mincal.set(Calendar.DAY_OF_MONTH, 31);
mincal.set(Calendar.HOUR_OF_DAY, 23);
mincal.set(Calendar.MINUTE, 59);
mincal.set(Calendar.SECOND, 59);
mincal.set(Calendar.MILLISECOND, 999);
return mincal.getTime();
}
	/**
	 * Verify this step's configuration and add CheckResult remarks:
	 * delegates to checkUpdate()/checkLookup() depending on the mode, validates
	 * the technical-key creation method, and checks for incoming streams.
	 *
	 * @param remarks  list the CheckResults are added to
	 * @param stepinfo the step being checked
	 * @param prev     fields arriving from previous steps
	 * @param input    names of input steps
	 * @param output   names of output steps
	 * @param info     informational fields (unused here)
	 */
	public void check(ArrayList remarks, StepMeta stepinfo, Row prev, String input[], String output[], Row info)
	{
		if (update)
			checkUpdate(remarks, stepinfo, prev);
		else
			checkLookup(remarks, stepinfo, prev);
		if ( techKeyCreation != null )
		{
			// post 2.2 version: creation method must be one of the known constants.
			if ( !(CREATION_METHOD_AUTOINC.equals(techKeyCreation) ||
				   CREATION_METHOD_SEQUENCE.equals(techKeyCreation) ||
				   CREATION_METHOD_TABLEMAX.equals(techKeyCreation)) )
			{
				String error_message = Messages.getString("DimensionLookupMeta.CheckResult.ErrorTechKeyCreation")+ ": " + techKeyCreation +"!"; //$NON-NLS-1$ //$NON-NLS-2$
				CheckResult cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, error_message, stepinfo);
				remarks.add(cr);
			}
		}
		// See if we have input streams leading to this step!
		if (input.length > 0)
		{
			CheckResult cr = new CheckResult(CheckResult.TYPE_RESULT_OK, Messages.getString("DimensionLookupMeta.CheckResult.StepReceiveInfoOK"), //$NON-NLS-1$
				stepinfo);
			remarks.add(cr);
		}
		else
		{
			CheckResult cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, Messages.getString("DimensionLookupMeta.CheckResult.NoInputReceiveFromOtherSteps"), //$NON-NLS-1$
				stepinfo);
			remarks.add(cr);
		}
	}
/**
 * Validates the configuration for dimension UPDATE mode and appends one
 * CheckResult per verified aspect to <code>remarks</code>:
 * <ul>
 * <li>all compare/update fields exist in the dimension table,</li>
 * <li>technical key, version field and both date-range fields are configured
 *     and present in the table,</li>
 * <li>all configured stream fields arrive from the previous step,</li>
 * <li>the configured sequence exists (when sequence key generation is used).</li>
 * </ul>
 *
 * @param remarks  list the CheckResult findings are appended to
 * @param stepinfo the step being checked (attached to every CheckResult)
 * @param prev     fields arriving from the previous step, or null when unknown
 */
private void checkUpdate(ArrayList remarks, StepMeta stepinfo, Row prev)
{
    LogWriter log = LogWriter.getInstance();
    CheckResult cr;
    String error_message = ""; //$NON-NLS-1$
    if (databaseMeta != null)
    {
        Database db = new Database(databaseMeta);
        try
        {
            db.connect();
            if (tableName != null && tableName.length() != 0)
            {
                boolean first = true;
                boolean error_found = false;
                error_message = ""; //$NON-NLS-1$
                Row r = db.getTableFields(tableName);
                if (r != null)
                {
                    // Every field to store/compare must exist in the dimension table.
                    for (int i = 0; i < fieldLookup.length; i++)
                    {
                        String lufield = fieldLookup[i];
                        log.logDebug(toString(), Messages.getString("DimensionLookupMeta.Log.CheckLookupField") + i + " --> " + lufield //$NON-NLS-1$ //$NON-NLS-2$
                                + " in lookup table..."); //$NON-NLS-1$
                        Value v = r.searchValue(lufield);
                        if (v == null)
                        {
                            if (first)
                            {
                                first = false;
                                error_message += Messages.getString("DimensionLookupMeta.CheckResult.MissingCompareFieldsInTargetTable") + Const.CR; //$NON-NLS-1$
                            }
                            error_found = true;
                            error_message += "\t\t" + lufield + Const.CR; //$NON-NLS-1$
                        }
                    }
                    if (error_found)
                    {
                        cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, error_message, stepinfo);
                    }
                    else
                    {
                        cr = new CheckResult(CheckResult.TYPE_RESULT_OK,
                                Messages.getString("DimensionLookupMeta.CheckResult.AllLookupFieldFound"), stepinfo); //$NON-NLS-1$
                    }
                    remarks.add(cr);
                    /* Also, check the fields: tk, version, from-to, ... */
                    // Technical key: required, and must exist in the table.
                    if (keyField!=null && keyField.length()>0)
                    {
                        if (r.searchValueIndex(keyField) < 0)
                        {
                            error_message = Messages.getString("DimensionLookupMeta.CheckResult.TechnicalKeyNotFound",keyField ) //$NON-NLS-1$ //$NON-NLS-2$
                                    + Const.CR;
                            cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, error_message, stepinfo);
                        }
                        else
                        {
                            error_message = Messages.getString("DimensionLookupMeta.CheckResult.TechnicalKeyFound",keyField ) //$NON-NLS-1$ //$NON-NLS-2$
                                    + Const.CR;
                            cr = new CheckResult(CheckResult.TYPE_RESULT_OK, error_message, stepinfo);
                        }
                        remarks.add(cr);
                    }
                    else
                    {
                        error_message = Messages.getString("DimensionLookupMeta.CheckResult.TechnicalKeyRequired") + Const.CR; //$NON-NLS-1$
                        remarks.add( new CheckResult(CheckResult.TYPE_RESULT_ERROR, error_message, stepinfo) );
                    }
                    // Version field: required, and must exist in the table.
                    if (versionField != null && versionField.length() > 0)
                    {
                        if (r.searchValueIndex(versionField) < 0)
                        {
                            error_message = Messages.getString("DimensionLookupMeta.CheckResult.VersionFieldNotFound", versionField //$NON-NLS-1$
                                    ) + Const.CR; //$NON-NLS-1$
                            cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, error_message, stepinfo);
                        }
                        else
                        {
                            error_message = Messages.getString("DimensionLookupMeta.CheckResult.VersionFieldFound", versionField ) + Const.CR; //$NON-NLS-1$ //$NON-NLS-2$
                            cr = new CheckResult(CheckResult.TYPE_RESULT_OK, error_message, stepinfo);
                        }
                        remarks.add(cr);
                    }
                    else
                    {
                        error_message = Messages.getString("DimensionLookupMeta.CheckResult.VersionKeyRequired") + Const.CR; //$NON-NLS-1$
                        remarks.add( new CheckResult(CheckResult.TYPE_RESULT_ERROR, error_message, stepinfo) );
                    }
                    // Date-range start field: required, and must exist in the table.
                    if (dateFrom != null && dateFrom.length() > 0)
                    {
                        if (r.searchValueIndex(dateFrom) < 0)
                        {
                            error_message = Messages.getString("DimensionLookupMeta.CheckResult.StartPointOfDaterangeNotFound", dateFrom //$NON-NLS-1$
                                    ) + Const.CR; //$NON-NLS-1$
                            cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, error_message, stepinfo);
                        }
                        else
                        {
                            error_message = Messages.getString("DimensionLookupMeta.CheckResult.StartPointOfDaterangeFound", dateFrom //$NON-NLS-1$
                                    ) + Const.CR; //$NON-NLS-1$
                            cr = new CheckResult(CheckResult.TYPE_RESULT_OK, error_message, stepinfo);
                        }
                        remarks.add(cr);
                    }
                    else
                    {
                        error_message = Messages.getString("DimensionLookupMeta.CheckResult.StartKeyRequired") + Const.CR; //$NON-NLS-1$
                        remarks.add( new CheckResult(CheckResult.TYPE_RESULT_ERROR, error_message, stepinfo) );
                    }
                    // Date-range end field: required, and must exist in the table.
                    if (dateTo != null && dateTo.length() > 0)
                    {
                        if (r.searchValueIndex(dateTo) < 0)
                        {
                            error_message = Messages.getString("DimensionLookupMeta.CheckResult.EndPointOfDaterangeNotFound", dateTo //$NON-NLS-1$
                                    ) + Const.CR; //$NON-NLS-1$
                            cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, error_message, stepinfo);
                        }
                        else
                        {
                            error_message = Messages.getString("DimensionLookupMeta.CheckResult.EndPointOfDaterangeFound", dateTo //$NON-NLS-1$
                                    ) + Const.CR; //$NON-NLS-1$
                            cr = new CheckResult(CheckResult.TYPE_RESULT_OK, error_message, stepinfo);
                        }
                        remarks.add(cr);
                    }
                    else
                    {
                        error_message = Messages.getString("DimensionLookupMeta.CheckResult.EndKeyRequired") + Const.CR; //$NON-NLS-1$
                        remarks.add( new CheckResult(CheckResult.TYPE_RESULT_ERROR, error_message, stepinfo) );
                    }
                }
                else
                {
                    error_message = Messages.getString("DimensionLookupMeta.CheckResult.CouldNotReadTableInfo"); //$NON-NLS-1$
                    cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, error_message, stepinfo);
                    remarks.add(cr);
                }
            }
            // Look up fields in the input stream <prev>
            if (prev != null && prev.size() > 0)
            {
                boolean first = true;
                error_message = ""; //$NON-NLS-1$
                boolean error_found = false;
                for (int i = 0; i < fieldStream.length; i++)
                {
                    log.logDebug(toString(), Messages.getString("DimensionLookupMeta.Log.CheckField" ,i + " --> " + fieldStream[i])); //$NON-NLS-1$
                    Value v = prev.searchValue(fieldStream[i]);
                    if (v == null)
                    {
                        if (first)
                        {
                            first = false;
                            error_message += Messages.getString("DimensionLookupMeta.CheckResult.MissongFields") + Const.CR; //$NON-NLS-1$
                        }
                        error_found = true;
                        error_message += "\t\t" + fieldStream[i] + Const.CR; //$NON-NLS-1$
                    }
                }
                if (error_found)
                {
                    cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, error_message, stepinfo);
                }
                else
                {
                    cr = new CheckResult(CheckResult.TYPE_RESULT_OK, Messages.getString("DimensionLookupMeta.CheckResult.AllFieldsFound"), //$NON-NLS-1$
                            stepinfo);
                }
                remarks.add(cr);
            }
            else
            {
                error_message = Messages.getString("DimensionLookupMeta.CheckResult.CouldNotReadFieldsFromPreviousStep") + Const.CR; //$NON-NLS-1$
                cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, error_message, stepinfo);
                remarks.add(cr);
            }
            // Check sequence
            if (databaseMeta.supportsSequences() && CREATION_METHOD_SEQUENCE.equals(getTechKeyCreation()) &&
                    sequenceName != null && sequenceName.length() != 0)
            {
                if (db.checkSequenceExists(sequenceName))
                {
                    error_message = Messages.getString("DimensionLookupMeta.CheckResult.SequenceExists", sequenceName ); //$NON-NLS-1$ //$NON-NLS-2$
                    cr = new CheckResult(CheckResult.TYPE_RESULT_OK, error_message, stepinfo);
                    remarks.add(cr);
                }
                else
                {
                    // FIX: was '+=', which appended stale text left over from the
                    // earlier field checks to the "sequence not found" message.
                    error_message = Messages.getString("DimensionLookupMeta.CheckResult.SequenceCouldNotFound", sequenceName ); //$NON-NLS-1$ //$NON-NLS-2$
                    cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, error_message, stepinfo);
                    remarks.add(cr);
                }
            }
        }
        catch (KettleException e)
        {
            error_message = Messages.getString("DimensionLookupMeta.CheckResult.CouldNotConectToDB") + e.getMessage(); //$NON-NLS-1$
            cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, error_message, stepinfo);
            remarks.add(cr);
        }
        finally
        {
            // FIX: release the connection; it was previously leaked on every check
            // (getTableFields() in this class disconnects the same way).
            db.disconnect();
        }
    }
    else
    {
        error_message = Messages.getString("DimensionLookupMeta.CheckResult.InvalidConnectionName"); //$NON-NLS-1$
        cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, error_message, stepinfo);
        remarks.add(cr);
    }
}
/**
 * Validates the configuration for dimension LOOKUP mode and appends one
 * CheckResult per verified aspect to <code>remarks</code>: the natural key
 * fields must exist both in the input stream and the dimension table (with
 * matching types), the fields to retrieve must exist in the table, and the
 * technical key, version and date-range fields must be present in the table.
 *
 * @param remarks  list the CheckResult findings are appended to
 * @param stepinfo the step being checked (attached to every CheckResult)
 * @param prev     fields arriving from the previous step, or null when unknown
 */
private void checkLookup(ArrayList remarks, StepMeta stepinfo, Row prev)
{
    int i;
    boolean error_found = false;
    String error_message = ""; //$NON-NLS-1$
    boolean first;
    CheckResult cr;
    if (databaseMeta != null)
    {
        Database db = new Database(databaseMeta);
        try
        {
            db.connect();
            if (tableName != null && tableName.length() != 0)
            {
                Row tableFields = db.getTableFields(tableName);
                if (tableFields != null)
                {
                    if (prev != null && prev.size() > 0)
                    {
                        // Start at the top, see if the key fields exist:
                        first = true;
                        boolean warning_found = false;
                        for (i = 0; i < keyStream.length; i++)
                        {
                            // Does the field exist in the input stream?
                            String strfield = keyStream[i];
                            Value strvalue = prev.searchValue(strfield);
                            if (strvalue == null)
                            {
                                if (first)
                                {
                                    first = false;
                                    error_message += Messages.getString("DimensionLookupMeta.CheckResult.KeyhasProblem") + Const.CR; //$NON-NLS-1$
                                }
                                error_found = true;
                                error_message += "\t\t" + keyField +Messages.getString("DimensionLookupMeta.CheckResult.KeyNotPresentInStream") + Const.CR; //$NON-NLS-1$ //$NON-NLS-2$
                            }
                            else
                            {
                                // does the field exist in the dimension table?
                                String dimfield = keyLookup[i];
                                Value dimvalue = tableFields.searchValue(dimfield);
                                if (dimvalue == null)
                                {
                                    if (first)
                                    {
                                        first = false;
                                        error_message += Messages.getString("DimensionLookupMeta.CheckResult.KeyhasProblem2") + Const.CR; //$NON-NLS-1$
                                    }
                                    error_found = true;
                                    error_message += "\t\t" + dimfield +Messages.getString("DimensionLookupMeta.CheckResult.KeyNotPresentInDimensiontable") //$NON-NLS-1$ //$NON-NLS-2$
                                            + tableName + ")" + Const.CR; //$NON-NLS-1$
                                }
                                else
                                {
                                    // Is the streamvalue of the same type as the dimension value?
                                    if (strvalue.getType() != dimvalue.getType())
                                    {
                                        if (first)
                                        {
                                            first = false;
                                            error_message += Messages.getString("DimensionLookupMeta.CheckResult.KeyhasProblem3") + Const.CR; //$NON-NLS-1$
                                        }
                                        // Type mismatch is a warning only: the DB can often convert.
                                        warning_found = true;
                                        error_message += "\t\t" + strfield + " (" + strvalue.getOrigin() //$NON-NLS-1$ //$NON-NLS-2$
                                                +Messages.getString("DimensionLookupMeta.CheckResult.KeyNotTheSameTypeAs") + dimfield + " (" //$NON-NLS-1$ //$NON-NLS-2$
                                                + tableName + ")" + Const.CR; //$NON-NLS-1$
                                        error_message += Messages.getString("DimensionLookupMeta.CheckResult.WarningInfoInDBConversion"); //$NON-NLS-1$
                                    }
                                }
                            }
                        }
                        if (error_found)
                        {
                            cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, error_message, stepinfo);
                        }
                        else if (warning_found)
                        {
                            cr = new CheckResult(CheckResult.TYPE_RESULT_WARNING, error_message, stepinfo);
                        }
                        else
                        {
                            cr = new CheckResult(
                                    CheckResult.TYPE_RESULT_OK,
                                    Messages.getString("DimensionLookupMeta.CheckResult.AllKeysFieldsFound"), //$NON-NLS-1$
                                    stepinfo);
                        }
                        remarks.add(cr);
                        // In case of lookup, the first column of the UpIns dialog table contains the table field
                        error_found = false;
                        // FIX: also reset 'first'; it could still be false from the key checks
                        // above, which suppressed the header line of this error message.
                        first = true;
                        for (i = 0; i < fieldLookup.length; i++)
                        {
                            String lufield = fieldLookup[i];
                            if (lufield != null && lufield.length() > 0)
                            {
                                // Checking compare field: lufield
                                Value v = tableFields.searchValue(lufield);
                                if (v == null)
                                {
                                    if (first)
                                    {
                                        first = false;
                                        error_message += Messages.getString("DimensionLookupMeta.CheckResult.FieldsToRetrieveNotExistInDimension") //$NON-NLS-1$
                                                + Const.CR;
                                    }
                                    error_found = true;
                                    error_message += "\t\t" + lufield + Const.CR; //$NON-NLS-1$
                                }
                            }
                        }
                        if (error_found)
                        {
                            cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, error_message, stepinfo);
                        }
                        else
                        {
                            cr = new CheckResult(CheckResult.TYPE_RESULT_OK,
                                    Messages.getString("DimensionLookupMeta.CheckResult.AllFieldsToRetrieveFound"), stepinfo); //$NON-NLS-1$
                        }
                        remarks.add(cr);
                        /* Also, check the fields: tk, version, from-to, ... */
                        if (tableFields.searchValueIndex(keyField) < 0)
                        {
                            error_message = Messages.getString("DimensionLookupMeta.CheckResult.TechnicalKeyNotFound", keyField ) //$NON-NLS-1$ //$NON-NLS-2$
                                    + Const.CR;
                            cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, error_message, stepinfo);
                        }
                        else
                        {
                            error_message = Messages.getString("DimensionLookupMeta.CheckResult.TechnicalKeyFound",keyField ) //$NON-NLS-1$ //$NON-NLS-2$
                                    + Const.CR;
                            cr = new CheckResult(CheckResult.TYPE_RESULT_OK, error_message, stepinfo);
                        }
                        remarks.add(cr);
                        if (tableFields.searchValueIndex(versionField) < 0)
                        {
                            error_message = Messages.getString("DimensionLookupMeta.CheckResult.VersionFieldNotFound",versionField //$NON-NLS-1$
                                    ) + Const.CR; //$NON-NLS-1$
                            cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, error_message, stepinfo);
                        }
                        else
                        {
                            error_message = Messages.getString("DimensionLookupMeta.CheckResult.VersionFieldFound", versionField ) //$NON-NLS-1$ //$NON-NLS-2$
                                    + Const.CR;
                            cr = new CheckResult(CheckResult.TYPE_RESULT_OK, error_message, stepinfo);
                        }
                        remarks.add(cr);
                        if (tableFields.searchValueIndex(dateFrom) < 0)
                        {
                            error_message = Messages.getString("DimensionLookupMeta.CheckResult.StartOfDaterangeFieldNotFound", dateFrom //$NON-NLS-1$
                                    ) + Const.CR; //$NON-NLS-1$
                            cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, error_message, stepinfo);
                        }
                        else
                        {
                            error_message = Messages.getString("DimensionLookupMeta.CheckResult.StartOfDaterangeFieldFound", dateFrom //$NON-NLS-1$
                                    ) + Const.CR; //$NON-NLS-1$
                            cr = new CheckResult(CheckResult.TYPE_RESULT_OK, error_message, stepinfo);
                        }
                        remarks.add(cr);
                        if (tableFields.searchValueIndex(dateTo) < 0)
                        {
                            error_message = Messages.getString("DimensionLookupMeta.CheckResult.EndOfDaterangeFieldNotFound", dateTo //$NON-NLS-1$
                                    ) + Const.CR; //$NON-NLS-1$
                            cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, error_message, stepinfo);
                        }
                        else
                        {
                            error_message = Messages.getString("DimensionLookupMeta.CheckResult.EndOfDaterangeFieldFound", dateTo //$NON-NLS-1$
                                    ); //$NON-NLS-1$
                            cr = new CheckResult(CheckResult.TYPE_RESULT_OK, error_message, stepinfo);
                        }
                        remarks.add(cr);
                    }
                    else
                    {
                        error_message = Messages.getString("DimensionLookupMeta.CheckResult.CouldNotReadFieldsFromPreviousStep") + Const.CR; //$NON-NLS-1$
                        cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, error_message, stepinfo);
                        remarks.add(cr);
                    }
                }
                else
                {
                    error_message = Messages.getString("DimensionLookupMeta.CheckResult.CouldNotReadTableInfo"); //$NON-NLS-1$
                    cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, error_message, stepinfo);
                    remarks.add(cr);
                }
            }
        }
        catch (KettleException e)
        {
            error_message = Messages.getString("DimensionLookupMeta.CheckResult.CouldNotConnectDB") + e.getMessage(); //$NON-NLS-1$
            cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, error_message, stepinfo);
            remarks.add(cr);
        }
        finally
        {
            // FIX: release the connection; it was previously leaked on every check
            // (getTableFields() in this class disconnects the same way).
            db.disconnect();
        }
    }
    else
    {
        error_message = Messages.getString("DimensionLookupMeta.CheckResult.InvalidConnection"); //$NON-NLS-1$
        cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, error_message, stepinfo);
        remarks.add(cr);
    }
}
/**
 * Reads the field layout of the configured dimension table.
 *
 * @return the table's fields, or null when no connection is configured or the
 *         lookup fails (the failure is logged, not thrown).
 */
public Row getTableFields()
{
    LogWriter log = LogWriter.getInstance();
    // Without a connection there is nothing to query.
    if (databaseMeta == null)
        return null;
    Database db = new Database(databaseMeta);
    Row fields = null;
    try
    {
        db.connect();
        fields = db.getTableFields(tableName);
    }
    catch (KettleDatabaseException dbe)
    {
        log.logError(toString(), Messages.getString("DimensionLookupMeta.Log.DatabaseErrorOccurred") + dbe.getMessage()); //$NON-NLS-1$
    }
    finally
    {
        db.disconnect();
    }
    return fields;
}
/**
 * Generates the DDL needed to make the dimension table match this step's
 * configuration: the table itself (technical key, version, date-range fields,
 * natural keys and attribute fields typed after the incoming stream), a lookup
 * index on the natural keys, a unique index on the technical key and,
 * optionally, the key-generation sequence.
 * Only produces SQL in update mode; in lookup mode nothing is generated.
 *
 * @param transMeta the transformation (unused here, part of the interface)
 * @param stepMeta  the step the statement is generated for
 * @param prev      fields arriving from the previous step; used to derive the
 *                  types of the key/attribute columns
 * @return a SQLStatement whose SQL is null when nothing needs to be done, and
 *         whose error is set when the configuration is incomplete
 */
public SQLStatement getSQLStatements(TransMeta transMeta, StepMeta stepMeta, Row prev)
{
    LogWriter log = LogWriter.getInstance();
    SQLStatement retval = new SQLStatement(stepMeta.getName(), databaseMeta, null); // default: nothing to do!
    if (update) // Only bother in case of update, not lookup!
    {
        log.logDebug(toString(), Messages.getString("DimensionLookupMeta.Log.Update")); //$NON-NLS-1$
        if (databaseMeta != null)
        {
            if (prev != null && prev.size() > 0)
            {
                if (tableName != null && tableName.length() > 0)
                {
                    Database db = new Database(databaseMeta);
                    try
                    {
                        db.connect();
                        String sql = ""; //$NON-NLS-1$
                        // How does the table look like?
                        Row fields = new Row();
                        // First the technical key
                        Value vkeyfield = new Value(keyField, Value.VALUE_TYPE_INTEGER);
                        vkeyfield.setLength(10);
                        fields.addValue(vkeyfield);
                        // The the version
                        Value vversion = new Value(versionField, Value.VALUE_TYPE_INTEGER);
                        vversion.setLength(5);
                        fields.addValue(vversion);
                        // The date from
                        Value vdatefrom = new Value(dateFrom, Value.VALUE_TYPE_DATE);
                        fields.addValue(vdatefrom);
                        // The date to
                        Value vdateto = new Value(dateTo, Value.VALUE_TYPE_DATE);
                        fields.addValue(vdateto);
                        String errors = ""; //$NON-NLS-1$
                        // Then the keys: each dimension key column takes the type of
                        // the corresponding stream field; missing stream fields are
                        // collected into a comma-separated error list.
                        for (int i = 0; i < keyLookup.length; i++)
                        {
                            Value vprev = prev.searchValue(keyStream[i]);
                            if (vprev != null)
                            {
                                Value field = new Value(vprev);
                                field.setName(keyLookup[i]);
                                fields.addValue(field);
                            }
                            else
                            {
                                if (errors.length() > 0)
                                    errors += ", "; //$NON-NLS-1$
                                errors += keyStream[i];
                            }
                        }
                        // Then the fields to update...
                        for (int i = 0; i < fieldLookup.length; i++)
                        {
                            Value vprev = prev.searchValue(fieldStream[i]);
                            if (vprev != null)
                            {
                                Value field = new Value(vprev);
                                field.setName(fieldLookup[i]);
                                fields.addValue(field);
                            }
                            else
                            {
                                if (errors.length() > 0)
                                    errors += ", "; //$NON-NLS-1$
                                errors += fieldStream[i];
                            }
                        }
                        if (errors.length() > 0)
                        {
                            retval.setError(Messages.getString("DimensionLookupMeta.ReturnValue.UnableToFindFields") + errors); //$NON-NLS-1$
                        }
                        log.logDebug(toString(), Messages.getString("DimensionLookupMeta.Log.GetDDLForTable") + tableName + "] : " //$NON-NLS-1$ //$NON-NLS-2$
                                + fields.toStringMeta());
                        // NOTE(review): keyField is passed as the primary-key column only
                        // when sequenceName is non-null but EMPTY; a null sequenceName
                        // yields null here, which looks inverted -- verify intended.
                        sql += db.getDDL(tableName, fields, (sequenceName != null && sequenceName.length() == 0) ? keyField
                                : null,
                                autoIncrement, null, true);
                        log.logDebug(toString(), "sql =" + sql); //$NON-NLS-1$
                        String idx_fields[] = null;
                        // Key lookup dimensions...
                        if (keyLookup != null && keyLookup.length > 0)
                        {
                            idx_fields = new String[keyLookup.length];
                            for (int i = 0; i < keyLookup.length; i++)
                                idx_fields[i] = keyLookup[i];
                        }
                        else
                        {
                            retval.setError(Messages.getString("DimensionLookupMeta.ReturnValue.NoKeyFieldsSpecified")); //$NON-NLS-1$
                        }
                        // Non-unique lookup index over the natural keys, if missing.
                        if (idx_fields != null && idx_fields.length > 0
                                && !db.checkIndexExists(tableName, idx_fields))
                        {
                            String indexname = "idx_" + tableName + "_lookup"; //$NON-NLS-1$ //$NON-NLS-2$
                            sql += db.getCreateIndexStatement(tableName, indexname, idx_fields, false, false,
                                    false, true);
                        }
                        // (Bitmap) index on technical key
                        idx_fields = new String[] { keyField };
                        if (keyField != null && keyField.length() > 0)
                        {
                            if (!db.checkIndexExists(tableName, idx_fields))
                            {
                                String indexname = "idx_" + tableName + "_tk"; //$NON-NLS-1$ //$NON-NLS-2$
                                sql += db.getCreateIndexStatement(tableName, indexname, idx_fields, true, false,
                                        true, true);
                            }
                        }
                        else
                        {
                            retval.setError(Messages.getString("DimensionLookupMeta.ReturnValue.TechnicalKeyFieldRequired")); //$NON-NLS-1$
                        }
                        // The optional Oracle sequence
                        if ( CREATION_METHOD_SEQUENCE.equals(getTechKeyCreation()) &&
                                sequenceName != null)
                        {
                            if (!db.checkSequenceExists(sequenceName))
                            {
                                sql += db.getCreateSequenceStatement(sequenceName, 1L, 1L, -1L, true);
                            }
                        }
                        // Empty SQL means everything already matches: report "nothing to do".
                        if (sql.length() == 0)
                            retval.setSQL(null);
                        else
                            retval.setSQL(sql);
                    }
                    catch (KettleDatabaseException dbe)
                    {
                        retval.setError(Messages.getString("DimensionLookupMeta.ReturnValue.ErrorOccurred") + dbe.getMessage()); //$NON-NLS-1$
                    }
                    finally
                    {
                        db.disconnect();
                    }
                }
                else
                {
                    retval.setError(Messages.getString("DimensionLookupMeta.ReturnValue.NoTableDefinedOnConnection")); //$NON-NLS-1$
                }
            }
            else
            {
                retval.setError(Messages.getString("DimensionLookupMeta.ReturnValue.NotReceivingAnyFields")); //$NON-NLS-1$
            }
        }
        else
        {
            retval.setError(Messages.getString("DimensionLookupMeta.ReturnValue.NoConnectionDefiendInStep")); //$NON-NLS-1$
        }
    }
    return retval;
}
/**
 * Registers this step's impact on the database: one DatabaseImpact entry per
 * natural key and per retrieved/stored field. In lookup mode the dimension is
 * only read; in update mode it is read and written.
 *
 * @param impact    list the DatabaseImpact entries are appended to
 * @param transMeta the transformation the step belongs to
 * @param stepMeta  the step itself
 * @param prev      fields arriving from the previous step; nothing is reported
 *                  when null
 */
public void analyseImpact(ArrayList impact, TransMeta transMeta, StepMeta stepMeta, Row prev, String input[], String output[], Row info)
{
    if (prev!=null)
    {
        // The two original branches differed only in the impact type; the field
        // enumeration is identical, so it is done once.
        int impactType = update ? DatabaseImpact.TYPE_IMPACT_READ_WRITE : DatabaseImpact.TYPE_IMPACT_READ;
        // Natural keys used to locate the dimension entry.
        for (int i=0;i<keyLookup.length;i++)
        {
            Value v = prev.searchValue(keyStream[i]);
            // FIX: guard every use of v -- searchValue() can return null and the
            // previous code dereferenced it (toStringMeta()/getOrigin()) unguarded.
            DatabaseImpact ii = new DatabaseImpact( impactType,
                    transMeta.getName(),
                    stepMeta.getName(),
                    databaseMeta.getDatabaseName(),
                    tableName,
                    keyLookup[i],
                    keyStream[i],
                    v!=null?v.getOrigin():"?", //$NON-NLS-1$
                    "", //$NON-NLS-1$
                    v!=null?"Type = "+v.toStringMeta():"?" //$NON-NLS-1$ //$NON-NLS-2$
                    );
            impact.add(ii);
        }
        // Fields retrieved from (lookup) or stored into (update) the dimension.
        for (int i=0;i<fieldLookup.length ;i++)
        {
            Value v = prev.searchValue(fieldStream[i]);
            DatabaseImpact ii = new DatabaseImpact( impactType,
                    transMeta.getName(),
                    stepMeta.getName(),
                    databaseMeta.getDatabaseName(),
                    tableName,
                    fieldLookup[i],
                    fieldLookup[i],
                    v!=null?v.getOrigin():"?", //$NON-NLS-1$
                    "", //$NON-NLS-1$
                    v!=null?"Type = "+v.toStringMeta():"?" //$NON-NLS-1$ //$NON-NLS-2$
                    );
            impact.add(ii);
        }
    }
}
/** Creates the settings dialog for this step. */
public StepDialogInterface getDialog(Shell shell, StepMetaInterface info, TransMeta transMeta, String name)
{
    return new DimensionLookupDialog(shell, info, transMeta, name);
}
/** Creates the runtime step instance that executes this metadata. */
public StepInterface getStep(StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta tr, Trans trans)
{
    return new DimensionLookup(stepMeta, stepDataInterface, cnr, tr, trans);
}
/** Creates a fresh data object holding the step's runtime state. */
public StepDataInterface getStepData()
{
    return new DimensionLookupData();
}
/**
 * Reports the database connections this step uses: the configured lookup
 * connection when present, otherwise whatever the superclass reports.
 */
public DatabaseMeta[] getUsedDatabaseConnections()
{
    return databaseMeta != null
            ? new DatabaseMeta[] { databaseMeta }
            : super.getUsedDatabaseConnections();
}
}
|
package big.marketing.controller;
import java.util.ArrayList;
import java.util.Map;
import java.util.Observable;
import org.apache.log4j.Logger;
import org.gephi.graph.api.GraphController;
import org.gephi.graph.api.GraphModel;
import org.gephi.io.importer.api.Container;
import org.gephi.io.importer.api.ContainerFactory;
import org.gephi.io.importer.api.ContainerLoader;
import org.gephi.io.importer.api.ImportController;
import org.gephi.io.processor.plugin.DefaultProcessor;
import org.gephi.preview.api.PreviewController;
import org.gephi.preview.api.ProcessingTarget;
import org.gephi.preview.api.RenderTarget;
import org.gephi.project.api.ProjectController;
import org.gephi.project.api.Workspace;
import org.openide.util.Lookup;
import big.marketing.data.FlowMessage;
import big.marketing.data.QueryWindowData;
import big.marketing.view.GraphJPanel;
/**
 * Bridges query results into the Gephi toolkit: each {@link QueryWindowData}
 * is imported into the current Gephi workspace and observers (the
 * {@link GraphJPanel}) are notified with the PreviewController used for
 * rendering.
 */
public class GephiController extends Observable {
    static Logger logger = Logger.getLogger(GephiController.class);
    private ImportController importController;
    private PreviewController previewController;
    private ProjectController projectController;
    private DataController dc;
    private Workspace workspace;
    // Maps read from MongoDB via the DataController; presumably IP -> host name
    // for labelling imported nodes -- confirm against GephiImporter.
    private Map<String, String> ipHostMap;

    public GephiController(DataController dc) {
        projectController = Lookup.getDefault().lookup(ProjectController.class);
        this.dc = dc;
        ipHostMap = dc.getMongoController().getHostIPMap();
        // load an empty graph for initializing the RenderTarget and Applet (in GraphPanel)
        loadEmptyContainer();
        // By now the JPanel on which to draw on is not known, so we cannot set up the RenderTarget.
        // This is done in setGraphPanel(...)
    }

    /** Imports an empty dataset, creating the initial project/workspace. */
    public void loadEmptyContainer() {
        load(new QueryWindowData(new ArrayList<FlowMessage>(), null, null, null));
    }

    /**
     * Terminates all current active Threads which name one of the given threadNames. <br>
     *
     * <b>WARNING:</b>Be careful, no security checks are done, can break the VM.
     * NOTE(review): relies on the deprecated, inherently unsafe Thread.stop().
     * @param terminationTargets names of the threads to kill
     */
    private void terminateThreads(String... terminationTargets) {
        // All Threads are organized in a tree structure, so get the root of the tree
        ThreadGroup tg = Thread.currentThread().getThreadGroup();
        while (tg.getParent() != null) {
            tg = tg.getParent();
        }
        // now we have the root node of the tree and can fetch all current active Threads
        // Allocate some additional slots because new Threads could have been started between call of tg.activeCount() and tg.enumerate()
        Thread[] threads = new Thread[tg.activeCount() + 5];
        int realCount = tg.enumerate(threads);
        for (int i = 0; i < realCount; i++) {
            Thread thread = threads[i];
            if (thread == null)
                continue;
            for (String terminationName : terminationTargets) {
                if (thread.getName().equals(terminationName)) {
                    logger.info("Killing Thread " + thread.getName());
                    thread.stop();
                }
            }
        }
    }

    /**
     * Replaces the current graph with the given dataset: clears (or creates)
     * the workspace, imports the flows via GephiImporter and notifies the
     * observers so the view re-renders.
     */
    public void load(QueryWindowData newDataset) {
        GraphModel graphModel = Lookup.getDefault().lookup(GraphController.class).getModel();
        if (graphModel != null) {
            graphModel.clear();
            projectController.cleanWorkspace(projectController.getCurrentWorkspace());
            // projectController.closeCurrentWorkspace();
            // REALLY DIRTY HACK !!!
            // NOTE(review): presumably kills a leftover Gephi view-destructor
            // thread after the clear -- confirm against the Gephi version in use.
            terminateThreads("DHNS View Destructor");
            // END REALLY DIRTY HACK!!!!
        } else {
            // No graph model yet: first call, start a fresh project.
            projectController.newProject();
        }
        // init
        importController = Lookup.getDefault().lookup(ImportController.class);
        previewController = Lookup.getDefault().lookup(PreviewController.class);
        workspace = projectController.getCurrentWorkspace();
        // import to container
        Container container = Lookup.getDefault().lookup(ContainerFactory.class).newContainer();
        GephiImporter gImporter = new GephiImporter(newDataset, ipHostMap);
        ContainerLoader loader = container.getLoader();
        gImporter.execute(loader);
        // process data from container into internal graph structure
        importController.process(container, new DefaultProcessor(), workspace);
        // update view
        setChanged();
        notifyObservers(previewController);
    }

    /** Registers the panel as observer and hands it the Processing render target. */
    public void setGraphPanel(GraphJPanel graphPanel) {
        addObserver(graphPanel);
        // now we know the Panel where to draw, so create and set the RenderTarget
        RenderTarget rt = previewController.getRenderTarget(RenderTarget.PROCESSING_TARGET);
        graphPanel.setContent((ProcessingTarget) rt);
    }

    /** Renders the current preview into the given target. */
    public void render(ProcessingTarget target) {
        previewController.render(target);
    }
}
|
import java.util.*;
/**
 * One block of input: a mask line plus the command lines that belong to it.
 */
public class Command
{
    /** Keyword introducing a mask line in the input. */
    public static final String MASK = "mask";
    /** Keyword introducing a memory-assignment line in the input. */
    public static final String MEM = "mem";

    /**
     * @param mask the mask line this block starts with
     * @param cmds the raw command lines belonging to this mask block
     */
    public Command (String mask, Vector<String> cmds)
    {
        _mask = mask;
        _commands = cmds;
    }

    /**
     * Applies this block's commands to the given memory image.
     * NOTE(review): intentionally left as a no-op in this version -- the
     * decoding/execution logic is not implemented here; confirm where it lives.
     */
    public void execute (HashMap<Long, Long> memory)
    {
    }

    /** @return the mask line followed by each command on its own line. */
    @Override
    public String toString ()
    {
        // StringBuilder avoids the O(n^2) cost of repeated '+=' concatenation.
        StringBuilder str = new StringBuilder(_mask);
        for (String command : _commands)
        {
            str.append('\n').append(command);
        }
        return str.toString();
    }

    private final String _mask;
    private final Vector<String> _commands;
}
|
package org.jgroups.conf;
import org.jgroups.Constructable;
import org.jgroups.Global;
import org.jgroups.Header;
import org.jgroups.util.IntHashMap;
import org.jgroups.util.Triple;
import org.jgroups.util.Util;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.*;
import java.util.function.Supplier;
/**
* Maintains a mapping between magic IDs and classes (defined in jg-magic-map.xml), and between protocol IDs and
* protocol classes (defined in jg-protocol-ids.xml). The first mapping is used to for fast serialization, whereas
* the second is used to assign protocol IDs to protocols at startup time.
*
* @author Filip Hanik
* @author Bela Ban
*/
public class ClassConfigurator {
    /** Default resource name of the magic-number mapping file. */
    public static final String MAGIC_NUMBER_FILE = "jg-magic-map.xml";
    /** Default resource name of the protocol-ID mapping file. */
    public static final String PROTOCOL_ID_FILE = "jg-protocol-ids.xml";
    // Tokens/attribute names used when parsing the two mapping files.
    protected static final String CLASS = "<class";
    protected static final String ID = "id";
    protected static final String NAME = "name";
    protected static final String EXTERNAL = "external";
    // Upper bounds for IDs read from the XML files (array-backed lookup below).
    private static final int MAX_MAGIC_VALUE=100;
    private static final int MAX_PROT_ID_VALUE=256;
    // User-registered IDs must be >= these values to avoid clashing with the XML ranges.
    private static final short MIN_CUSTOM_MAGIC_NUMBER=1024;
    private static final short MIN_CUSTOM_PROTOCOL_ID=512;
    // this is where we store magic numbers; contains data from jg-magic-map.xml; key=Class, value=magic number
    private static final Map<Class<?>,Short> classMap=new IdentityHashMap<>(MAX_MAGIC_VALUE);
    // Magic map for all values defined in jg-magic-map.xml; elements are supplier functions which create instances
    private static final Supplier<? extends Object>[] magicMap=new Supplier[MAX_MAGIC_VALUE];
    // Magic map for user-defined IDs / classes or suppliers
    private static final Map<Short,Object> magicMapUser=new HashMap<>(); // key=magic number, value=Class or Supplier<Header>
    /** Contains data read from jg-protocol-ids.xml */
    private static final Map<Class<?>,Short> protocol_ids=new IdentityHashMap<>(MAX_PROT_ID_VALUE);
    private static final IntHashMap<Class<?>> protocol_names=new IntHashMap<>(MAX_PROT_ID_VALUE);

    static {
        try {
            init();
        }
        catch(Exception e) {
            // Fail class loading loudly when either mapping file is missing or inconsistent.
            throw new ExceptionInInitializerError(e);
        }
    }
    /** Public no-arg constructor; all functionality is exposed statically. */
    public ClassConfigurator() {
    }
public static void addIfAbsent(short magic, Class<?> clazz) {
if(!magicMapUser.containsKey(magic) && !classMap.containsKey(clazz))
add(magic, clazz);
}
    /**
     * Registers a user-defined magic-number / class mapping.
     *
     * @param magic the ID; must be >= MIN_CUSTOM_MAGIC_NUMBER to stay clear of
     *              the range reserved for jg-magic-map.xml
     * @param clazz the class to register; must have a no-arg constructor
     * @throws IllegalArgumentException when the ID is below the custom range
     * @throws IllegalStateException    when instantiation fails or a
     *                                  Constructable returns the wrong type
     */
    public static void add(short magic, Class<?> clazz) {
        if(magic < MIN_CUSTOM_MAGIC_NUMBER)
            throw new IllegalArgumentException("magic ID (" + magic + ") must be >= " + MIN_CUSTOM_MAGIC_NUMBER);
        if(magicMapUser.containsKey(magic) || classMap.containsKey(clazz))
            alreadyInMagicMap(magic, clazz.getName());
        // Instantiate once up-front: proves a usable no-arg constructor exists and
        // lets us query the Header/Constructable metadata below.
        Object inst=null;
        try {
            inst=clazz.getDeclaredConstructor().newInstance();
        }
        catch(Exception e) {
            throw new IllegalStateException("failed creating instance " + clazz, e);
        }
        Object val=clazz;
        if(Header.class.isAssignableFrom(clazz)) { // class is a header
            // A header's own magic ID must match the number being registered.
            checkSameId((Header)inst, magic);
            val=((Header)inst).create();
        }
        if(Constructable.class.isAssignableFrom(clazz)) {
            // Store the supplier instead of the class, and verify it actually
            // produces instances of the registered class.
            val=((Constructable<?>)inst).create();
            inst=((Supplier<?>)val).get();
            if(!inst.getClass().equals(clazz))
                throw new IllegalStateException(String.format("%s.create() returned the wrong class: %s\n",
                                                              clazz.getSimpleName(), inst.getClass().getSimpleName()));
        }
        magicMapUser.put(magic, val);
        classMap.put(clazz, magic);
    }
    /**
     * Registers a user-defined protocol-ID / class mapping (both directions).
     *
     * @param id       the protocol ID; must be >= MIN_CUSTOM_PROTOCOL_ID to stay
     *                 clear of the range reserved for jg-protocol-ids.xml
     * @param protocol the protocol class to register
     * @throws IllegalArgumentException when the ID is below the custom range
     */
    public static void addProtocol(short id, Class<?> protocol) {
        if(id < MIN_CUSTOM_PROTOCOL_ID)
            throw new IllegalArgumentException("protocol ID (" + id + ") needs to be greater than or equal to " + MIN_CUSTOM_PROTOCOL_ID);
        if(protocol_ids.containsKey(protocol))
            alreadyInProtocolsMap(id, protocol.getName());
        protocol_ids.put(protocol, id);
        protocol_names.put(id, protocol);
    }
    /**
     * Creates a new instance for the given magic number. IDs at or above
     * MIN_CUSTOM_MAGIC_NUMBER are resolved against the user map (whose values
     * are either a Supplier or a Class); lower IDs are resolved against the
     * supplier array populated from jg-magic-map.xml.
     *
     * @throws ClassNotFoundException   when no mapping exists for the ID
     * @throws IllegalArgumentException when the ID is outside [0..MAX_MAGIC_VALUE]
     * @throws IllegalStateException    when reflective instantiation fails
     */
    public static <T extends Object> T create(short id) throws ClassNotFoundException {
        if(id >= MIN_CUSTOM_MAGIC_NUMBER) {
            Object val=magicMapUser.get(id);
            if(val == null)
                throw new ClassNotFoundException("Class for magic number " + id + " cannot be found, map: "+ magicMapUser);
            if (val instanceof Supplier) {
                return ((Supplier<T>) val).get();
            }
            try {
                // Registered as a plain class: invoke the no-arg constructor reflectively.
                return ((Class<T>) val).getDeclaredConstructor().newInstance();
            }
            catch (ReflectiveOperationException e) {
                throw new IllegalStateException(e);
            }
        }
        if(id < 0 || id > MAX_MAGIC_VALUE)
            throw new IllegalArgumentException(String.format("invalid magic number %d; needs to be in range [0..%d]",
                                                             id, MAX_MAGIC_VALUE));
        Supplier<?> supplier=magicMap[id];
        if(supplier == null)
            throw new ClassNotFoundException("Class for magic number " + id + " cannot be found");
        return (T)supplier.get();
    }
/**
* Loads and returns the class from the class name
*
* @param clazzname a fully classified class name to be loaded
* @return a Class object that represents a class that implements java.io.Externalizable
*/
public static Class<?> get(String clazzname, ClassLoader loader) throws ClassNotFoundException {
return Util.loadClass(clazzname, loader != null? loader : ClassConfigurator.class.getClassLoader());
}
    /** Loads the class with the given name using this class's class loader. */
    public static Class<?> get(String clazzname) throws ClassNotFoundException {
        return Util.loadClass(clazzname, ClassConfigurator.class);
    }
/**
* Returns the magic number for the class.
*
* @param clazz a class object that we want the magic number for
* @return the magic number for a class, -1 if no mapping is available
*/
public static short getMagicNumber(Class<?> clazz) {
Short i=classMap.get(clazz);
if(i == null)
return -1;
else
return i;
}
public static short getProtocolId(Class<?> protocol) {
Short retval=protocol_ids.get(protocol);
return retval != null? retval : 0;
}
    /** Returns the protocol class registered for the ID, or null when unknown. */
    public static Class<?> getProtocol(short id) {
        return protocol_names.get(id);
    }
public static String printClassMap() {
StringBuilder sb=new StringBuilder();
for(Iterator<Map.Entry<Class<?>,Short>> it=classMap.entrySet().iterator(); it.hasNext();) {
Map.Entry<Class<?>,Short> entry=it.next();
sb.append(entry.getKey()).append(": ").append(entry.getValue()).append('\n');
}
return sb.toString();
}
    /**
     * Bootstraps the mapping tables: reads jg-magic-map.xml into
     * classMap/magicMap and jg-protocol-ids.xml into protocol_ids/protocol_names.
     * The file names can be overridden via system properties. Invoked once from
     * the static initializer.
     */
    protected static void init() throws Exception {
        String magic_number_file=null, protocol_id_file=null;
        try {
            // Allow both mapping files to be overridden via system properties.
            magic_number_file=Util.getProperty(new String[]{Global.MAGIC_NUMBER_FILE, "org.jgroups.conf.magicNumberFile"},
                                               null, null, MAGIC_NUMBER_FILE);
            protocol_id_file=Util.getProperty(new String[]{Global.PROTOCOL_ID_FILE, "org.jgroups.conf.protocolIDFile"},
                                              null, null, PROTOCOL_ID_FILE);
        }
        catch (SecurityException ignored) {
            // Property access denied: keep the default file names assigned above.
        }
        // Read jg-magic-map.xml
        List<Triple<Short,String,Boolean>> mapping=readMappings(magic_number_file);
        for(Triple<Short,String,Boolean> tuple: mapping) {
            short m=tuple.getVal1();
            if(m >= MAX_MAGIC_VALUE)
                throw new IllegalArgumentException("ID " + m + " is bigger than MAX_MAGIC_VALUE (" +
                                                     MAX_MAGIC_VALUE + "); increase MAX_MAGIC_VALUE");
            boolean external=tuple.getVal3();
            if(external) {
                // "external" entries only reserve the ID here; presumably the class
                // is registered later via add() -- confirm against the XML files.
                if(magicMap[m] != null)
                    alreadyInMagicMap(m, tuple.getVal2());
                continue;
            }
            Class<?> clazz=Util.loadClass(tuple.getVal2(), ClassConfigurator.class);
            if(magicMap[m] != null)
                alreadyInMagicMap(m, clazz.getName());
            if(Constructable.class.isAssignableFrom(clazz)) {
                Constructable<?> obj=(Constructable<?>)clazz.getDeclaredConstructor().newInstance();
                magicMap[m]=obj.create();
            }
            else {
                // Fallback supplier: reflective no-arg construction, null on failure.
                Supplier<? extends Object> supplier=(Supplier<Object>)() -> {
                    try {
                        return clazz.getDeclaredConstructor().newInstance();
                    }
                    catch(Throwable throwable) {
                        return null;
                    }
                };
                magicMap[m]=supplier;
            }
            Object inst=magicMap[m].get();
            if(inst == null)
                continue;
            // test to confirm that the Constructable impl returns an instance of the correct type
            if(!inst.getClass().equals(clazz))
                throw new IllegalStateException(String.format("%s.create() returned the wrong class: %s\n",
                                                              clazz.getSimpleName(), inst.getClass().getSimpleName()));
            // check that the IDs are the same
            if(inst instanceof Header)
                checkSameId((Header)inst, m);
            classMap.put(clazz, m);
        }
        mapping=readMappings(protocol_id_file); // Read jg-protocol-ids.xml
        for(Triple<Short,String,Boolean> tuple: mapping) {
            short m=tuple.getVal1();
            boolean external=tuple.getVal3();
            if(external) {
                if(protocol_names.containsKey(m))
                    alreadyInProtocolsMap(m, tuple.getVal2());
                continue;
            }
            Class<?> clazz=Util.loadClass(tuple.getVal2(), ClassConfigurator.class);
            if(protocol_ids.containsKey(clazz))
                alreadyInProtocolsMap(m, clazz.getName());
            protocol_ids.put(clazz, m);
            protocol_names.put(m, clazz);
        }
    }
/**
 * Verifies that the magic ID a header reports for itself matches the ID defined
 * in the mapping file.
 *
 * @param hdr   the header instance created from the mapping
 * @param magic the ID defined for the header's class in the mapping file
 * @throws IllegalStateException when the two IDs differ
 */
protected static void checkSameId(Header hdr, short magic) {
short tmp_id=hdr.getMagicId();
if(tmp_id != magic)
// fix: arguments were swapped - the first %d is the header's own ID (tmp_id),
// the second is the ID defined in the mapping file (magic)
throw new IllegalStateException(String.format("mismatch between %s.getId() (%d) and the defined ID (%d)",
hdr.getClass().getSimpleName(), tmp_id, magic));
}
/**
 * Fails with an {@link IllegalArgumentException} reporting a duplicate magic number.
 *
 * @param magic     the duplicated magic number
 * @param classname the class that attempted to re-register the number
 */
protected static void alreadyInMagicMap(short magic, String classname) {
String msg="key " + magic + " (" + classname + ')' +
" is already in magic map; make sure that all keys are unique";
throw new IllegalArgumentException(msg);
}
/**
 * Fails with an {@link IllegalArgumentException} reporting a duplicate protocol ID.
 *
 * @param prot_id   the duplicated protocol ID
 * @param classname the class that attempted to re-register the ID
 */
protected static void alreadyInProtocolsMap(short prot_id, String classname) {
String msg="ID " + prot_id + " (" + classname + ')' +
" is already in protocol-ids map; make sure that all protocol IDs are unique";
throw new IllegalArgumentException(msg);
}
/**
 * Reads a mapping file, first as a classpath resource and, failing that, from the file
 * system, and parses it into (id, classname, external) triples.
 *
 * @param name the resource or file name of the mapping file
 * @return the parsed list of (id, classname, external) triples
 * @throws Exception when the file cannot be found, read or parsed
 */
protected static List<Triple<Short,String,Boolean>> readMappings(String name) throws Exception {
InputStream stream=Util.getResourceAsStream(name, ClassConfigurator.class);
// try to load the map from file even if it is not a Resource in the class path
if(stream == null)
stream=new FileInputStream(name);
// fix: the stream was previously never closed (resource leak)
try(InputStream in=stream) {
return parse(in);
}
}
/**
 * Parses the class-mapping lines of a mapping file into (id, classname, external) triples.
 * Each line is expected to contain quoted {@code id} and {@code name} attributes; the
 * {@code external} attribute is optional and defaults to false.
 *
 * @param in the stream to read the mapping entries from
 * @return one triple per {@code <class .../>} entry found in the stream
 * @throws Exception when an attribute is missing or a value cannot be parsed
 */
protected static List<Triple<Short,String,Boolean>> parse(InputStream in) throws Exception {
List<String> lines=parseLines(in);
List<Triple<Short,String,Boolean>> retval=new ArrayList<>();
for(String line: lines) {
short id;
String name;
boolean external=false;
int index=line.indexOf(ID);
if(index == -1)
notFound(ID, line);
// skip past the attribute name and '=' to reach the opening quote
index+=ID.length()+1;
id=Short.parseShort(parseNextString(line, index));
index=line.indexOf(NAME);
if(index == -1)
notFound(NAME, line);
index+=NAME.length()+1;
name=parseNextString(line, index);
// "external" is optional; absent means the class is loaded locally
index=line.indexOf(EXTERNAL);
if(index >= 0) {
index+=EXTERNAL.length()+1;
external=Boolean.parseBoolean(parseNextString(line, index));
}
Triple<Short,String,Boolean> t=new Triple<>(id, name, external);
retval.add(t);
}
return retval;
}
/**
 * Fails with an {@link IllegalStateException} reporting that a required attribute
 * is missing from a mapping-file line.
 *
 * @param id   the attribute that was expected
 * @param line the line in which it was not found
 */
protected static void notFound(String id, String line) {
String msg=String.format("%s not found in line %s", id, line);
throw new IllegalStateException(msg);
}
/**
 * Returns the next double-quoted token in {@code line}, searching from {@code index}.
 *
 * @param line  the line to scan
 * @param index the position at which to start looking for the opening quote
 * @return the text between the next pair of double quotes
 * @throws IllegalStateException when no opening or closing quote is found
 */
protected static String parseNextString(String line, int index) {
int start=line.indexOf("\"", index);
if(start == -1) // fix: previously tested 'index', which can never be -1 here
notFound("\"", line.substring(index));
int end=line.indexOf("\"", start+1);
if(end == -1) // fix: previously tested 'index' instead of the closing-quote position
notFound("\"", line.substring(index+1));
return line.substring(start+1, end);
}
/**
 * Collects all {@code <class .../>} entries from the stream, one complete entry
 * (token plus everything up to the closing '>') per list element.
 *
 * @param in the stream to read whitespace-separated tokens from
 * @return the list of class-mapping lines found in the stream
 * @throws IOException when reading from the stream fails
 */
protected static List<String> parseLines(InputStream in) throws IOException {
List<String> lines=new LinkedList<>();
String token;
while((token=Util.readToken(in)) != null) {
token=token.trim();
if(!token.startsWith(CLASS))
continue;
lines.add(token + " " + readTillMatchingParens(in));
}
return lines;
}
/**
 * Reads characters from the stream up to and including the next '>' character,
 * or until the end of the stream is reached.
 *
 * @param in the stream to read from
 * @return everything read, including the terminating '>' when present
 * @throws IOException when reading from the stream fails
 */
protected static String readTillMatchingParens(InputStream in) throws IOException {
StringBuilder buf=new StringBuilder();
int ch;
while((ch=in.read()) != -1) {
buf.append((char)ch);
if(ch == '>')
break;
}
return buf.toString();
}
}
|
package uk.ac.ed.inf.Metabolic.parser;
import java.util.List;
import org.pathwayeditor.businessobjectsAPI.IContextProperty;
import org.pathwayeditor.businessobjectsAPI.ILabel;
import org.pathwayeditor.businessobjectsAPI.ILink;
import org.pathwayeditor.businessobjectsAPI.IMapObject;
import org.pathwayeditor.businessobjectsAPI.IPort;
import org.pathwayeditor.businessobjectsAPI.IRootMapObject;
import org.pathwayeditor.businessobjectsAPI.Location;
import org.pathwayeditor.contextadapter.publicapi.IValidationRuleDefinition;
import org.pathwayeditor.contextadapter.toolkit.ndom.AbstractNDOMParser;
import org.pathwayeditor.contextadapter.toolkit.ndom.ModelObject;
import org.pathwayeditor.contextadapter.toolkit.ndom.NdomException;
import org.pathwayeditor.contextadapter.toolkit.validation.RuleValidationReportBuilder;
import uk.ac.ed.inf.Metabolic.ndomAPI.ERelType;
import uk.ac.ed.inf.Metabolic.ndomAPI.IModel;
/**
* $Id$
*
* @author Anatoly Sorokin
* @date 12 May 2008
*
*/
public abstract class NDOMFactory extends AbstractNDOMParser {
// the metabolic model (NDOM) being assembled by this factory
protected MetabolicModel ndom;
// collects validation results; must be set via setReportBuilder() before parsing
private RuleValidationReportBuilder reportBuilder;
// singleton loader used to resolve concrete validation rules from their definitions
private MetabolicRuleLoader loader = (MetabolicRuleLoader) MetabolicRuleLoader.getInstance();
// private
public NDOMFactory(IRootMapObject rmo) {
super(rmo);
}
public NDOMFactory() {
super();
}
// could be in abstract class??
public IModel getNdom() {
return ndom;
}
/**
 *
 * @param parent
 * @param mapObject
 */
protected abstract void compartment(MetabolicCompartment parent,
IMapObject mapObject);
protected abstract void compound(MetabolicCompartment compartment,
IMapObject mapObject);
protected abstract void compound(MetabolicMacromolecule m,
IMapObject mapObject);
protected abstract void macromolecule(MetabolicCompartment comaprtment,
IMapObject mapObject);
protected abstract void macromolecule(MetabolicMacromolecule parent,
IMapObject mapObject);
protected abstract void process(ModelObject parent, IMapObject mapObject);
/**
 * Creates Activation relation in the model. That method creates activation
 * relation by invocation of {@link #activation(ILink)} method and
 * registered this relation with reaction <code>r</code> and source
 * shape,obtained from <code>el</code>. Preconditions:<br>
 * <ul>
 * <li>Reaction is not <code>NULL</code></li>
 * <li> link <code>el</code> has non-null and registered with the model
 * source shape </li>
 * </ul>
 * Postconditions:<br>
 * <ul>
 * <li>Fully defined valid Relation object of type
 * {@link ERelType#Activation}</li>
 * </ul>
 *
 * @param el
 *            non-null Link object to convert to Relation
 * @param r
 *            non-null reaction to register new Relation with
 */
protected abstract void activate(ILink el, MetabolicReaction r);
/**
 * Creates Catalysis relation in the model. That method creates catalysis
 * relation by invocation of {@link #catalysis(ILink)} method and registered
 * this relation with reaction <code>r</code> and source shape,obtained
 * from <code>el</code>. Preconditions:<br>
 * <ul>
 * <li>Reaction is not <code>NULL</code></li>
 * <li> link <code>el</code> has non-null and registered with the model
 * source shape </li>
 * </ul>
 * Postconditions:<br>
 * <ul>
 * <li>Fully defined valid Relation object of type
 * {@link ERelType#Catalysis}</li>
 * </ul>
 *
 * @param el
 *            non-null Link object to convert to Relation
 * @param r
 *            non-null reaction to register new Relation with
 */
protected abstract void catalysis(ILink el, MetabolicReaction r);
/**
 * Creates inhibitory relation in the model. That method creates inhibitory
 * relation by invocation of {@link #inhibition(ILink)} method and
 * registered this relation with reaction <code>r</code> and source
 * shape,obtained from <code>el</code>. Preconditions:<br>
 * <ul>
 * <li>Reaction is not <code>NULL</code></li>
 * <li> link <code>el</code> has non-null and registered with the model
 * source shape </li>
 * </ul>
 * Postconditions:<br>
 * <ul>
 * <li>Fully defined valid Relation object of type
 * {@link ERelType#Inhibition}</li>
 * </ul>
 *
 * @param el
 *            non-null Link object to convert to Relation
 * @param r
 *            non-null reaction to register new Relation with
 */
protected abstract void inhibit(ILink el, MetabolicReaction r);
/**
 * Creates Production relation in the model. That method creates production
 * relation by invocation of {@link #production(ILink)} method and
 * registered this relation with reaction <code>r</code> and source
 * shape,obtained from <code>el</code>. Preconditions:<br>
 * <ul>
 * <li>Reaction is not <code>NULL</code></li>
 * <li> link <code>el</code> has non-null and registered with the model
 * source shape </li>
 * </ul>
 * Postconditions:<br>
 * <ul>
 * <li>Fully defined valid Relation object of type
 * {@link ERelType#Production}</li>
 * </ul>
 *
 * @param el
 *            non-null Link object to convert to Relation
 * @param r
 *            non-null reaction to register new Relation with
 */
protected abstract void products(ILink el, MetabolicReaction r);
/**
 * Creates Consumption relation in the model. That method creates
 * consumption relation by invocation of {@link #consumption(ILink)} method
 * and registered this relation with reaction <code>r</code> and source
 * shape,obtained from <code>el</code>. Preconditions:<br>
 * <ul>
 * <li>Reaction is not <code>NULL</code></li>
 * <li> link <code>el</code> has non-null and registered with the model
 * source shape </li>
 * </ul>
 * Postconditions:<br>
 * <ul>
 * <li>Fully defined valid Relation object of type
 * {@link ERelType#Consumption}</li>
 * </ul>
 *
 * @param el
 *            non-null Link object to convert to Relation
 * @param r
 *            non-null reaction to register new Relation with
 */
protected abstract void substrate(ILink el, MetabolicReaction r);
/**
 * Builds a {@link MetabolicCompartment} from a map object, copying name,
 * descriptions and the "GO term" property. The volume is fixed at 1.0.
 */
protected MetabolicCompartment compartment(IMapObject mapObject) {
MetabolicCompartment compartment = new MetabolicCompartment(
getId(mapObject), mapObject.getName().getHTML(),
AbstractNDOMParser.getASCIIName(mapObject), ndom);
compartment.setDescription(mapObject.getDescription().getHTML());
compartment.setDetailedDescription(mapObject.getDetailedDescription()
.getHTML());
compartment
.setGOTerm(mapObject.getPropertyByName("GO term").getValue());
compartment.setVolume(1.0d);
return compartment;
}
/**
 * Builds a {@link MetabolicCompound} from a map object, copying name, descriptions
 * and the chemical identifier properties (CID, ChEBI, InChI, PubChem, SMILES).
 * The initial concentration is set only when it passes validation (see setIC).
 */
protected MetabolicCompound compound(IMapObject mapObject) {
MetabolicCompound comp = new MetabolicCompound(getId(mapObject),
mapObject.getName().getHTML(), AbstractNDOMParser
.getASCIIName(mapObject));
comp.setDescription(mapObject.getDescription().getHTML());
comp.setDetailedDescription(mapObject.getDetailedDescription()
.getHTML());
comp.setCID(mapObject.getPropertyByName("CID").getValue());
comp.setChEBIId(mapObject.getPropertyByName("ChEBI").getValue());
comp.setInChI(mapObject.getPropertyByName("InChI").getValue());
comp.setPubChemId(mapObject.getPropertyByName("PubChem").getValue());
comp.setSmiles(mapObject.getPropertyByName("SMILES").getValue());
setIC(mapObject, comp);
return comp;
}
/**
 * Sets the initial concentration (IC) on the compound, but only when the value
 * passes the IC validation rule; failures are recorded in the report builder.
 */
private void setIC(IMapObject mapObject, MetabolicCompound comp) {
IValidationRuleDefinition rd= reportBuilder.getRuleStore().getRuleById(MetabolicRuleLoader.IC_ERROR_ID);
DoublePropertyRule r=(DoublePropertyRule) loader.getRuleByDef(rd);
r.setObject(mapObject);
if(r.validate(reportBuilder)){
comp.setIC(r.getValue());
}
}
/**
 * Builds a {@link MetabolicMacromolecule} from a map object and recursively
 * converts its Compound and Macromolecule children.
 */
protected MetabolicMacromolecule macromolecule(IMapObject mapObject) {
MetabolicMacromolecule m = new MetabolicMacromolecule(getId(mapObject),
mapObject.getName().getHTML(), AbstractNDOMParser
.getASCIIName(mapObject));
m.setDescription(mapObject.getDescription().getHTML());
m.setDetailedDescription(mapObject.getDetailedDescription().getHTML());
m.setGOTerm(mapObject.getPropertyByName("GO term").getValue());
m.setUniProt(mapObject.getPropertyByName("UniProt").getValue());
List<IMapObject> ch = mapObject.getChildren();
for (IMapObject el : ch) {
String ot = el.getObjectType().getTypeName();
if ("Compound".equals(ot)) {
compound(m, el);
} else if ("Macromolecule".equals(ot)) {
macromolecule(m, el);
}
}
return m;
}
@Override
protected void ndom() {
// create the model itself; the map name is used for both display name and ASCII name
ndom = new MetabolicModel(getId(getRmo()), getRmo().getParentMap()
.getName(), getRmo().getParentMap().getName());// AbstractNDOMParser.getASCIIName(getRmo()));
ndom.setDescription(getRmo().getParentMap().getDescription());
ndom.setDetailedDescription("");
}
/**
 * Builds a {@link MetabolicReaction} from a map object, copying name, descriptions,
 * EC number, kinetic law, parameters (validated) and reversibility.
 */
protected MetabolicReaction process(IMapObject mapObject) {
MetabolicReaction re = new MetabolicReaction(getId(mapObject),
mapObject.getName().getHTML(), AbstractNDOMParser
.getASCIIName(mapObject));
re.setDescription(mapObject.getDescription().getHTML());
re.setDetailedDescription(mapObject.getDetailedDescription().getHTML());
re.setECNumber(mapObject.getPropertyByName("EC").getValue());
re.setKineticLaw(mapObject.getPropertyByName("KineticLaw").getValue());
setReParam(mapObject, re);
// re.setParameters(mapObject.getPropertyByName("Parameters").getValue());
IContextProperty revProp = mapObject.getPropertyByName("Reversibility");
String value = revProp.getValue();
re.setReversible(value);
return re;
}
@Override
protected void rmo() {
// the root map object becomes the default compartment; all top-level
// children (except labels) are converted and attached to it
// MetabolicCompartment def = new MetabolicCompartment(getId(getRmo()),
// "default", "default", ndom);
MetabolicCompartment def = compartment(getRmo());
try {
ndom.addCompartment(def);
List<IMapObject> ch = getRmo().getChildren();
for (IMapObject el : ch) {
if(!(el instanceof ILabel)){
String ot = el.getObjectType().getTypeName();
if ("Compartment".equals(ot)) {
compartment(def, el);
} else if ("Process".equals(ot)) {
process(def, el);
} else if ("Compound".equals(ot)) {
compound(def, el);
} else if ("Macromolecule".equals(ot)) {
macromolecule(def, el);
}
}
}
} catch (NdomException e) {
report(e);
// e.printStackTrace();
}
}
/** Builds a Production relation; role comes from the target port, stoichiometry from the source port. */
protected MetabolicRelation production(ILink el) {
MetabolicRelation rel = relation(el, ERelType.Production);
rel.setRole(el.getTargetPort().getPropertyByName("ROLE").getValue());
// rel.setStoichiometry(getInt(el.getSrcPort().getPropertyByName("STOICH")
// .getValue(), "Wrong stoichiometry\t"));
setStoichiometry(el, el.getSrcPort(),rel);
return rel;
}
/** Builds an Activation relation; role comes from the target port, stoichiometry from the source port. */
protected MetabolicRelation activation(ILink el) {
MetabolicRelation rel = relation(el, ERelType.Activation);
rel.setRole(el.getTargetPort().getPropertyByName("ROLE").getValue());
// rel.setStoichiometry(getInt(el.getSrcPort().getPropertyByName("STOICH")
// .getValue(), "Wrong stoichiometry\t"));
setStoichiometry(el,el.getSrcPort(), rel);
return rel;
}
/**
 * Sets the stoichiometry on the relation, but only when the port's STOICH value
 * passes the stoichiometry validation rule; failures are recorded in the report builder.
 */
protected void setStoichiometry(ILink el, IPort p,MetabolicRelation rel) {
IValidationRuleDefinition rd= reportBuilder.getRuleStore().getRuleById(MetabolicRuleLoader.STOICH_ERROR_ID);
IntPropertyRule r=(IntPropertyRule) loader.getRuleByDef(rd);
r.setObject(p);
r.setRefObject(el);
if(r.validate(reportBuilder)){
rel.setStoichiometry(r.getValue());
}
}
/**
 * check validity of parameter string in the process node.
 * Parameter string should contains set of name=value pairs separated by <code>;</code>.
 * RegExp:<br><code>^(\\s*\\w+\\s*=\\s*[0-9eE-+.]\\s*;)+$</code>
 * @param re process node;
 */
protected void setReParam(IMapObject mapObject, MetabolicReaction re) {
IValidationRuleDefinition rd= reportBuilder.getRuleStore().getRuleById(MetabolicRuleLoader.RE_PARAM_ERROR_ID);
RegexpPropertyRule r=(RegexpPropertyRule) loader.getRuleByDef(rd);
r.setObject(mapObject);
if(r.validate(reportBuilder)){
re.setParameters(r.getValue());
}
}
/** Builds an Inhibition relation; role comes from the target port, stoichiometry from the source port. */
protected MetabolicRelation inhibition(ILink el) {
MetabolicRelation rel = relation(el, ERelType.Inhibition);
rel.setRole(el.getTargetPort().getPropertyByName("ROLE").getValue());
// rel.setStoichiometry(getInt(el.getSrcPort().getPropertyByName("STOICH")
// .getValue(), "Wrong stoichiometry\t"));
setStoichiometry(el, el.getSrcPort(),rel);
return rel;
}
/** Builds a Catalysis relation; role comes from the target port, stoichiometry from the source port. */
protected MetabolicRelation catalysis(ILink el) {
MetabolicRelation rel = relation(el, ERelType.Catalysis);
rel.setRole(el.getTargetPort().getPropertyByName("ROLE").getValue());
// rel.setStoichiometry(getInt(el.getSrcPort().getPropertyByName("STOICH")
// .getValue(), "Wrong stoichiometry\t"));
setStoichiometry(el, el.getSrcPort(),rel);
return rel;
}
/** Builds a Consumption relation; note role comes from the SOURCE port and stoichiometry from the TARGET port. */
protected MetabolicRelation consumption(ILink el) {
MetabolicRelation rel = relation(el, ERelType.Consumption);
rel.setRole(el.getSrcPort().getPropertyByName("ROLE").getValue());
// rel.setStoichiometry(getInt(el.getTargetPort().getPropertyByName(
// "STOICH").getValue(), "Wrong stoichiometry\t"));
setStoichiometry(el, el.getTargetPort(),rel);
return rel;
}
/** Common factory for all relation types: copies descriptions and the VarName property from the link. */
protected MetabolicRelation relation(ILink el, ERelType type) {
MetabolicRelation rel = new MetabolicRelation(getId(el), "", "", type);
rel.setDescription(el.getDescription().getHTML());
rel.setDetailedDescription(el.getDetailedDescription().getHTML());
rel.setVarName(el.getPropertyByName("VarName").getValue());
return rel;
}
/** Injects the report builder and prepares the validation rules; must be called before parsing. */
public void setReportBuilder(RuleValidationReportBuilder reportBuilder) {
this.reportBuilder = reportBuilder;
prepareParsingRules();
}
public RuleValidationReportBuilder getReportBuilder() {
return reportBuilder;
}
/**
 * Extracts rules from store. All rules, which could be used on
 * parser step should be extracted and ready to use.
 */
private void prepareParsingRules() {
}
@Override
protected void connectivity() {
// TODO Auto-generated method stub
}
// the following overrides deliberately disable the inherited reporting/parsing
// helpers; subclasses are expected to provide their own handling
@Override
protected void error(String message) {
// TODO Auto-generated method stub
// super.error(message);
throw new UnsupportedOperationException ();
}
protected double getAngle(Location srcLoc, Location newLoc) {
throw new UnsupportedOperationException ();
}
@Override
protected double getDouble(String st, String message) {
throw new UnsupportedOperationException ();
}
@Override
protected int getInt(String st, String message) {
throw new UnsupportedOperationException ();
}
@Override
public List<String> getReport() {
throw new UnsupportedOperationException ();
}
@Override
protected void report(NdomException e) {
throw new UnsupportedOperationException ();
}
@Override
protected void warning(String message) {
throw new UnsupportedOperationException ();
}
}
/*
* $Log: NDOMFactory.java,v $ Revision 1.6 2008/07/15 11:14:32 smoodie
* Refactored so code compiles with new Toolkit framework.
*
* Revision 1.5 2008/06/27 13:22:15 radams adapt to validation service interface
* changes
*
* Revision 1.4 2008/06/20 22:48:19 radams imports
*
* Revision 1.3 2008/06/09 13:26:29 asorokin Bug fixes for SBML export
*
* Revision 1.2 2008/06/02 15:15:13 asorokin KineticLaw parameters parsing and
* validation
*
* Revision 1.1 2008/06/02 10:31:42 asorokin Reference to Service provider from
* all Service interfaces
*
*/
|
package org.jgroups.protocols.pbcast;
import org.jgroups.*;
import org.jgroups.annotations.DeprecatedProperty;
import org.jgroups.annotations.GuardedBy;
import org.jgroups.annotations.MBean;
import org.jgroups.annotations.ManagedAttribute;
import org.jgroups.annotations.ManagedOperation;
import org.jgroups.annotations.Property;
import org.jgroups.stack.Protocol;
import org.jgroups.util.Digest;
import org.jgroups.util.Promise;
import org.jgroups.util.Streamable;
import org.jgroups.util.Util;
import java.io.*;
import java.util.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Flush, as it name implies, forces group members to flush their pending
* messages while blocking them to send any additional messages. The process of
* flushing acquiesces the group so that state transfer or a join can be done.
* It is also called stop-the-world model as nobody will be able to send
* messages while a flush is in process.
*
* <p>
* Flush is needed for:
* <p>
* (1) State transfer. When a member requests state transfer, the coordinator
* tells everyone to stop sending messages and waits for everyone's ack. Then it
* asks the application for its state and ships it back to the requester. After
* the requester has received and set the state successfully, the coordinator
* tells everyone to resume sending messages.
* <p>
* (2) View changes (e.g.a join). Before installing a new view V2, flushing
* would ensure that all messages *sent* in the current view V1 are indeed
* *delivered* in V1, rather than in V2 (in all non-faulty members). This is
* essentially Virtual Synchrony.
*
*
*
* @author Vladimir Blagojevic
* @version $Id$
* @since 2.4
*/
@MBean(description="Flushes the cluster")
@DeprecatedProperty(names={"auto_flush_conf"})
public class FLUSH extends Protocol {
public static final String NAME = "FLUSH";
@Property(description="Max time to keep channel blocked in flush. Default is 8000 msec")
private long timeout = 8000;
// NOTE(review): description says "Default is 2500 msec" but the field default is 2000 - confirm which is intended
@Property(description="Timeout (per atttempt) to quiet the cluster during the first flush phase. Default is 2500 msec")
private long start_flush_timeout = 2000;
// NOTE(review): description says "Default is 3000 msec" but the field default is 2000 - confirm which is intended
@Property(description="Retry timeout after an unsuccessful attempt to quiet the cluster (first flush phase). Default is 3000 msec")
private long retry_timeout = 2000;
@Property(description="Reconcilliation phase toggle. Default is true")
private boolean enable_reconciliation = true;
@Property(description="Max number of attempts to quiet the cluster during first flush phase. Default is 4")
private int flush_retry_count = 4;
// JMX statistics for flush rounds
private long startFlushTime;
private long totalTimeInFlush;
private int numberOfFlushes;
private double averageFlushDuration;
@GuardedBy("sharedLock")
private View currentView;
private Address localAddress;
/**
 * Group member that requested FLUSH. For view installations flush
 * coordinator is the group coordinator For state transfer flush coordinator
 * is the state requesting member
 */
@GuardedBy("sharedLock")
private Address flushCoordinator;
@GuardedBy("sharedLock")
private final List<Address> flushMembers;
// counts views seen since start; used to detect the channel's first view
private final AtomicInteger viewCounter = new AtomicInteger(0);
@GuardedBy("sharedLock")
private final Map<Address, Digest> flushCompletedMap;
@GuardedBy("sharedLock")
private final List<Address> flushNotCompletedMap;
@GuardedBy("sharedLock")
private final Set<Address> suspected;
@GuardedBy("sharedLock")
private final List<Address> reconcileOks;
// guards the flush bookkeeping collections above
private final Object sharedLock = new Object();
// guards isBlockingFlushDown; threads sending mcasts wait on this while a flush runs
private final Object blockMutex = new Object();
/**
 * Indicates if FLUSH.down() is currently blocking threads Condition
 * predicate associated with blockMutex
 */
@GuardedBy("blockMutex")
private volatile boolean isBlockingFlushDown = true;
@GuardedBy("sharedLock")
private boolean flushCompleted = false;
private volatile boolean allowMessagesToPassUp = false;
// completed with TRUE/FALSE when the first flush phase succeeds/fails
private final Promise<Boolean> flush_promise = new Promise<Boolean>();
private final AtomicBoolean flushInProgress = new AtomicBoolean(false);
private final AtomicBoolean sentBlock = new AtomicBoolean(false);
private final AtomicBoolean sentUnblock = new AtomicBoolean(false);
// initializes the bookkeeping collections; an empty view is installed until the first VIEW_CHANGE
public FLUSH(){
super();
currentView = new View(new ViewId(), new Vector<Address>());
flushCompletedMap = new HashMap<Address, Digest>();
flushNotCompletedMap = new ArrayList<Address>();
reconcileOks = new ArrayList<Address>();
flushMembers = new ArrayList<Address>();
suspected = new TreeSet<Address>();
}
/** Returns this protocol's name ("FLUSH"). */
public String getName() {
return NAME;
}
/** Returns the per-attempt timeout (msec) for the first flush phase. */
public long getStartFlushTimeout() {
return start_flush_timeout;
}
/** Sets the per-attempt timeout (msec) for the first flush phase. */
public void setStartFlushTimeout(long start_flush_timeout) {
this.start_flush_timeout=start_flush_timeout;
}
/** Returns the retry timeout (msec) applied after an unsuccessful flush attempt. */
public long getRetryTimeout() {
return retry_timeout;
}
/** Sets the retry timeout (msec) applied after an unsuccessful flush attempt. */
public void setRetryTimeout(long retry_timeout) {
this.retry_timeout=retry_timeout;
}
/**
 * Announces flush support to the protocols above and below via CONFIG events and
 * resets the flush state: the view counter starts at 0, down-traffic is blocked
 * and application messages are withheld until the first flush round completes.
 *
 * @throws Exception propagated from the protocol framework
 */
public void start() throws Exception {
Map<String, Object> map = new HashMap<String, Object>();
map.put("flush_supported", Boolean.TRUE);
up_prot.up(new Event(Event.CONFIG, map));
down_prot.down(new Event(Event.CONFIG, map));
viewCounter.set(0);
synchronized(blockMutex){
isBlockingFlushDown = true;
}
allowMessagesToPassUp = false;
}
/**
 * Clears all flush bookkeeping state under sharedLock and installs an empty view,
 * returning the protocol to its pre-connect state.
 */
public void stop() {
synchronized(sharedLock){
currentView = new View(new ViewId(), new Vector<Address>());
flushCompletedMap.clear();
flushNotCompletedMap.clear();
flushMembers.clear();
suspected.clear();
flushCoordinator = null;
}
}
/** Returns the average duration (msec) of completed flush rounds. */
@ManagedAttribute
public double getAverageFlushDuration() {
return averageFlushDuration;
}
/** Returns the cumulative time (msec) spent in flush rounds. */
@ManagedAttribute
public long getTotalTimeInFlush() {
return totalTimeInFlush;
}
/** Returns the number of flush rounds executed so far. */
@ManagedAttribute
public int getNumberOfFlushes() {
return numberOfFlushes;
}
/**
 * Requests a cluster-wide flush by issuing a SUSPEND event.
 *
 * @return true when the flush completed successfully
 */
@ManagedOperation(description="Request cluster flush")
public boolean startFlush() {
return startFlush(new Event(Event.SUSPEND));
}
/**
 * Runs a flush round triggered by a SUSPEND event.
 *
 * @param evt the SUSPEND event; its argument carries the list of flush participants
 * @return true when the flush completed successfully within the timeout
 */
private boolean startFlush(Event evt){
if(log.isDebugEnabled())
log.debug("Received " + evt + " at " + localAddress + ". Running FLUSH...");
// unchecked cast: assumes every SUSPEND event carries a List<Address> arg - TODO confirm all senders comply
List<Address> flushParticipants = (List<Address>) evt.getArg();
return startFlush(flushParticipants);
}
/**
 * Starts the first flush phase and waits up to start_flush_timeout msec for the outcome
 * via flush_promise. When another flush is already in progress, nothing is started and
 * failure is reported.
 *
 * @param flushParticipants the members that must acknowledge the flush
 * @return true when the flush completed successfully within the timeout
 */
private boolean startFlush(List<Address> flushParticipants) {
boolean successfulFlush = false;
if (!flushInProgress.get()) {
flush_promise.reset();
onSuspend(flushParticipants);
try {
Boolean r = flush_promise.getResultWithTimeout(start_flush_timeout);
successfulFlush = r.booleanValue();
} catch (TimeoutException e) {
// timeout is treated as an unsuccessful flush, only logged at debug level
if (log.isDebugEnabled())
log.debug("At " + localAddress
+ " timed out waiting for flush responses after "
+ start_flush_timeout + " msec");
}
}
return successfulFlush;
}
/** Ends a running flush round by issuing a RESUME event down the stack. */
@ManagedOperation(description="Request end of flush in a cluster")
public void stopFlush() {
down(new Event(Event.RESUME));
}
/**
 * Handles events travelling down the stack. Multicast messages are blocked while a flush
 * is in progress unless they carry a FLUSH_BYPASS header; unicasts always pass. CONNECT
 * events trigger a one-time BLOCK notification to the channel; SUSPEND/RESUME start and
 * stop a flush round.
 *
 * @param evt the event passed down by the layer above
 * @return the result of the protocol below, or null for consumed events
 */
public Object down(Event evt) {
switch(evt.getType()){
case Event.MSG:
Message msg = (Message) evt.getArg();
Address dest = msg.getDest();
if(dest == null || dest.isMulticastAddress()){
//mcasts
FlushHeader fh = (FlushHeader) msg.getHeader(getName());
if(fh != null && fh.type == FlushHeader.FLUSH_BYPASS){
return down_prot.down(evt);
}
else{
// may park the calling thread until the flush ends (or times out)
blockMessageDuringFlush();
}
}else{
//unicasts are irrelevant in virtual synchrony, let them through
return down_prot.down(evt);
}
break;
case Event.CONNECT:
case Event.CONNECT_WITH_STATE_TRANSFER:
case Event.CONNECT_USE_FLUSH:
case Event.CONNECT_WITH_STATE_TRANSFER_USE_FLUSH:
// notify the channel exactly once per connect attempt
if(sentBlock.compareAndSet(false, true)){
sendBlockUpToChannel();
}
Object result=down_prot.down(evt);
if(result instanceof Throwable) {
sentBlock.set(false); // set the var back to its original state if we cannot connect successfully
}
return result;
case Event.SUSPEND:
return startFlush(evt);
case Event.RESUME:
onResume(evt);
return null;
case Event.SET_LOCAL_ADDRESS:
localAddress = (Address) evt.getArg();
break;
}
return down_prot.down(evt);
}
/**
 * Parks the calling thread on blockMutex while a flush is in progress. When the
 * configured timeout elapses without the flush finishing, the block is lifted
 * unilaterally, a warning is logged, and flush_promise is completed with TRUE so
 * the waiting flush initiator is released.
 */
private void blockMessageDuringFlush() {
boolean shouldSuspendByItself = false;
long start = 0, stop = 0;
synchronized(blockMutex){
while(isBlockingFlushDown){
if(log.isDebugEnabled())
log.debug("FLUSH block at " + localAddress
+ " for "
+ (timeout <= 0 ? "ever" : timeout + "ms"));
try{
start = System.currentTimeMillis();
// timeout <= 0 means block indefinitely until notified
if(timeout <= 0)
blockMutex.wait();
else
blockMutex.wait(timeout);
stop = System.currentTimeMillis();
}catch(InterruptedException e){
Thread.currentThread().interrupt(); // set interrupt flag again
}
// still blocking after the wait => the flush never finished; unblock ourselves
if(isBlockingFlushDown){
isBlockingFlushDown = false;
shouldSuspendByItself = true;
blockMutex.notifyAll();
}
}
}
if(shouldSuspendByItself){
log.warn("unblocking FLUSH.down() at " + localAddress
+ " after timeout of "
+ (stop - start)
+ "ms");
flush_promise.setResult(Boolean.TRUE);
}
}
/**
 * Handles events travelling up the stack. Messages carrying a FlushHeader drive the
 * flush state machine (START_FLUSH, RECONCILE, STOP, ABORT, NOT_COMPLETED, COMPLETED)
 * and are consumed here; ordinary multicast application messages are withheld until
 * the flush's STOP phase re-enables delivery. VIEW_CHANGE, TMP_VIEW, SUSPECT,
 * SUSPEND and RESUME events update the flush bookkeeping.
 *
 * @param evt the event passed up by the layer below
 * @return the result of the protocol above, or null for consumed events
 */
public Object up(Event evt) {
switch(evt.getType()){
case Event.MSG:
Message msg = (Message) evt.getArg();
final FlushHeader fh = (FlushHeader) msg.getHeader(getName());
if(fh != null){
switch(fh.type){
case FlushHeader.FLUSH_BYPASS:
return up_prot.up(evt);
case FlushHeader.START_FLUSH:
// only flush participants (or the requester itself) respond to START_FLUSH
Collection<? extends Address> fp=fh.flushParticipants;
boolean amIParticipant = (fp != null && fp.contains(localAddress)) || msg.getSrc().equals(localAddress);
if(amIParticipant){
handleStartFlush(msg, fh);
}
else{
if (log.isDebugEnabled())
log.debug("Received START_FLUSH at " + localAddress
+ " but I am not flush participant, not responding");
}
break;
case FlushHeader.FLUSH_RECONCILE:
handleFlushReconcile(msg, fh);
break;
case FlushHeader.FLUSH_RECONCILE_OK:
onFlushReconcileOK(msg);
break;
case FlushHeader.STOP_FLUSH:
onStopFlush();
break;
case FlushHeader.ABORT_FLUSH:
// flush coordinator aborted; participants roll back their flush state
Collection<? extends Address> flushParticipants = fh.flushParticipants;
if(flushParticipants != null && flushParticipants.contains(localAddress)){
if (log.isDebugEnabled()) {
log.debug("At " + localAddress
+ " received ABORT_FLUSH from flush coordinator " + msg.getSrc()
+ ", am i flush participant="
+ flushParticipants.contains(localAddress));
}
flushInProgress.set(false);
flushNotCompletedMap.clear();
flushCompletedMap.clear();
}
break;
case FlushHeader.FLUSH_NOT_COMPLETED:
if (log.isDebugEnabled()) {
log.debug("At " + localAddress
+ " received FLUSH_NOT_COMPLETED from "
+ msg.getSrc());
}
boolean flushCollision = false;
synchronized(sharedLock){
flushNotCompletedMap.add(msg.getSrc());
// a collision means some members reported COMPLETED while others reported NOT_COMPLETED
flushCollision = !flushCompletedMap.isEmpty();
if(flushCollision){
flushNotCompletedMap.clear();
flushCompletedMap.clear();
}
}
if (log.isDebugEnabled()) {
log.debug("At " + localAddress
+ " received FLUSH_NOT_COMPLETED from "
+ msg.getSrc() + " collision=" + flushCollision);
}
//reject flush if we have at least one OK and at least one FAIL
if(flushCollision){
Runnable r = new Runnable(){
public void run() {
//wait a bit so ABORTs do not get received before other possible FLUSH_COMPLETED
Util.sleep(1000);
rejectFlush(fh.flushParticipants, fh.viewID);
}
};
new Thread(r).start();
}
//however, flush should fail/retry as soon as one FAIL is received
flush_promise.setResult(Boolean.FALSE);
break;
case FlushHeader.FLUSH_COMPLETED:
if(isCurrentFlushMessage(fh))
onFlushCompleted(msg.getSrc(), fh);
break;
}
return null; // do not pass FLUSH msg up
}else{
// for processing of application messages after we join,
// lets wait for STOP_FLUSH to complete
// before we start allowing message up.
Address dest=msg.getDest();
if(dest != null && !dest.isMulticastAddress()) {
return up_prot.up(evt); // allow unicasts to pass, virtual synchrony only applies to multicasts
}
if(!allowMessagesToPassUp)
return null;
}
break;
case Event.VIEW_CHANGE:
/*
 * [JGRP-618] - FLUSH coordinator transfer reorders
 * block/unblock/view events in applications (TCP stack only)
 *
 */
up_prot.up(evt);
View newView = (View) evt.getArg();
boolean coordinatorLeft = onViewChange(newView);
boolean singletonMember = newView.size() == 1 && newView.containsMember(localAddress);
boolean isThisOurFirstView = viewCounter.addAndGet(1) == 1;
// if this is channel's first view and its the only member of the group - no flush was run
// but the channel application should still receive BLOCK,VIEW,UNBLOCK
//also if coordinator of flush left each member should run stopFlush individually.
if((isThisOurFirstView && singletonMember) || coordinatorLeft){
onStopFlush();
}
return null;
case Event.TMP_VIEW:
/*
 * April 25, 2007
 *
 * Accommodating current NAKACK (1.127)
 *
 * Updates field currentView of a leaving coordinator. Leaving
 * coordinator, after it sends out the view, does not need to
 * participate in second flush phase.
 *
 * see onStopFlush();
 *
 * TODO: revisit if still needed post NAKACK 1.127
 *
 */
View tmpView = (View) evt.getArg();
if(!tmpView.containsMember(localAddress)){
onViewChange(tmpView);
}
break;
case Event.SUSPECT:
onSuspect((Address) evt.getArg());
break;
case Event.SUSPEND:
return startFlush(evt);
case Event.RESUME:
onResume(evt);
return null;
}
return up_prot.up(evt);
}
private void onFlushReconcileOK(Message msg) {
if(log.isDebugEnabled())
log.debug(localAddress + " received reconcile ok from " + msg.getSrc());
synchronized(sharedLock){
reconcileOks.add(msg.getSrc());
if(reconcileOks.size() >= flushMembers.size()){
flush_promise.setResult(Boolean.TRUE);
if(log.isDebugEnabled())
log.debug("All FLUSH_RECONCILE_OK received at " + localAddress);
}
}
}
private void handleFlushReconcile(Message msg, FlushHeader fh) {
Address requester = msg.getSrc();
Digest reconcileDigest = fh.digest;
if(log.isDebugEnabled())
log.debug("Received FLUSH_RECONCILE at " + localAddress
+ " passing digest to NAKACK "
+ reconcileDigest);
// Let NAKACK reconcile missing messages
down_prot.down(new Event(Event.REBROADCAST, reconcileDigest));
if(log.isDebugEnabled())
log.debug("Returned from FLUSH_RECONCILE at " + localAddress
+ " Sending RECONCILE_OK to "
+ requester
+ ", thread "
+ Thread.currentThread());
Message reconcileOk = new Message(requester);
reconcileOk.setFlag(Message.OOB);
reconcileOk.putHeader(getName(), new FlushHeader(FlushHeader.FLUSH_RECONCILE_OK));
down_prot.down(new Event(Event.MSG, reconcileOk));
}
private void handleStartFlush(Message msg, FlushHeader fh) {
Address flushRequester = msg.getSrc();
boolean proceed = flushInProgress.compareAndSet(false, true);
if (proceed) {
synchronized (sharedLock) {
flushCoordinator = flushRequester;
}
onStartFlush(flushRequester, fh);
}
else{
FlushHeader fhr=new FlushHeader(FlushHeader.FLUSH_NOT_COMPLETED, fh.viewID,fh.flushParticipants);
Message response=new Message(flushRequester);
response.putHeader(getName(), fhr);
down_prot.down(new Event(Event.MSG, response));
if(log.isDebugEnabled())
log.debug("Received START_FLUSH at " + localAddress
+ " responded with FLUSH_NOT_COMPLETED to "
+ flushRequester);
}
}
private void rejectFlush(Collection<? extends Address> participants,long viewId) {
for(Address flushMember:participants){
Message reject = new Message(flushMember, localAddress, null);
reject.putHeader(getName(), new FlushHeader(FlushHeader.ABORT_FLUSH,viewId,participants));
down_prot.down(new Event(Event.MSG, reject));
}
}
public Vector<Integer> providedDownServices() {
Vector<Integer> retval = new Vector<Integer>(2);
retval.addElement(new Integer(Event.SUSPEND));
retval.addElement(new Integer(Event.RESUME));
return retval;
}
    /**
     * Sends BLOCK up to the application and re-arms the unblock latch, so the
     * matching UNBLOCK for this round is sent at most once.
     */
    private void sendBlockUpToChannel() {
        up_prot.up(new Event(Event.BLOCK));
        sentUnblock.set(false);
    }
    /**
     * Sends UNBLOCK up to the application and re-arms the block latch.
     * NOTE(review): the flag is cleared before up() here but after up() in
     * sendBlockUpToChannel() — presumably intentional; confirm before reordering.
     */
    private void sendUnBlockUpToChannel() {
        sentBlock.set(false);
        up_prot.up(new Event(Event.UNBLOCK));
    }
    /** Returns true if the header belongs to the flush round of the currently installed view. */
    private boolean isCurrentFlushMessage(FlushHeader fh) {
        return fh.viewID == currentViewId();
    }
private long currentViewId() {
long viewId = -1;
synchronized(sharedLock){
ViewId view = currentView.getVid();
if(view != null){
viewId = view.getId();
}
}
return viewId;
}
    /**
     * Installs the new view under sharedLock, pruning suspects that are no longer
     * members.
     * @return true if the old view's creator (the previous coordinator) is absent
     *         from the new view, i.e. the coordinator left
     */
    private boolean onViewChange(View view) {
        boolean coordinatorLeft = false;
        View oldView;
        synchronized(sharedLock){
            suspected.retainAll(view.getMembers());
            oldView = currentView;
            currentView = view;
            // both views must be non-empty for "coordinator left" to be meaningful
            coordinatorLeft = !oldView.getMembers().isEmpty() &&
                    !view.getMembers().isEmpty() &&
                    !view.containsMember(oldView.getCreator());
        }
        if(log.isDebugEnabled())
            log.debug("Installing view at " + localAddress + " view is " + view);
        return coordinatorLeft;
    }
    /**
     * Second flush phase (STOP_FLUSH): records timing statistics, clears all
     * per-round flush state under sharedLock, unblocks threads parked in down(),
     * sends UNBLOCK up (at most once per round) and finally re-opens
     * flushInProgress so a new round may start.
     */
    private void onStopFlush() {
        if(stats){
            // maintain the exposed flush-duration statistics
            long stopFlushTime = System.currentTimeMillis();
            totalTimeInFlush += (stopFlushTime - startFlushTime);
            if(numberOfFlushes > 0){
                averageFlushDuration = totalTimeInFlush / (double) numberOfFlushes;
            }
        }
        synchronized(sharedLock){
            flushCompletedMap.clear();
            flushNotCompletedMap.clear();
            flushMembers.clear();
            suspected.clear();
            flushCoordinator = null;
            allowMessagesToPassUp = true; // multicasts may flow up again
            flushCompleted = false;
        }
        if(log.isDebugEnabled())
            log.debug("At " + localAddress
                      + " received STOP_FLUSH, unblocking FLUSH.down() and sending UNBLOCK up");
        // wake any thread blocked in down() while the flush was active
        synchronized(blockMutex){
            isBlockingFlushDown = false;
            blockMutex.notifyAll();
        }
        if(sentUnblock.compareAndSet(false,true)){
            //ensures that we do not repeat unblock event
            sendUnBlockUpToChannel();
        }
        flushInProgress.set(false); // must be last: allows the next flush round
    }
private void onSuspend(List<Address> members) {
Message msg = null;
Collection<Address> participantsInFlush = null;
synchronized(sharedLock){
// start FLUSH only on group members that we need to flush
if(members != null){
participantsInFlush = members;
participantsInFlush.retainAll(currentView.getMembers());
}else{
participantsInFlush = new ArrayList<Address>(currentView.getMembers());
}
msg = new Message(null, localAddress, null);
msg.putHeader(getName(), new FlushHeader(FlushHeader.START_FLUSH,
currentViewId(),
participantsInFlush));
}
if(participantsInFlush.isEmpty()){
flush_promise.setResult(Boolean.TRUE);
}else{
down_prot.down(new Event(Event.MSG, msg));
if(log.isDebugEnabled())
log.debug("Flush coordinator " + localAddress
+ " is starting FLUSH with participants "
+ participantsInFlush);
}
}
private void onResume(Event evt) {
List<Address> members = (List<Address>) evt.getArg();
long viewID = currentViewId();
if(members == null || members.isEmpty()){
Message msg = new Message(null, localAddress, null);
//Cannot be OOB since START_FLUSH is not OOB
//we have to FIFO order two subsequent flushes
msg.putHeader(getName(), new FlushHeader(FlushHeader.STOP_FLUSH, viewID));
down_prot.down(new Event(Event.MSG, msg));
if(log.isDebugEnabled())
log.debug("Received RESUME at " + localAddress + ", sent STOP_FLUSH to all");
}else{
for (Address address : members) {
Message msg = new Message(address, localAddress, null);
//Cannot be OOB since START_FLUSH is not OOB
//we have to FIFO order two subsequent flushes
msg.putHeader(getName(), new FlushHeader(FlushHeader.STOP_FLUSH, viewID));
down_prot.down(new Event(Event.MSG, msg));
if(log.isDebugEnabled())
log.debug("Received RESUME at " + localAddress + ", sent STOP_FLUSH to " + address);
}
}
}
    /**
     * Participant side of the first flush phase. Records the flush coordinator
     * and participant set, sends BLOCK up and blocks down() (only once per round
     * and only for actual participants), then replies to the flush starter with
     * FLUSH_COMPLETED carrying this member's digest.
     */
    private void onStartFlush(Address flushStarter, FlushHeader fh) {
        if(stats){
            startFlushTime = System.currentTimeMillis();
            numberOfFlushes += 1;
        }
        boolean proceed = false;
        synchronized(sharedLock){
            flushCoordinator = flushStarter;
            flushMembers.clear();
            if(fh.flushParticipants != null){
                flushMembers.addAll(fh.flushParticipants);
            }
            // participation decided before suspects are pruned from the set
            proceed = flushMembers.contains(localAddress);
            flushMembers.removeAll(suspected);
        }
        if(proceed) {
            if(sentBlock.compareAndSet(false, true)) {
                //ensures that we do not repeat block event
                //and that we do not send block event to non participants
                sendBlockUpToChannel();
                synchronized(blockMutex) {
                    isBlockingFlushDown=true;
                }
            }
            else {
                if(log.isDebugEnabled())
                    log.debug("Received START_FLUSH at " + localAddress
                              + " but not sending BLOCK up");
            }
            // report our message digest back to the flush coordinator
            Digest digest=(Digest)down_prot.down(new Event(Event.GET_DIGEST));
            FlushHeader fhr=new FlushHeader(FlushHeader.FLUSH_COMPLETED, fh.viewID,fh.flushParticipants);
            fhr.addDigest(digest);
            Message msg=new Message(flushStarter);
            msg.putHeader(getName(), fhr);
            down_prot.down(new Event(Event.MSG, msg));
            if(log.isDebugEnabled())
                log.debug("Received START_FLUSH at " + localAddress
                          + " responded with FLUSH_COMPLETED to "
                          + flushStarter);
        }
    }
    /**
     * Coordinator-side collection of FLUSH_COMPLETED responses. When all flush
     * members have answered: either (a) start a reconciliation round if digests
     * show virtual-synchrony gaps, or (b) complete the flush promise. If both
     * FLUSH_COMPLETED and FLUSH_NOT_COMPLETED were seen (collision), the whole
     * round is rejected with ABORT_FLUSH after a short delay.
     */
    private void onFlushCompleted(Address address, final FlushHeader header) {
        Message msg = null;
        boolean needsReconciliationPhase = false;
        boolean collision = false;
        Digest digest = header.digest;
        synchronized(sharedLock){
            flushCompletedMap.put(address, digest);
            flushCompleted = flushCompletedMap.size() >= flushMembers.size()
                    && !flushMembers.isEmpty()
                    && flushCompletedMap.keySet().containsAll(flushMembers);
            // a non-empty flushNotCompletedMap means some member refused this round
            collision = !flushNotCompletedMap.isEmpty();
            if(log.isDebugEnabled())
                log.debug("At " + localAddress
                          + " FLUSH_COMPLETED from "
                          + address
                          + ",completed "
                          + flushCompleted
                          + ",flushMembers "
                          + flushMembers
                          + ",flushCompleted "
                          + flushCompletedMap.keySet());
            needsReconciliationPhase = enable_reconciliation && flushCompleted
                    && hasVirtualSynchronyGaps();
            if(needsReconciliationPhase){
                // multicast the highest digest so members can fetch missing messages
                Digest d = findHighestSequences();
                msg = new Message();
                msg.setFlag(Message.OOB);
                FlushHeader fh = new FlushHeader(FlushHeader.FLUSH_RECONCILE,
                                                 currentViewId(),
                                                 flushMembers);
                reconcileOks.clear();
                fh.addDigest(d);
                msg.putHeader(getName(), fh);
                if(log.isDebugEnabled())
                    log.debug("At "+ localAddress + " reconciling flush mebers due to virtual synchrony gap, digest is " + d
                              + " flush members are "
                              + flushMembers);
                flushCompletedMap.clear();
            } else if (flushCompleted){
                flushCompletedMap.clear();
            } else if (collision){
                flushNotCompletedMap.clear();
                flushCompletedMap.clear();
            }
        }
        // actions below are deliberately performed outside sharedLock
        if(needsReconciliationPhase){
            down_prot.down(new Event(Event.MSG, msg));
        }else if(flushCompleted){
            flush_promise.setResult(Boolean.TRUE);
            if(log.isDebugEnabled())
                log.debug("All FLUSH_COMPLETED received at " + localAddress);
        }else if(collision){
            //reject flush if we have at least one OK and at least one FAIL
            Runnable r = new Runnable(){
                public void run() {
                    //wait a bit so ABORTs do not get received before other possible FLUSH_COMPLETED
                    Util.sleep(1000);
                    rejectFlush(header.flushParticipants, header.viewID);
                }
            };
            new Thread(r).start();
        }
    }
    /**
     * Returns true if the digests reported by flush participants differ, i.e.
     * some member has delivered messages another has not (a virtual-synchrony
     * gap). Precondition: flushCompletedMap is non-empty — callers only invoke
     * this when flushCompleted is true.
     */
    private boolean hasVirtualSynchronyGaps() {
        ArrayList<Digest> digests = new ArrayList<Digest>();
        digests.addAll(flushCompletedMap.values());
        Digest firstDigest = digests.get(0);
        List<Digest> remainingDigests = digests.subList(1, digests.size());
        for(Digest digest:remainingDigests){
            Digest diff = firstDigest.difference(digest);
            // NOTE(review): identity comparison — presumably difference() returns the
            // EMPTY_DIGEST sentinel when there is no gap; confirm before changing to equals()
            if(diff != Digest.EMPTY_DIGEST){
                return true;
            }
        }
        return false;
    }
private Digest findHighestSequences() {
Digest result = null;
List<Digest> digests = new ArrayList<Digest>(flushCompletedMap.values());
result = digests.get(0);
List<Digest> remainingDigests = digests.subList(1, digests.size());
for(Digest digestG:remainingDigests){
result = result.highestSequence(digestG);
}
return result;
}
    /**
     * Handles a SUSPECT event for the given member. Two concerns:
     * (1) if the suspected member is the flush coordinator and this member is its
     *     "neighbour" in the participant list, this member completes the flush on
     *     the coordinator's behalf by resuming it;
     * (2) if the suspect was the last missing FLUSH_COMPLETED responder, report
     *     completion to the flush coordinator.
     */
    private void onSuspect(Address address) {
        //handles FlushTest#testFlushWithCrashedFlushCoordinator
        boolean amINeighbourOfCrashedFlushCoordinator = false;
        ArrayList<Address> flushMembersCopy = null;
        synchronized(sharedLock){
            boolean flushCoordinatorSuspected = address.equals(flushCoordinator);
            if(flushCoordinatorSuspected && flushMembers != null){
                int indexOfCoordinator = flushMembers.indexOf(flushCoordinator);
                int myIndex = flushMembers.indexOf(localAddress);
                int diff = myIndex - indexOfCoordinator;
                // neighbour = next in list, wrapping around at the end
                // NOTE(review): wrap test compares against size() rather than size()-1 — confirm intent
                amINeighbourOfCrashedFlushCoordinator = (diff == 1 || (myIndex==0 && indexOfCoordinator == flushMembers.size()));
                if(amINeighbourOfCrashedFlushCoordinator){
                    flushMembersCopy = new ArrayList<Address>(flushMembers);
                }
            }
        }
        if(amINeighbourOfCrashedFlushCoordinator){
            if(log.isDebugEnabled())
                log.debug("Flush coordinator " + flushCoordinator + " suspected, " + localAddress + " is neighbour, completing flush ");
            onResume(new Event(Event.RESUME, flushMembersCopy));
        }
        //handles FlushTest#testFlushWithCrashedNonCoordinators
        boolean flushOkCompleted = false;
        Message m = null;
        long viewID = 0;
        synchronized(sharedLock){
            suspected.add(address);
            flushMembers.removeAll(suspected);
            viewID = currentViewId();
            // removing the suspect may make the remaining responses complete
            flushOkCompleted = !flushCompletedMap.isEmpty() && flushCompletedMap.keySet().containsAll(flushMembers);
            if(flushOkCompleted){
                m = new Message(flushCoordinator, localAddress, null);
            }
            if(log.isDebugEnabled())
                log.debug("Suspect is " + address
                          + ",completed "
                          + flushOkCompleted
                          + ", flushOkSet "
                          + flushCompletedMap
                          + " flushMembers "
                          + flushMembers);
        }
        if(flushOkCompleted){
            Digest digest = (Digest) down_prot.down(new Event(Event.GET_DIGEST));
            FlushHeader fh = new FlushHeader(FlushHeader.FLUSH_COMPLETED, viewID);
            fh.addDigest(digest);
            m.putHeader(getName(), fh);
            down_prot.down(new Event(Event.MSG, m));
            if(log.isDebugEnabled())
                log.debug(localAddress + " sent FLUSH_COMPLETED message to " + flushCoordinator);
        }
    }
public static class FlushHeader extends Header implements Streamable {
public static final byte START_FLUSH = 0;
public static final byte STOP_FLUSH = 2;
public static final byte FLUSH_COMPLETED = 3;
public static final byte ABORT_FLUSH = 5;
public static final byte FLUSH_BYPASS = 6;
public static final byte FLUSH_RECONCILE = 7;
public static final byte FLUSH_RECONCILE_OK = 8;
public static final byte FLUSH_NOT_COMPLETED = 9;
byte type;
long viewID;
Collection<? extends Address> flushParticipants;
Digest digest = null;
private static final long serialVersionUID=-6248843990215637687L;
public FlushHeader(){
this(START_FLUSH, 0);
} // used for externalization
public FlushHeader(byte type){
this(type, 0);
}
public FlushHeader(byte type,long viewID){
this(type, viewID, null);
}
public FlushHeader(byte type,long viewID,Collection<? extends Address> flushView){
this.type = type;
this.viewID = viewID;
if(flushView != null){
this.flushParticipants = new ArrayList<Address>(flushView);
}
}
@Override
public int size() {
int retval=Global.BYTE_SIZE; // type
retval+=Global.LONG_SIZE; // viewID
retval+= Util.size(flushParticipants);
retval+=Global.BYTE_SIZE; // presence for digest
if(digest != null){
retval += digest.serializedSize();
}
return retval;
}
public void addDigest(Digest digest) {
this.digest = digest;
}
public String toString() {
switch(type){
case START_FLUSH:
return "FLUSH[type=START_FLUSH,viewId=" + viewID
+ ",members="
+ flushParticipants
+ "]";
case STOP_FLUSH:
return "FLUSH[type=STOP_FLUSH,viewId=" + viewID + "]";
case ABORT_FLUSH:
return "FLUSH[type=ABORT_FLUSH,viewId=" + viewID + "]";
case FLUSH_COMPLETED:
return "FLUSH[type=FLUSH_COMPLETED,viewId=" + viewID + "]";
case FLUSH_BYPASS:
return "FLUSH[type=FLUSH_BYPASS,viewId=" + viewID + "]";
case FLUSH_RECONCILE:
return "FLUSH[type=FLUSH_RECONCILE,viewId=" + viewID + ",digest=" + digest + "]";
case FLUSH_RECONCILE_OK:
return "FLUSH[type=FLUSH_RECONCILE_OK,viewId=" + viewID + "]";
default:
return "[FLUSH: unknown type (" + type + ")]";
}
}
public void writeExternal(ObjectOutput out) throws IOException {
out.writeByte(type);
out.writeLong(viewID);
out.writeObject(flushParticipants);
out.writeObject(digest);
}
public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
type = in.readByte();
viewID = in.readLong();
flushParticipants = (Collection<Address>) in.readObject();
digest = (Digest) in.readObject();
}
public void writeTo(DataOutputStream out) throws IOException {
out.writeByte(type);
out.writeLong(viewID);
Util.writeAddresses(flushParticipants, out);
Util.writeStreamable(digest, out);
}
public void readFrom(DataInputStream in) throws IOException,
IllegalAccessException,
InstantiationException {
type = in.readByte();
viewID = in.readLong();
flushParticipants = Util.readAddresses(in, ArrayList.class);
digest = (Digest) Util.readStreamable(Digest.class, in);
}
}
}
|
package uk.org.cinquin.mutinack;
import static uk.org.cinquin.mutinack.misc_util.Util.nonNullify;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.Supplier;
import org.eclipse.collections.api.list.MutableList;
import org.eclipse.collections.impl.factory.Lists;
import org.eclipse.jdt.annotation.NonNull;
import org.eclipse.jdt.annotation.Nullable;
import contrib.edu.stanford.nlp.util.HasInterval;
import contrib.edu.stanford.nlp.util.Interval;
import contrib.net.sf.samtools.Cigar;
import contrib.net.sf.samtools.CigarElement;
import contrib.net.sf.samtools.CigarOperator;
import contrib.net.sf.samtools.SAMFileReader;
import contrib.net.sf.samtools.SAMFileReader.QueryInterval;
import contrib.net.sf.samtools.SAMRecord;
import contrib.net.sf.samtools.SAMRecordIterator;
import contrib.net.sf.samtools.SamPairUtil.PairOrientation;
import contrib.nf.fr.eraasoft.pool.PoolException;
import contrib.uk.org.lidalia.slf4jext.Logger;
import contrib.uk.org.lidalia.slf4jext.LoggerFactory;
import gnu.trove.list.TIntList;
import gnu.trove.list.array.TIntArrayList;
import gnu.trove.map.hash.TObjectByteHashMap;
import uk.org.cinquin.mutinack.candidate_sequences.ExtendedAlignmentBlock;
import uk.org.cinquin.mutinack.misc_util.Assert;
import uk.org.cinquin.mutinack.misc_util.SettableInteger;
import uk.org.cinquin.mutinack.misc_util.Util;
import uk.org.cinquin.mutinack.misc_util.exceptions.ParseRTException;
/**
* Hashcode and equality based on read name + first or second of pair.
* @author olivier
*
*/
public final class ExtendedSAMRecord implements HasInterval<Integer> {
	static final Logger logger = LoggerFactory.getLogger(ExtendedSAMRecord.class);
	// set when the read is rejected by upstream filtering; only affects toString()
	public boolean discarded = false;
	// cache used by checkMate() to look up the mate by its full name; may be null
	private final @Nullable Map<String, ExtendedSAMRecord> extSAMCache;
	public final @NonNull SAMRecord record;
	// full read name (see getReadFullName); sole basis of equals()/hashCode()
	private final @NonNull String name;
	// lazily resolved by checkMate(); null until found (or never found)
	private @Nullable ExtendedSAMRecord mate;
	private boolean triedRetrievingMateFromFile = false;
	private final @NonNull String mateName;
	// cached name.hashCode()
	private final int hashCode;
	public @Nullable DuplexRead duplexRead;
	// lazily copied from the mate; groupSettings.getNs() serves as "unknown" sentinel
	private byte @Nullable[] mateVariableBarcode;
	public final byte @NonNull[] variableBarcode;
	public final byte @Nullable[] constantBarcode;
	public final @NonNull SequenceLocation location;
	// summary Phred statistics computed once in the constructor
	final int medianPhred;
	final float averagePhred;
	private final Cigar cigar;
	/**
	 * Length of read ignoring trailing Ns.
	 */
	public final int effectiveLength;
	int nReferenceDisagreements = 0;
	// sentinel for "no Phred score recorded" in basePhredScores
	public static final byte PHRED_NO_ENTRY = -1;
	public final @NonNull TObjectByteHashMap<SequenceLocation> basePhredScores =
		new TObjectByteHashMap<>(150, 0.5f, PHRED_NO_ENTRY);
	// -1 means "not yet computed"; see getnClipped()/computeNClipped()
	private int nClipped = -1;
	// lazily computed tri-state cache; null = not yet evaluated
	private Boolean formsWrongPair;
	public boolean processed = false;
	public boolean duplexAlreadyVisitedForStats = false;
	// flow-cell coordinates parsed from the read name (optical-duplicate detection)
	public final int xLoc, yLoc;
	public final String runAndTile;
	public boolean opticalDuplicate = false;
	public boolean hasOpticalDuplicates = false;
	public boolean visitedForOptDups = false;
	public int tempIndex0 = -1, tempIndex1 = -1;
	private final @NonNull MutinackGroup groupSettings;
	private final @NonNull Mutinack analyzer;
public static @NonNull String getReadFullName(SAMRecord rec, boolean getMate) {
return (rec.getReadName() + "--" + ((getMate ^ rec.getFirstOfPairFlag())? "1" : "2") + "--" +
(getMate ? rec.getMateAlignmentStart() : rec.getAlignmentStart())) +
(!getMate && rec.getSupplementaryAlignmentFlag() ? "--suppl" : "")/*.intern()*/;
}
	/** Returns the unique full name of this read (see {@link #getReadFullName}). */
	public @NonNull String getFullName() {
		return name;
	}
	/** Hash of the full read name, cached at construction. */
	@Override
	public final int hashCode() {
		return hashCode;
	}
@Override
public final boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
return name.equals(((ExtendedSAMRecord) obj).name);
}
	/**
	 * Computes the number of soft/hard-clipped bases on both ends, excluding
	 * clipping attributable to adapter read-through (inferred from mate position
	 * and from the N-trimmed effectiveLength). Result cached in nClipped.
	 */
	private void computeNClipped() {
		final int readLength = record.getReadLength();
		// bases trimmed to N upstream, presumed to be adapter sequence
		final int adapterClipped = readLength - effectiveLength;
		int nClippedLeft = (!getReadNegativeStrandFlag() ?
			/* positive strand */
			getAlignmentStart() - getUnclippedStart() :
			/* negative strand */
			/* Note: mate-position getters return NO_MATE_POSITION (Integer.MAX_VALUE - 1000)
			 * when the mate is not available — TODO confirm this comment originally
			 * referred to the getMateAlignmentEnd call in the right-clip branch below */
			(getAlignmentStart() <= getMateAlignmentStart() ?
				/* adapter run through, causes clipping we should ignore */
				0 :
				getAlignmentStart() - getUnclippedStart() - adapterClipped));
		nClippedLeft = Math.max(0, nClippedLeft);
		int nClippedRight = getReadNegativeStrandFlag() ?
			/* negative strand */
			getUnclippedEnd() - getAlignmentEnd() :
			/* positive strand */
			(getAlignmentEnd() >= getMateAlignmentEnd() ?
				/* adapter run through, causes clipping we should ignore */
				0 :
				getUnclippedEnd() - getAlignmentEnd() - adapterClipped);
		nClippedRight = Math.max(0, nClippedRight);
		nClipped = nClippedLeft + nClippedRight;
	}
	/** Lazily computes and returns the clipped-base count (see computeNClipped). */
	public int getnClipped() {
		if (nClipped == -1) {
			computeNClipped();
		}
		return nClipped;
	}
	/** Invalidates the cached clipped-base count so it is recomputed on next access. */
	public void resetnClipped() {
		nClipped = -1;
	}
@SuppressWarnings("static-access")
public ExtendedSAMRecord(@NonNull SAMRecord rec, @NonNull String fullName,
@NonNull List<@NonNull AnalysisStats> stats,
@NonNull Mutinack analyzer, @NonNull SequenceLocation location,
@Nullable Map<String, ExtendedSAMRecord> extSAMCache) {
this.groupSettings = Objects.requireNonNull(analyzer.groupSettings);
this.analyzer = Objects.requireNonNull(analyzer);
this.extSAMCache = extSAMCache;
this.name = Objects.requireNonNull(fullName);
this.record = Objects.requireNonNull(rec);
this.cigar = rec.getCigar();
this.location = location;
hashCode = fullName.hashCode();
mateName = getReadFullName(rec, true);
final int readLength = rec.getReadLength();
//Find effective end of read, i.e. first position that is not an 'N' (the trimming
//step run prior to mutation detection might shorten reads that ran into the
//adapter because the insert was shorter than read length, by transforming all
//bases that should be ignored to an N)
@SuppressWarnings("hiding")
int effectiveLength = readLength;
final byte[] read = record.getReadBases();
final byte[] baseQualities = record.getBaseQualities();
if (getReadNegativeStrandFlag()) {
int i = 0;
while (read[i] == 'N' &&
i < readLength - 1) {
i++;
}
effectiveLength = readLength - i;
} else {
while (read[effectiveLength - 1] == 'N' &&
effectiveLength > 0) {
effectiveLength
}
}
Assert.isFalse(effectiveLength < 0);
this.effectiveLength = effectiveLength;
int sumBaseQualities0 = 0;
int nConsidered0 = 0;
TIntList qualities = new TIntArrayList(effectiveLength);
int n = Math.min(effectiveLength, readLength / 2);
for (int index1 = 0; index1 < n; index1++) {
nConsidered0++;
final byte b = baseQualities[index1];
sumBaseQualities0 += b;
stats.forEach(s -> s.nProcessedBases.add(location, 1));
stats.forEach(s -> s.phredSumProcessedbases.add(b));
qualities.add(b);
}
int avQuality = sumBaseQualities0 / nConsidered0;
stats.forEach(s-> s.averageReadPhredQuality0.insert(avQuality));
int sumBaseQualities1 = 0;
int nConsidered1 = 0;
for (int index1 = readLength / 2; index1 < effectiveLength; index1++) {
nConsidered1++;
final byte b = baseQualities[index1];
sumBaseQualities1 += b;
stats.forEach(s -> s.nProcessedBases.add(location, 1));
stats.forEach(s -> s.phredSumProcessedbases.add(b));
qualities.add(b);
}
if (nConsidered1 > 0) {
int avQuality1 = sumBaseQualities1 / nConsidered1;
stats.forEach(s -> s.averageReadPhredQuality1.insert(avQuality1));
}
qualities.sort();
medianPhred = qualities.get(qualities.size() / 2);
averagePhred = (sumBaseQualities0 + sumBaseQualities1) / ((float) (nConsidered0 + nConsidered1));
stats.forEach(s -> s.medianReadPhredQuality.insert(medianPhred));
Assert.isTrue(rec.getUnclippedEnd() - 1 >= getAlignmentEnd(),
(Supplier<Object>) () -> "" + (rec.getUnclippedEnd() - 1),
(Supplier<Object>) this::toString,
"Unclipped end is %s for read %s");
Assert.isTrue(rec.getAlignmentStart() - 1 >= getUnclippedStart());
final @NonNull String fullBarcodeString;
String bcAttr = (String) record.getAttribute("BC");
if (groupSettings.getVariableBarcodeEnd() > 0) {
if (bcAttr == null) {
final int firstIndex = name.indexOf("BC:Z:");
if (firstIndex == -1) {
throw new ParseRTException("Missing first barcode for read " + name +
' ' + record.toString());
}
final int index;
if (record.getFirstOfPairFlag()) {
index = firstIndex;
} else {
index = name.indexOf("BC:Z:", firstIndex + 1);
if (index == -1) {
throw new ParseRTException("Missing second barcode for read " + name +
' ' + record.toString());
}
}
fullBarcodeString = nonNullify(name.substring(index + 5, name.indexOf('_', index)));
} else {
fullBarcodeString = bcAttr;
}
variableBarcode = Util.getInternedVB(fullBarcodeString.substring(
groupSettings.getVariableBarcodeStart(), groupSettings.getVariableBarcodeEnd() + 1).getBytes());
constantBarcode = Util.getInternedCB(fullBarcodeString.substring(
groupSettings.getConstantBarcodeStart(), groupSettings.getConstantBarcodeEnd() + 1).getBytes());
} else {
variableBarcode = EMPTY_BARCODE;
constantBarcode = DUMMY_BARCODE;//EMPTY_BARCODE
}
String readName = record.getReadName();
int endFirstChunk = nthIndexOf(readName, ':', 5);
//Interning below required for equality checks performed in optical duplicate detection
runAndTile = record.getReadName().substring(0, endFirstChunk).intern();
byte[] readNameBytes = readName.getBytes();
xLoc = parseInt(readNameBytes, endFirstChunk + 1);
int endXLoc = readName.indexOf(':', endFirstChunk + 1);
yLoc = parseInt(readNameBytes, endXLoc + 1);
//interval = Interval.toInterval(rec.getAlignmentStart(), rec.getAlignmentEnd());
}
private static boolean LENIENT_COORDINATE_PARSING = true;
private static int parseInt(final byte[] b, final int fromIndex) {
final int end = b.length - 1;
int i = fromIndex;
int result = 0;
while (i <= end) {
if (b[i] == ':') {
return result;
}
byte character = b[i];
if (character < 48 || character > 57) {
if (LENIENT_COORDINATE_PARSING) {
return result;
}
throw new ParseRTException("Character " + character + " is not a digit when parsing " + new String(b)
+ " from " + fromIndex);
}
result = 10 * result + b[i] - 48;
i++;
}
return result;
}
private static int nthIndexOf(final String s, final char c, final int n) {
int i = -1;
int found = 0;
while (found < n) {
i = s.indexOf(c, i + 1);
found++;
}
return i;
}
	// Used when variable barcodes are disabled (getVariableBarcodeEnd() == 0)
	private static final byte @NonNull[] EMPTY_BARCODE = new byte [0];
	// Placeholder constant barcode used when barcode extraction is disabled
	private static final byte @NonNull[] DUMMY_BARCODE = {'N', 'N', 'N'};
	/** Convenience constructor: derives the full name from the record itself. */
	public ExtendedSAMRecord(@NonNull SAMRecord rec,
			@NonNull Mutinack analyzer, @NonNull SequenceLocation location,
			@NonNull Map<String, ExtendedSAMRecord> extSAMCache) {
		this(rec, getReadFullName(rec, false),
			analyzer.stats, analyzer, location, extSAMCache);
	}
	/**
	 * Returns the mate's variable barcode, caching it once the mate is available.
	 * The all-N sentinel (groupSettings.getNs()) marks "mate not found yet", so
	 * the lookup is retried on every call until the mate appears.
	 */
	public byte @NonNull[] getMateVariableBarcode() {
		if (mateVariableBarcode == null ||
				mateVariableBarcode == groupSettings.getNs()) {
			checkMate();
			if (mate == null) {
				mateVariableBarcode = groupSettings.getNs();
			} else {
				mateVariableBarcode = nonNullify(mate).variableBarcode;
			}
		}
		return Objects.requireNonNull(mateVariableBarcode);
	}
@Override
public String toString() {
return (discarded ? "DISCARDED" : "" ) + name + ": " + "startNoBC: " + getAlignmentStart() +
"; endNoBC: " + getAlignmentEnd() +
"; alignmentStart: " + (getReadNegativeStrandFlag() ? "-" : "+") + getAlignmentStart() +
"; alignmentEnd: " + getAlignmentEnd() +
"; cigar: " + record.getCigarString() +
"; length: " + record.getReadLength() +
"; effectiveLength: " + effectiveLength +
"; nClipped: " + (nClipped == -1 ? "Uncomputed" : getnClipped()) +
"; insertSize: " + getInsertSize() +
"; bases: " + new String(record.getReadBases());
}
	/** Not implemented; the interval field is commented out in the constructor. */
	@Override
	public Interval<Integer> getInterval() {
		throw new RuntimeException("Unimplemented");
		//return interval;
	}
	/**
	 * Maps a 0-based reference position to the corresponding 0-based position in
	 * the (unclipped) read by walking the cigar. Positions before the alignment
	 * start are extrapolated linearly into the left clip; positions past the
	 * aligned blocks are extrapolated past the last processed base.
	 */
	public int referencePositionToReadPosition(int refPosition) {
		if (refPosition <= getAlignmentStart()) {
			// in (or before) the left soft clip: straight offset from unclipped start
			return refPosition - getUnclippedStart();
		}
		List<CigarElement> cElmnts = getCigar().getCigarElements();
		final int nElmnts = cElmnts.size();
		int ceIndex = 0;
		// start past the left clip
		int nReadBasesProcessed = getAlignmentStart() - getUnclippedStart();
		final int nBasesToAlign = refPosition - getAlignmentStart();
		int nBasesAligned = 0;
		while (ceIndex < nElmnts && nBasesAligned < nBasesToAlign) {
			final CigarElement c = cElmnts.get(ceIndex);
			final int blockLength = c.getLength();
			switch(c.getOperator()) {
				case M:
					// consumes both read and reference
					int nTakenBases = Math.min(blockLength, nBasesToAlign - nBasesAligned);
					nBasesAligned += nTakenBases;
					nReadBasesProcessed += nTakenBases;
					break;
				case I:
					// consumes read only
					nReadBasesProcessed += blockLength;
					break;
				case D:
				case N:
					// consumes reference only
					nBasesAligned += blockLength;
					break;
				default://Nothing to do
			}
			//Ignoring clipping at end of read
			ceIndex++;
		}
		if (nBasesAligned == nBasesToAlign) {
			return nReadBasesProcessed;
		} else {
			// ran out of cigar: extrapolate the remainder
			return nReadBasesProcessed + (nBasesToAlign - nBasesAligned);
		}
	}
	/** Returns the cigar cached from the record at construction. */
	public Cigar getCigar() {
		return cigar;
	}
private int intronAdjustment(final int readPosition, boolean reverse) {
if (!groupSettings.isRnaSeq()) {
return 0;
}
SettableInteger nReadBases = new SettableInteger(0);
SettableInteger intronBases = new SettableInteger(0);
MutableList<CigarElement> cigarElements = Lists.mutable.withAll(getCigar().getCigarElements());
if (reverse) {
cigarElements.reverseThis();
}
cigarElements.detect(e -> {
CigarOperator operator = e.getOperator();
int truncatedLength = operator.consumesReadBases() ?
Math.min(e.getLength(), readPosition - nReadBases.get())
:
e.getLength();
if (operator.consumesReadBases()) {
nReadBases.addAndGet(truncatedLength);
}
if (operator == CigarOperator.N) {
intronBases.addAndGet(e.getLength());
}
if (nReadBases.get() == readPosition) {
return true;
}
return false;
});
Assert.isTrue(nReadBases.get() == readPosition);
return intronBases.get();
}
	// Sentinel returned by mate-position getters when the mate is unavailable;
	// offset from MAX_VALUE so small additions cannot overflow
	public static final int NO_MATE_POSITION = Integer.MAX_VALUE - 1000;
	/**
	 * Returns how far {@code readPosition} encroaches into the barcode-adjacent
	 * region at either ligation site (own start, or the mate's site); a value
	 * &gt; 0 means the position is too close and should be ignored.
	 */
	public int tooCloseToBarcode(int readPosition, int ignoreFirstNBases) {
		final boolean readOnNegativeStrand = getReadNegativeStrandFlag();
		// distance0: proximity to this read's own ligation end
		final int distance0;
		if (readOnNegativeStrand) {
			distance0 = readPosition - ((record.getReadLength() - 1) - ignoreFirstNBases);
		} else {
			distance0 = ignoreFirstNBases - readPosition;
		}
		//Now check if position is too close to other adapter barcode ligation site,
		//or on the wrong side of it
		final int refPositionOfMateLigationSite = getRefPositionOfMateLigationSite();
		final int distance1;
		if (!formsWrongPair() && refPositionOfMateLigationSite != NO_MATE_POSITION) {
			// bracket the mate ligation site on both sides in read coordinates
			final int readPositionOfLigSiteA = referencePositionToReadPosition(refPositionOfMateLigationSite - 1) + 1;
			final int readPositionOfLigSiteB = referencePositionToReadPosition(refPositionOfMateLigationSite + 1) - 1;
			if (getReadNegativeStrandFlag()) {
				distance1 = Math.max(readPositionOfLigSiteA, readPositionOfLigSiteB) + ignoreFirstNBases - readPosition;
			} else {
				distance1 = readPosition - (Math.min(readPositionOfLigSiteA, readPositionOfLigSiteB ) - ignoreFirstNBases);
			}
		} else {
			//Mate info not available, or pair is "wrong" pair
			//Just go by effectiveLength to infer presence of adapter, although
			//it should not happen in practice that reads form a wrong pair
			//when there is adapter read-through
			final int readLength = record.getReadLength();
			final int adapterClipped = readLength - effectiveLength;
			if (readOnNegativeStrand) {
				distance1 = (adapterClipped == 0) ?
					Integer.MIN_VALUE :
					ignoreFirstNBases + adapterClipped - readPosition;
			} else {
				distance1 = (adapterClipped == 0) ?
					Integer.MIN_VALUE :
					readPosition - (effectiveLength - ignoreFirstNBases - 1);
			}
		}
		return Math.max(distance0, distance1);
	}
public int getRefPositionOfMateLigationSite() {
return getReadNegativeStrandFlag() ?
getMateUnclippedStart() :
getMateUnclippedEnd();
}
	/** 0-based reference alignment start; asserts it is non-negative. */
	public int getRefAlignmentStart() {
		int referenceStart = getAlignmentStart();
		Assert.isFalse(referenceStart < 0);
		return referenceStart;
	}
	/** 0-based reference alignment end; asserts it is non-negative. */
	public int getRefAlignmentEnd() {
		int referenceEnd = getAlignmentEnd();
		Assert.isFalse(referenceEnd < 0, () -> "Negative alignment end in read " + this);
		return referenceEnd;
	}
public int getMateRefAlignmentStart() {
checkMate();
return mate == null ? NO_MATE_POSITION : nonNullify(mate).getRefAlignmentStart();
}
public int getMateRefAlignmentEnd() {
checkMate();
return mate == null ? NO_MATE_POSITION : nonNullify(mate).getRefAlignmentEnd();
}
	/** Inferred insert size as reported by the underlying record. */
	public int getInsertSize() {
		return record.getInferredInsertSize();
	}
	/** Returns the mate record, resolving it lazily; may be null if not found. */
	public ExtendedSAMRecord getMate() {
		checkMate();
		return mate;
	}
//Adapted from SamPairUtil
public PairOrientation getPairOrientation() {
final boolean readIsOnReverseStrand = record.getReadNegativeStrandFlag();
if (record.getReadUnmappedFlag() || !record.getReadPairedFlag() || record.getMateUnmappedFlag()) {
throw new IllegalArgumentException("Invalid SAMRecord: " + record.getReadName() + ". This method only works for SAMRecords " +
"that are paired reads with both reads aligned.");
}
if (readIsOnReverseStrand == record.getMateNegativeStrandFlag()) {
return PairOrientation.TANDEM;
}
final int positiveStrandFivePrimePos =
readIsOnReverseStrand ?
getMateOffsetUnclippedStart()
:
getOffsetUnclippedStart();
final int negativeStrandFivePrimePos =
readIsOnReverseStrand ?
getOffsetUnclippedEnd()
:
getMateOffsetUnclippedEnd();
return
positiveStrandFivePrimePos < negativeStrandFivePrimePos ?
PairOrientation.FR
:
PairOrientation.RF;
}
	/**
	 * Returns (and caches) whether this pair is "wrong": unmapped read or mate,
	 * mate on a different contig, or TANDEM/RF orientation.
	 * NOTE: relies on short-circuit evaluation — getPairOrientation() is only
	 * reached when both reads are mapped and the mate was resolved; the embedded
	 * assignments (mate, po) are intentional.
	 */
	@SuppressWarnings("null")
	public boolean formsWrongPair() {
		PairOrientation po;
		if (formsWrongPair == null) {
			formsWrongPair = record.getReadPairedFlag() && (
				record.getReadUnmappedFlag() ||
				record.getMateUnmappedFlag() ||
				(((mate = checkMate()) != null) && !record.getReferenceIndex().equals(mate.record.getReferenceIndex())) ||
				(po = getPairOrientation()) == PairOrientation.TANDEM ||
				po == PairOrientation.RF
			);
		}
		return formsWrongPair;
	}
	/** True if the read aligned to the negative strand. */
	public boolean getReadNegativeStrandFlag() {
		return record.getReadNegativeStrandFlag();
	}
	/** True if the read aligned to the positive strand. */
	public boolean getReadPositiveStrand() {
		return !record.getReadNegativeStrandFlag();
	}
/**
 * Attempts to locate the mate: first in the in-memory cache, then (at most once)
 * by reading it back from the BAM file around the recorded mate position.
 * @return the mate, or null if it could not be found
 */
private ExtendedSAMRecord checkMate() {
    if (mate == null) {
        if (extSAMCache != null)
            mate = extSAMCache.get(mateName);
        if (mate == null && !triedRetrievingMateFromFile && !record.getMateUnmappedFlag()) {
            // -1 disables the avoidAlignmentStart0Based filter; query window half-width is 1
            mate = getRead(analyzer, record.getReadName(), !record.getFirstOfPairFlag(),
                new SequenceLocation(record.getMateReferenceName(), groupSettings.indexContigNameReverseMap,
                    record.getMateAlignmentStart() - 1, false) , -1, 1);
            triedRetrievingMateFromFile = true; // never retry file lookup
        }
    }
    return mate;
}
/** 0-based alignment start (SAMRecord reports 1-based coordinates). */
public int getAlignmentStart() {
    return record.getAlignmentStart() - 1;
}

/** 0-based unclipped start (SAMRecord reports 1-based coordinates). */
public int getUnclippedStart() {
    return record.getUnclippedStart() - 1;
}

/** 0-based alignment start of the mate as recorded in this read's SAM fields. */
public int getMateAlignmentStart() {
    return record.getMateAlignmentStart() - 1;
}

/** 0-based alignment end (SAMRecord reports 1-based coordinates). */
public int getAlignmentEnd() {
    return record.getAlignmentEnd() - 1;
}
/**
 * 0-based alignment end of the mate, or NO_MATE_POSITION if the mate
 * cannot be located.
 */
public int getMateAlignmentEnd() {
    checkMate();
    return mate == null ? NO_MATE_POSITION : nonNullify(mate).getAlignmentEnd();
}

/**
 * 0-based unclipped end of the mate, or NO_MATE_POSITION if the mate
 * cannot be located.
 */
public int getMateUnclippedEnd() {
    checkMate();
    return mate == null ? NO_MATE_POSITION : nonNullify(mate).getUnclippedEnd();
}
/**
 * 0-based unclipped end, shifted by the intron adjustment.
 * NOTE(review): the constant 16 passed to intronAdjustment is presumably a
 * minimum intron length threshold — confirm against intronAdjustment's contract.
 */
public int getOffsetUnclippedEnd() {
    return record.getUnclippedEnd() - 1 - intronAdjustment(16, true);
}

/** 0-based unclipped start, shifted by the intron adjustment (same constant as above). */
public int getOffsetUnclippedStart() {
    return record.getUnclippedStart() - 1 + intronAdjustment(16, false);
}
/**
 * Intron-adjusted 0-based unclipped end of the mate, or NO_MATE_POSITION if
 * the mate cannot be located.
 */
public int getMateOffsetUnclippedEnd() {
    checkMate();
    return mate == null ? NO_MATE_POSITION : nonNullify(mate).getOffsetUnclippedEnd();
}

/**
 * Intron-adjusted 0-based unclipped start of the mate, or NO_MATE_POSITION if
 * the mate cannot be located.
 */
public int getMateOffsetUnclippedStart() {
    checkMate();
    return mate == null ? NO_MATE_POSITION : nonNullify(mate).getOffsetUnclippedStart();
}
/** 0-based unclipped end of this read (SAMRecord reports 1-based coordinates). */
public int getUnclippedEnd() {
    return record.getUnclippedEnd() - 1;
}

/** 0-based unclipped start of the mate, or NO_MATE_POSITION if the mate cannot be located. */
public int getMateUnclippedStart() {
    checkMate();
    if (mate == null) {
        return NO_MATE_POSITION;
    }
    return nonNullify(mate).getUnclippedStart();
}

/** Mapping quality reported by the aligner. */
public int getMappingQuality() {
    return record.getMappingQuality();
}
/**
 * Whether this read's reference alignment span covers the given location
 * (same contig and position within [start, end]).
 */
public boolean overlapsWith(SequenceLocation otherLocation) {
    // Same conditions and evaluation order as before, folded into one expression.
    return getRefAlignmentStart() <= otherLocation.position &&
        getRefAlignmentEnd() >= otherLocation.position &&
        otherLocation.contigIndex == getReferenceIndex();
}
/**
 * Whether this read is the "left" member of its duplex: for well-formed pairs,
 * the forward-strand read; for wrong pairs, the read with the lower (or equal)
 * intron-adjusted unclipped start.
 */
boolean duplexLeft() {
    return formsWrongPair() ?
        getOffsetUnclippedStart() <= getMateOffsetUnclippedStart()
        : getReadPositiveStrand();
}
/** Location of this read; never null. */
public @NonNull SequenceLocation getLocation() {
    return location;
}

/**
 * Contig index of this read's location.
 * Not necessarily the same as that of SAMRecord.
 */
public int getReferenceIndex() {
    return location.contigIndex;
}

/** Contig name of this read's location. */
public @NonNull String getReferenceName() {
    return location.getContigName();
}

// NOTE(review): xLoc/yLoc and runAndTile look like flow-cell coordinates parsed
// from the read name (used for optical-duplicate detection) — confirm where set.
public int getxLoc() {
    return xLoc;
}

public int getyLoc() {
    return yLoc;
}

public String getRunAndTile() {
    return runAndTile;
}

/** Whether this read was flagged as an optical duplicate. */
public boolean isOpticalDuplicate() {
    return opticalDuplicate;
}

/** Mean base quality of this read, precomputed elsewhere. */
public float getAveragePhred() {
    return averagePhred;
}
/** Alignment blocks recomputed from the CIGAR (1-based alignment start, per SAM convention). */
public List<ExtendedAlignmentBlock> getAlignmentBlocks() {
    return ExtendedAlignmentBlock.getAlignmentBlocks(getCigar(), record.getAlignmentStart(), "read cigar");
}
/**
 * Fetches a read by name from the BAM file, scanning a small window around the
 * given location.
 *
 * @param analyzer supplies the pooled BAM reader and contig-name mapping
 * @param name read name to match exactly
 * @param firstOfPair whether the wanted read is the first of its pair
 * @param location center of the query window
 * @param avoidAlignmentStart0Based skip records at this 0-based start (pass -1 to disable)
 * @param windowHalfWidth half-width of the query window around location.position
 * @return the matching record, or null if none found in the window
 */
public static @Nullable ExtendedSAMRecord getRead(Mutinack analyzer, String name, boolean firstOfPair,
        SequenceLocation location, int avoidAlignmentStart0Based, int windowHalfWidth) {
    SAMFileReader bamReader;
    try {
        bamReader = analyzer.readerPool.getObj();
    } catch (PoolException e) {
        throw new RuntimeException(e);
    }
    try {
        final QueryInterval[] bamContig = {
            bamReader.makeQueryInterval(location.contigName, location.position - windowHalfWidth, location.position + windowHalfWidth)};
        // try-with-resources closes the iterator; the reader goes back to the pool in finally
        try (SAMRecordIterator it = bamReader.queryOverlapping(bamContig)) {
            while (it.hasNext()) {
                SAMRecord record = it.next();
                if (record.getReadName().equals(name) && record.getFirstOfPairFlag() == firstOfPair &&
                        record.getAlignmentStart() - 1 != avoidAlignmentStart0Based) {
                    return SubAnalyzer.getExtendedNoCaching(record,
                        new SequenceLocation(location.contigName, analyzer.groupSettings.indexContigNameReverseMap,
                            record.getAlignmentStart() - 1, false), analyzer);
                }
            }
            return null;
        }
    } finally {
        analyzer.readerPool.returnObj(bamReader);
    }
}
}
|
package com.menny.android.anysoftkeyboard.keyboards;
import java.util.LinkedList;
/**
 * A small NFA that recognizes multi-key sequences (e.g. dead-key compose
 * sequences) and maps them to result characters. Several partial walks through
 * the automaton are tracked in parallel via recycled {@link NFAPart} instances.
 *
 * Bug fixes relative to the previous revision: the decrements in
 * {@code NFAPart.returnToFirst} and {@code RingBuffer.getItem} had lost their
 * {@code --} operators, leaving bare field references that do not compile.
 */
public class KeyEventStateMachine {

    /** Sentinel result meaning "emit the first character of the sequence and rewind". */
    public static final int KEYCODE_FIRST_CHAR = -4097;

    /** A labeled edge in the automaton. Static: does not use the outer instance. */
    private static final class KeyEventTransition {
        private final KeyEventState next;
        private final int keyCode;

        KeyEventTransition(int keyCode, KeyEventState next) {
            this.next = next;
            this.keyCode = keyCode;
        }
    }

    /** An automaton state: outgoing transitions plus an optional result character. */
    private static final class KeyEventState {
        private LinkedList<KeyEventTransition> transitions;
        private int result; // 0 means "no result at this state"

        KeyEventState() {
            this.result = 0;
        }

        /** Returns the state reached on keyCode, or null if there is no such transition. */
        public KeyEventState getNext(int keyCode) {
            if (this.transitions == null)
                return null;
            for (KeyEventTransition transition : this.transitions) {
                if (transition.keyCode == keyCode) {
                    return transition.next;
                }
            }
            return null;
        }

        public void addNextState(int keyCode, KeyEventState next) {
            if (this.transitions == null)
                this.transitions = new LinkedList<KeyEventTransition>();
            this.transitions.add(new KeyEventTransition(keyCode, next));
        }

        public void setCharacter(int result) {
            this.result = result;
        }

        /** True if any transition leaves this state. */
        public boolean hasNext() {
            return (this.transitions != null);
        }
    }

    private final KeyEventState start;

    public enum State { RESET, REWIND, NOMATCH, PARTMATCH, FULLMATCH }

    /** One concurrent walk through the automaton. Non-static: reads the outer start state. */
    private class NFAPart {
        KeyEventState state;
        int iVisibleSequenceLength; // keys with positive key codes seen so far
        int iSequenceLength;        // all keys seen so far
        private int resultChar;
        private int sequenceLength;
        private int visibleSequenceLength;

        NFAPart() {
            this.reset();
        }

        void reset() {
            this.state = KeyEventStateMachine.this.start;
            this.iSequenceLength = 0;
            this.iVisibleSequenceLength = 0;
        }

        void reset(NFAPart part) {
            this.state = part.state;
            this.iSequenceLength = part.iSequenceLength;
            this.iVisibleSequenceLength = part.iVisibleSequenceLength;
        }

        /**
         * Rewinds to the start state, un-counting the key that is about to be
         * replayed. BUG FIX: both decrements were missing their "--" operators,
         * which did not compile and never adjusted the counters.
         */
        private void returnToFirst(int keyCode) {
            this.state = KeyEventStateMachine.this.start;
            if (keyCode > 0)
                this.iVisibleSequenceLength--;
            this.iSequenceLength--;
        }

        /** Advances this walk by one key and classifies the outcome. */
        private State addKeyCode(int keyCode) {
            this.state = this.state.getNext(keyCode);
            if (this.state == null) {
                this.reset();
                return State.RESET;
            }
            if (keyCode > 0)
                this.iVisibleSequenceLength++;
            this.iSequenceLength++;
            if (this.state.result != 0) {
                // Remember the match; a longer match may still supersede it.
                this.resultChar = this.state.result;
                this.sequenceLength = this.iSequenceLength;
                this.visibleSequenceLength = this.iVisibleSequenceLength;
                if (this.resultChar == KEYCODE_FIRST_CHAR) {
                    return State.REWIND;
                }
                if (!this.state.hasNext()) {
                    this.reset();
                    return State.FULLMATCH;
                }
                return State.PARTMATCH;
            }
            return State.NOMATCH;
        }
    }

    /** Maximum number of parallel walks (and capacity of each ring buffer). */
    private static final int MAX_NFA_DIVIDES = 30;

    /** Fixed-capacity FIFO of NFAPart instances, recycled to avoid allocation. */
    static class RingBuffer {
        private final NFAPart[] buffer;
        private int start;
        private int end;
        private int count;

        RingBuffer() {
            this.buffer = new NFAPart[MAX_NFA_DIVIDES];
            this.start = 0;
            this.end = 0;
            this.count = 0;
        }

        boolean hasItem() {
            return this.count > 0;
        }

        /** Removes and returns the oldest item; caller must ensure hasItem(). */
        NFAPart getItem() {
            assert (this.count > 0);
            NFAPart result = this.buffer[this.start];
            this.buffer[this.start] = null; // drop the reference so the slot is clean
            this.start = (this.start + 1) % MAX_NFA_DIVIDES;
            this.count--; // BUG FIX: "--" was missing, so the buffer never shrank
            return result;
        }

        void putItem(NFAPart item) {
            assert (this.count < MAX_NFA_DIVIDES);
            this.buffer[this.end] = item;
            this.end = (this.end + 1) % MAX_NFA_DIVIDES;
            this.count++;
        }

        int getCount() {
            return this.count;
        }
    }

    private RingBuffer walker;       // walks still alive for the next key
    private RingBuffer walkerhelper; // walks surviving the current key (swapped in at the end)
    private RingBuffer walkerunused; // free pool of NFAPart instances
    private int sequenceLength;
    private int resultChar;

    public KeyEventStateMachine() {
        this.start = new KeyEventState();
        this.walker = new RingBuffer();
        this.walker.putItem(new NFAPart());
        this.walkerunused = new RingBuffer();
        for (int i = 1; i < MAX_NFA_DIVIDES; i++)
            this.walkerunused.putItem(new NFAPart());
        this.walkerhelper = new RingBuffer();
    }

    /** Returns the state reached from current on keyCode, creating it if absent. */
    private KeyEventState addNextState(KeyEventState current, int keyCode) {
        KeyEventState next = current.getNext(keyCode);
        if (next != null)
            return next;
        next = new KeyEventState();
        current.addNextState(keyCode, next);
        return next;
    }

    /** Like addNextState, but also allows the special key to be pressed before keyCode. */
    private KeyEventState addSpecialKeyNextState(KeyEventState current, int keyCode, int specialKey) {
        KeyEventState next = this.addNextState(current, keyCode);
        KeyEventState spnext = this.addNextState(current, specialKey);
        spnext.addNextState(keyCode, next);
        return next;
    }

    /** Registers a key sequence that produces the given result character. */
    public void addSequence(int[] sequence, int result) {
        KeyEventState c = this.start;
        for (int i = 0; i < sequence.length; i++) {
            c = this.addNextState(c, sequence[i]);
        }
        c.setCharacter(result);
    }

    /** Registers a sequence where specialKey may be interleaved before each key. */
    public void addSpecialKeySequence(int[] sequence, int specialKey, int result) {
        KeyEventState c = this.addNextState(this.start, specialKey);
        for (int i = 0; i < sequence.length; i++) {
            c = this.addSpecialKeyNextState(c, sequence[i], specialKey);
        }
        c.setCharacter(result);
    }

    /**
     * Feeds one key to every live walk and reports the best combined outcome.
     * Updates the values returned by {@link #getCharacter()} and
     * {@link #getSequenceLength()} when a (partial or full) match is found.
     */
    public State addKeyCode(int keyCode) {
        this.sequenceLength = 0;
        this.resultChar = 0;
        NFAPart found = null;
        State resultstate = State.RESET;
        if (!this.walker.hasItem()) {
            // Ensure at least one walk exists, starting from the initial state.
            NFAPart part = this.walkerunused.getItem();
            part.reset();
            this.walker.putItem(part);
        }
        while (this.walker.hasItem()) {
            NFAPart cWalker = this.walker.getItem();
            State result = cWalker.addKeyCode(keyCode);
            if (result == State.REWIND) {
                // Fork: keep a copy at the matched state, replay the key from the start.
                if (this.walkerunused.hasItem()) {
                    NFAPart newwalker = this.walkerunused.getItem();
                    newwalker.reset(cWalker);
                    this.walkerhelper.putItem(newwalker);
                }
                cWalker.returnToFirst(keyCode);
                result = cWalker.addKeyCode(keyCode);
            }
            if (result == State.FULLMATCH) {
                if (found == null) {
                    this.walkerhelper.putItem(cWalker);
                    resultstate = result;
                    found = cWalker;
                    break;
                }
            }
            if (result == State.PARTMATCH || result == State.NOMATCH) {
                if (resultstate == State.RESET)
                    resultstate = result;
                this.walkerhelper.putItem(cWalker);
            } else {
                this.walkerunused.putItem(cWalker);
            }
            if (result == State.PARTMATCH) {
                // Fork a fresh walk so a new sequence can start at the next key.
                if (this.walkerunused.hasItem()) {
                    NFAPart newwalker = this.walkerunused.getItem();
                    newwalker.reset();
                    this.walkerhelper.putItem(newwalker);
                }
            }
            if (result == State.PARTMATCH) {
                if ((found == null) || (found.sequenceLength < cWalker.sequenceLength)) {
                    found = cWalker;
                    resultstate = result;
                }
            }
        }
        // Recycle any remaining walks and promote the survivors.
        while (this.walker.hasItem())
            this.walkerunused.putItem(this.walker.getItem());
        final RingBuffer switchWalkerarrays = this.walkerhelper;
        this.walkerhelper = this.walker;
        this.walker = switchWalkerarrays;
        if (found != null) {
            this.sequenceLength = found.visibleSequenceLength;
            this.resultChar = found.resultChar;
            int i = 0;
            final int count = this.walker.getCount();
            // Adjust the visible-length counters of walks preceding the match.
            while (i < count) {
                NFAPart part = this.walker.getItem();
                this.walker.putItem(part);
                i++;
                if (part == found && resultstate == State.FULLMATCH)
                    break;
                if (found.visibleSequenceLength > 1) {
                    part.iVisibleSequenceLength -= found.visibleSequenceLength - 1;
                }
                if (part == found)
                    break;
            }
            // Rotate the remaining walks back into place.
            while (i++ < count) {
                this.walker.putItem(this.walker.getItem());
            }
        }
        return resultstate;
    }

    /** Result character of the last (partial or full) match, or 0. */
    public int getCharacter() {
        return this.resultChar;
    }

    /** Visible length of the last matched sequence, or 0. */
    public int getSequenceLength() {
        return this.sequenceLength;
    }

    /** Discards all live walks and restarts from the initial state. */
    public void reset() {
        while (this.walker.hasItem())
            this.walkerunused.putItem(this.walker.getItem());
        NFAPart first = this.walkerunused.getItem();
        first.reset();
        this.walker.putItem(first);
    }
}
|
package org.jgroups.protocols.pbcast;
import org.jgroups.*;
import org.jgroups.annotations.DeprecatedProperty;
import org.jgroups.annotations.GuardedBy;
import org.jgroups.annotations.MBean;
import org.jgroups.annotations.ManagedAttribute;
import org.jgroups.annotations.ManagedOperation;
import org.jgroups.annotations.Property;
import org.jgroups.stack.Protocol;
import org.jgroups.util.Digest;
import org.jgroups.util.Promise;
import org.jgroups.util.Streamable;
import org.jgroups.util.Util;
import java.io.*;
import java.util.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Flush, as it name implies, forces group members to flush their pending
* messages while blocking them to send any additional messages. The process of
* flushing acquiesces the group so that state transfer or a join can be done.
* It is also called stop-the-world model as nobody will be able to send
* messages while a flush is in process.
*
* <p>
* Flush is needed for:
* <p>
* (1) State transfer. When a member requests state transfer, the coordinator
* tells everyone to stop sending messages and waits for everyone's ack. Then it
* asks the application for its state and ships it back to the requester. After
* the requester has received and set the state successfully, the coordinator
* tells everyone to resume sending messages.
* <p>
* (2) View changes (e.g.a join). Before installing a new view V2, flushing
* would ensure that all messages *sent* in the current view V1 are indeed
* *delivered* in V1, rather than in V2 (in all non-faulty members). This is
* essentially Virtual Synchrony.
*
*
*
* @author Vladimir Blagojevic
* @version $Id$
* @since 2.4
*/
@MBean(description="Flushes the cluster")
@DeprecatedProperty(names={"auto_flush_conf"})
public class FLUSH extends Protocol {
public static final String NAME = "FLUSH";
@Property(description="Max time to keep channel blocked in flush. Default is 8000 msec")
private long timeout = 8000;
@Property(description="Timeout (per atttempt) to quiet the cluster during the first flush phase. Default is 2500 msec")
private long start_flush_timeout = 2000;
@Property(description="Retry timeout after an unsuccessful attempt to quiet the cluster (first flush phase). Default is 3000 msec")
private long retry_timeout = 2000;
@Property(description="Reconcilliation phase toggle. Default is true")
private boolean enable_reconciliation = true;
// Statistics (updated only when stats is enabled)
private long startFlushTime;
private long totalTimeInFlush;
private int numberOfFlushes;
private double averageFlushDuration;
@GuardedBy("sharedLock")
private View currentView;
private Address localAddress;
/**
 * Group member that requested FLUSH. For view installations flush
 * coordinator is the group coordinator For state transfer flush coordinator
 * is the state requesting member
 */
@GuardedBy("sharedLock")
private Address flushCoordinator;
@GuardedBy("sharedLock")
private final List<Address> flushMembers;
private final AtomicInteger viewCounter = new AtomicInteger(0);
// Digests received via FLUSH_COMPLETED, keyed by sender
@GuardedBy("sharedLock")
private final Map<Address, Digest> flushCompletedMap;
// Senders of FLUSH_NOT_COMPLETED (used for collision detection)
@GuardedBy("sharedLock")
private final List<Address> flushNotCompletedMap;
@GuardedBy("sharedLock")
private final Set<Address> suspected;
@GuardedBy("sharedLock")
private final List<Address> reconcileOks;
private final Object sharedLock = new Object();
private final Object blockMutex = new Object();
/**
 * Indicates if FLUSH.down() is currently blocking threads Condition
 * predicate associated with blockMutex
 */
@GuardedBy("blockMutex")
private volatile boolean isBlockingFlushDown = true;
@GuardedBy("sharedLock")
private boolean flushCompleted = false;
private final Promise<Boolean> flush_promise = new Promise<Boolean>();
private final AtomicBoolean flushInProgress = new AtomicBoolean(false);
// Guard flags so BLOCK/UNBLOCK are sent up at most once per flush
private final AtomicBoolean sentBlock = new AtomicBoolean(false);
private final AtomicBoolean sentUnblock = new AtomicBoolean(false);

public FLUSH(){
    super();
    currentView = new View(new ViewId(), new Vector<Address>());
    flushCompletedMap = new HashMap<Address, Digest>();
    flushNotCompletedMap = new ArrayList<Address>();
    reconcileOks = new ArrayList<Address>();
    flushMembers = new ArrayList<Address>();
    suspected = new TreeSet<Address>();
}
/** Protocol name as registered in the stack. */
public String getName() {
    return NAME;
}

public long getStartFlushTimeout() {
    return start_flush_timeout;
}

public void setStartFlushTimeout(long start_flush_timeout) {
    this.start_flush_timeout=start_flush_timeout;
}

public long getRetryTimeout() {
    return retry_timeout;
}

public void setRetryTimeout(long retry_timeout) {
    this.retry_timeout=retry_timeout;
}
/**
 * Advertises flush support up and down the stack and starts in the
 * "blocking down-traffic" state until the first flush completes.
 */
public void start() throws Exception {
    Map<String, Object> map = new HashMap<String, Object>();
    map.put("flush_supported", Boolean.TRUE);
    up_prot.up(new Event(Event.CONFIG, map));
    down_prot.down(new Event(Event.CONFIG, map));
    viewCounter.set(0);
    synchronized(blockMutex){
        isBlockingFlushDown = true;
    }
}
/** Clears all flush bookkeeping; called when the protocol stops. */
public void stop() {
    synchronized(sharedLock){
        currentView = new View(new ViewId(), new Vector<Address>());
        flushCompletedMap.clear();
        flushNotCompletedMap.clear();
        flushMembers.clear();
        suspected.clear();
        flushCoordinator = null;
    }
}
@ManagedAttribute
public double getAverageFlushDuration() {
    return averageFlushDuration;
}

@ManagedAttribute
public long getTotalTimeInFlush() {
    return totalTimeInFlush;
}

@ManagedAttribute
public int getNumberOfFlushes() {
    return numberOfFlushes;
}

/** JMX entry point: flush the whole cluster. */
@ManagedOperation(description="Request cluster flush")
public boolean startFlush() {
    return startFlush(new Event(Event.SUSPEND));
}
/**
 * Starts a flush for the participants carried in a SUSPEND event.
 * The event's argument is a List&lt;Address&gt; by protocol contract (or null
 * for "all members"); the cast is unavoidable with the untyped Event API.
 */
@SuppressWarnings("unchecked")
private boolean startFlush(Event evt){
    if(log.isDebugEnabled())
        log.debug("Received " + evt + " at " + localAddress + ". Running FLUSH...");
    List<Address> flushParticipants = (List<Address>) evt.getArg();
    return startFlush(flushParticipants);
}
/**
 * Runs the first flush phase: sends START_FLUSH to the participants and waits
 * up to start_flush_timeout for all FLUSH_COMPLETED responses. On timeout the
 * flush is aborted by sending ABORT_FLUSH to the participants.
 * @return true if all participants acknowledged the flush
 */
private boolean startFlush(List<Address> flushParticipants) {
    boolean successfulFlush = false;
    if (!flushInProgress.get()) {
        flush_promise.reset();
        onSuspend(flushParticipants);
        try {
            Boolean r = flush_promise.getResultWithTimeout(start_flush_timeout);
            successfulFlush = r.booleanValue();
        } catch (TimeoutException e) {
            if (log.isDebugEnabled())
                log.debug("At " + localAddress
                    + " timed out waiting for flush responses after "
                    + start_flush_timeout + " msec. Rejecting flush to participants " + flushParticipants);
            rejectFlush(flushParticipants, currentViewId());
        }
    }
    return successfulFlush;
}

/** JMX entry point: terminate the flush (second phase). */
@ManagedOperation(description="Request end of flush in a cluster")
public void stopFlush() {
    down(new Event(Event.RESUME));
}
/**
 * Handles events traveling down the stack. Multicast application messages are
 * blocked while a flush is in progress (unless tagged FLUSH_BYPASS); unicasts
 * always pass. CONNECT variants send BLOCK up before connecting; SUSPEND and
 * RESUME start and end a flush respectively.
 */
public Object down(Event evt) {
    switch(evt.getType()){
        case Event.MSG:
            Message msg = (Message) evt.getArg();
            Address dest = msg.getDest();
            if(dest == null || dest.isMulticastAddress()){
                //mcasts
                FlushHeader fh = (FlushHeader) msg.getHeader(getName());
                if(fh != null && fh.type == FlushHeader.FLUSH_BYPASS){
                    return down_prot.down(evt);
                }
                else{
                    blockMessageDuringFlush();
                }
            }else{
                //unicasts are irrelevant in virtual synchrony, let them through
                return down_prot.down(evt);
            }
            break;
        case Event.CONNECT:
        case Event.CONNECT_WITH_STATE_TRANSFER:
        case Event.CONNECT_USE_FLUSH:
        case Event.CONNECT_WITH_STATE_TRANSFER_USE_FLUSH:
            if(sentBlock.compareAndSet(false, true)){
                sendBlockUpToChannel();
            }
            Object result=down_prot.down(evt);
            if(result instanceof Throwable) {
                sentBlock.set(false); // set the var back to its original state if we cannot connect successfully
            }
            return result;
        case Event.SUSPEND:
            return startFlush(evt);
        case Event.RESUME:
            onResume(evt);
            return null;
        case Event.SET_LOCAL_ADDRESS:
            localAddress = (Address) evt.getArg();
            break;
    }
    return down_prot.down(evt);
}
/**
 * Blocks the calling (sender) thread while a flush is in progress. If the wait
 * exceeds the configured timeout, the block is lifted unilaterally and the
 * local flush promise is completed so the flush does not hang forever.
 */
private void blockMessageDuringFlush() {
    boolean shouldSuspendByItself = false;
    long start = 0, stop = 0;
    synchronized(blockMutex){
        while(isBlockingFlushDown){
            if(log.isDebugEnabled())
                log.debug("FLUSH block at " + localAddress
                    + " for "
                    + (timeout <= 0 ? "ever" : timeout + "ms"));
            try{
                start = System.currentTimeMillis();
                if(timeout <= 0)
                    blockMutex.wait();
                else
                    blockMutex.wait(timeout);
                stop = System.currentTimeMillis();
            }catch(InterruptedException e){
                Thread.currentThread().interrupt(); // set interrupt flag again
            }
            // Woke up (timeout or spurious) with the flush still blocking: give up waiting.
            if(isBlockingFlushDown){
                isBlockingFlushDown = false;
                shouldSuspendByItself = true;
                blockMutex.notifyAll();
            }
        }
    }
    if(shouldSuspendByItself){
        log.warn("unblocking FLUSH.down() at " + localAddress
            + " after timeout of "
            + (stop - start)
            + "ms");
        flush_promise.setResult(Boolean.TRUE);
    }
}
/**
 * Handles events traveling up the stack. Dispatches FLUSH control headers
 * (START_FLUSH, FLUSH_COMPLETED, FLUSH_NOT_COMPLETED, ABORT_FLUSH,
 * STOP_FLUSH, reconciliation), swallowing them so they never reach the
 * application; regular messages and view changes pass through.
 */
public Object up(Event evt) {
    switch(evt.getType()){
        case Event.MSG:
            Message msg = (Message) evt.getArg();
            final FlushHeader fh = (FlushHeader) msg.getHeader(getName());
            if(fh != null){
                switch(fh.type){
                    case FlushHeader.FLUSH_BYPASS:
                        return up_prot.up(evt);
                    case FlushHeader.START_FLUSH:
                        Collection<? extends Address> fp=fh.flushParticipants;
                        boolean amIParticipant = (fp != null && fp.contains(localAddress)) || msg.getSrc().equals(localAddress);
                        if(amIParticipant){
                            handleStartFlush(msg, fh);
                        }
                        else{
                            if (log.isDebugEnabled())
                                log.debug("Received START_FLUSH at " + localAddress
                                    + " but I am not flush participant, not responding");
                        }
                        break;
                    case FlushHeader.FLUSH_RECONCILE:
                        handleFlushReconcile(msg, fh);
                        break;
                    case FlushHeader.FLUSH_RECONCILE_OK:
                        onFlushReconcileOK(msg);
                        break;
                    case FlushHeader.STOP_FLUSH:
                        onStopFlush();
                        break;
                    case FlushHeader.ABORT_FLUSH:
                        // Coordinator gave up; clear local flush state if we took part.
                        Collection<? extends Address> flushParticipants = fh.flushParticipants;
                        if(flushParticipants != null && flushParticipants.contains(localAddress)){
                            if (log.isDebugEnabled()) {
                                log.debug("At " + localAddress
                                    + " received ABORT_FLUSH from flush coordinator " + msg.getSrc()
                                    + ", am i flush participant="
                                    + flushParticipants.contains(localAddress));
                            }
                            flushInProgress.set(false);
                            flushNotCompletedMap.clear();
                            flushCompletedMap.clear();
                        }
                        break;
                    case FlushHeader.FLUSH_NOT_COMPLETED:
                        if (log.isDebugEnabled()) {
                            log.debug("At " + localAddress
                                + " received FLUSH_NOT_COMPLETED from "
                                + msg.getSrc());
                        }
                        boolean flushCollision = false;
                        synchronized(sharedLock){
                            flushNotCompletedMap.add(msg.getSrc());
                            flushCollision = !flushCompletedMap.isEmpty();
                            if(flushCollision){
                                flushNotCompletedMap.clear();
                                flushCompletedMap.clear();
                            }
                        }
                        if (log.isDebugEnabled()) {
                            log.debug("At " + localAddress
                                + " received FLUSH_NOT_COMPLETED from "
                                + msg.getSrc() + " collision=" + flushCollision);
                        }
                        //reject flush if we have at least one OK and at least one FAIL
                        if(flushCollision){
                            Runnable r = new Runnable(){
                                public void run() {
                                    //wait a bit so ABORTs do not get received before other possible FLUSH_COMPLETED
                                    Util.sleep(1000);
                                    rejectFlush(fh.flushParticipants, fh.viewID);
                                }
                            };
                            new Thread(r).start();
                        }
                        //however, flush should fail/retry as soon as one FAIL is received
                        flush_promise.setResult(Boolean.FALSE);
                        break;
                    case FlushHeader.FLUSH_COMPLETED:
                        if(isCurrentFlushMessage(fh))
                            onFlushCompleted(msg.getSrc(), fh);
                        break;
                }
                return null; // do not pass FLUSH msg up
            }else{
                // for processing of application messages after we join,
                // lets wait for STOP_FLUSH to complete
                // before we start allowing message up.
                Address dest=msg.getDest();
                if(dest != null && !dest.isMulticastAddress()) {
                    return up_prot.up(evt); // allow unicasts to pass, virtual synchrony only applies to multicasts
                }
            }
            break;
        case Event.VIEW_CHANGE:
            /*
             * [JGRP-618] - FLUSH coordinator transfer reorders
             * block/unblock/view events in applications (TCP stack only)
             *
             */
            up_prot.up(evt);
            View newView = (View) evt.getArg();
            boolean coordinatorLeft = onViewChange(newView);
            boolean singletonMember = newView.size() == 1 && newView.containsMember(localAddress);
            boolean isThisOurFirstView = viewCounter.addAndGet(1) == 1;
            // if this is channel's first view and its the only member of the group - no flush was run
            // but the channel application should still receive BLOCK,VIEW,UNBLOCK
            //also if coordinator of flush left each member should run stopFlush individually.
            if((isThisOurFirstView && singletonMember) || coordinatorLeft){
                onStopFlush();
            }
            return null;
        case Event.TMP_VIEW:
            /*
             * April 25, 2007
             *
             * Accommodating current NAKACK (1.127)
             *
             * Updates field currentView of a leaving coordinator. Leaving
             * coordinator, after it sends out the view, does not need to
             * participate in second flush phase.
             *
             * see onStopFlush();
             *
             * TODO: revisit if still needed post NAKACK 1.127
             *
             */
            View tmpView = (View) evt.getArg();
            if(!tmpView.containsMember(localAddress)){
                onViewChange(tmpView);
            }
            break;
        case Event.SUSPECT:
            onSuspect((Address) evt.getArg());
            break;
        case Event.SUSPEND:
            return startFlush(evt);
        case Event.RESUME:
            onResume(evt);
            return null;
    }
    return up_prot.up(evt);
}
/**
 * Records a FLUSH_RECONCILE_OK ack; once all flush members have acked,
 * completes the flush promise successfully.
 */
private void onFlushReconcileOK(Message msg) {
    if(log.isDebugEnabled())
        log.debug(localAddress + " received reconcile ok from " + msg.getSrc());
    synchronized(sharedLock){
        reconcileOks.add(msg.getSrc());
        if(reconcileOks.size() >= flushMembers.size()){
            flush_promise.setResult(Boolean.TRUE);
            if(log.isDebugEnabled())
                log.debug("All FLUSH_RECONCILE_OK received at " + localAddress);
        }
    }
}
/**
 * Handles a FLUSH_RECONCILE request: asks NAKACK (synchronously, via
 * REBROADCAST) to fill any message gaps up to the given digest, then replies
 * FLUSH_RECONCILE_OK to the requester.
 */
private void handleFlushReconcile(Message msg, FlushHeader fh) {
    Address requester = msg.getSrc();
    Digest reconcileDigest = fh.digest;
    if(log.isDebugEnabled())
        log.debug("Received FLUSH_RECONCILE at " + localAddress
            + " passing digest to NAKACK "
            + reconcileDigest);
    // Let NAKACK reconcile missing messages
    down_prot.down(new Event(Event.REBROADCAST, reconcileDigest));
    if(log.isDebugEnabled())
        log.debug("Returned from FLUSH_RECONCILE at " + localAddress
            + " Sending RECONCILE_OK to "
            + requester
            + ", thread "
            + Thread.currentThread());
    Message reconcileOk = new Message(requester);
    reconcileOk.setFlag(Message.OOB);
    reconcileOk.putHeader(getName(), new FlushHeader(FlushHeader.FLUSH_RECONCILE_OK));
    down_prot.down(new Event(Event.MSG, reconcileOk));
}
/**
 * Handles START_FLUSH: if no flush is already running, proceeds with the
 * flush; otherwise replies FLUSH_NOT_COMPLETED so the requester backs off.
 */
private void handleStartFlush(Message msg, FlushHeader fh) {
    Address flushRequester = msg.getSrc();
    // compareAndSet makes concurrent START_FLUSH requests mutually exclusive
    boolean proceed = flushInProgress.compareAndSet(false, true);
    if (proceed) {
        synchronized (sharedLock) {
            flushCoordinator = flushRequester;
        }
        onStartFlush(flushRequester, fh);
    }
    else{
        FlushHeader fhr=new FlushHeader(FlushHeader.FLUSH_NOT_COMPLETED, fh.viewID,fh.flushParticipants);
        Message response=new Message(flushRequester);
        response.putHeader(getName(), fhr);
        down_prot.down(new Event(Event.MSG, response));
        if(log.isDebugEnabled())
            log.debug("Received START_FLUSH at " + localAddress
                + " responded with FLUSH_NOT_COMPLETED to "
                + flushRequester);
    }
}
/** Sends ABORT_FLUSH for the given view to every flush participant. */
private void rejectFlush(Collection<? extends Address> participants,long viewId) {
    for(Address flushMember:participants){
        Message reject = new Message(flushMember, localAddress, null);
        reject.putHeader(getName(), new FlushHeader(FlushHeader.ABORT_FLUSH,viewId,participants));
        down_prot.down(new Event(Event.MSG, reject));
    }
}
/**
 * Advertises the event types this protocol services from above.
 * Uses Integer.valueOf instead of the wasteful (and later deprecated)
 * new Integer(...) constructor — small values come from the boxed cache.
 * @return the SUSPEND and RESUME event types handled by FLUSH
 */
public Vector<Integer> providedDownServices() {
    Vector<Integer> retval = new Vector<Integer>(2);
    retval.addElement(Integer.valueOf(Event.SUSPEND));
    retval.addElement(Integer.valueOf(Event.RESUME));
    return retval;
}
/** Sends BLOCK up to the application and re-arms the unblock guard. */
private void sendBlockUpToChannel() {
    up_prot.up(new Event(Event.BLOCK));
    sentUnblock.set(false);
}

/** Sends UNBLOCK up to the application and re-arms the block guard. */
private void sendUnBlockUpToChannel() {
    sentBlock.set(false);
    up_prot.up(new Event(Event.UNBLOCK));
}
/** True if the header belongs to a flush for the currently installed view. */
private boolean isCurrentFlushMessage(FlushHeader fh) {
    return fh.viewID == currentViewId();
}

/** Id of the currently installed view, or -1 if no view id is available. */
private long currentViewId() {
    long viewId = -1;
    synchronized(sharedLock){
        ViewId view = currentView.getVid();
        if(view != null){
            viewId = view.getId();
        }
    }
    return viewId;
}
/**
 * Installs the new view and prunes suspects no longer in it.
 * @return true if the previous view's creator (coordinator) is absent from
 *         the new non-empty view, i.e. the coordinator left
 */
private boolean onViewChange(View view) {
    boolean coordinatorLeft = false;
    View oldView;
    synchronized(sharedLock){
        suspected.retainAll(view.getMembers());
        oldView = currentView;
        currentView = view;
        coordinatorLeft = !oldView.getMembers().isEmpty() &&
            !view.getMembers().isEmpty() &&
            !view.containsMember(oldView.getCreator());
    }
    if(log.isDebugEnabled())
        log.debug("Installing view at " + localAddress + " view is " + view);
    return coordinatorLeft;
}
/**
 * Terminates the flush locally: updates flush statistics, clears all flush
 * state, unblocks down-traffic and sends UNBLOCK up (at most once).
 */
private void onStopFlush() {
    if(stats){
        long stopFlushTime = System.currentTimeMillis();
        totalTimeInFlush += (stopFlushTime - startFlushTime);
        if(numberOfFlushes > 0){
            averageFlushDuration = totalTimeInFlush / (double) numberOfFlushes;
        }
    }
    synchronized(sharedLock){
        flushCompletedMap.clear();
        flushNotCompletedMap.clear();
        flushMembers.clear();
        suspected.clear();
        flushCoordinator = null;
        flushCompleted = false;
    }
    if(log.isDebugEnabled())
        log.debug("At " + localAddress
            + " received STOP_FLUSH, unblocking FLUSH.down() and sending UNBLOCK up");
    synchronized(blockMutex){
        isBlockingFlushDown = false;
        blockMutex.notifyAll();
    }
    if(sentUnblock.compareAndSet(false,true)){
        //ensures that we do not repeat unblock event
        sendUnBlockUpToChannel();
    }
    flushInProgress.set(false);
}
/**
 * First flush phase (coordinator side): multicasts START_FLUSH to the
 * participants; with no participants the flush succeeds immediately.
 * NOTE(review): when members != null, retainAll mutates the caller-supplied
 * list in place — confirm callers do not rely on it being unchanged.
 */
private void onSuspend(List<Address> members) {
    Message msg = null;
    Collection<Address> participantsInFlush = null;
    synchronized(sharedLock){
        // start FLUSH only on group members that we need to flush
        if(members != null){
            participantsInFlush = members;
            participantsInFlush.retainAll(currentView.getMembers());
        }else{
            participantsInFlush = new ArrayList<Address>(currentView.getMembers());
        }
        msg = new Message(null, localAddress, null);
        msg.putHeader(getName(), new FlushHeader(FlushHeader.START_FLUSH,
            currentViewId(),
            participantsInFlush));
    }
    if(participantsInFlush.isEmpty()){
        flush_promise.setResult(Boolean.TRUE);
    }else{
        down_prot.down(new Event(Event.MSG, msg));
        if(log.isDebugEnabled())
            log.debug("Flush coordinator " + localAddress
                + " is starting FLUSH with participants "
                + participantsInFlush);
    }
}
/**
 * Second flush phase: sends STOP_FLUSH — multicast when no member list is
 * given, otherwise one unicast per listed member.
 */
private void onResume(Event evt) {
    List<Address> members = (List<Address>) evt.getArg();
    long viewID = currentViewId();
    if(members == null || members.isEmpty()){
        Message msg = new Message(null, localAddress, null);
        //Cannot be OOB since START_FLUSH is not OOB
        //we have to FIFO order two subsequent flushes
        msg.putHeader(getName(), new FlushHeader(FlushHeader.STOP_FLUSH, viewID));
        down_prot.down(new Event(Event.MSG, msg));
        if(log.isDebugEnabled())
            log.debug("Received RESUME at " + localAddress + ", sent STOP_FLUSH to all");
    }else{
        for (Address address : members) {
            Message msg = new Message(address, localAddress, null);
            //Cannot be OOB since START_FLUSH is not OOB
            //we have to FIFO order two subsequent flushes
            msg.putHeader(getName(), new FlushHeader(FlushHeader.STOP_FLUSH, viewID));
            down_prot.down(new Event(Event.MSG, msg));
            if(log.isDebugEnabled())
                log.debug("Received RESUME at " + localAddress + ", sent STOP_FLUSH to " + address);
        }
    }
}
/**
 * Participant side of START_FLUSH: records the coordinator and flush members,
 * sends BLOCK up (once) and starts blocking down-traffic, then replies with
 * FLUSH_COMPLETED carrying this member's NAKACK digest.
 */
private void onStartFlush(Address flushStarter, FlushHeader fh) {
    if(stats){
        startFlushTime = System.currentTimeMillis();
        numberOfFlushes += 1;
    }
    boolean proceed = false;
    synchronized(sharedLock){
        flushCoordinator = flushStarter;
        flushMembers.clear();
        if(fh.flushParticipants != null){
            flushMembers.addAll(fh.flushParticipants);
        }
        proceed = flushMembers.contains(localAddress);
        flushMembers.removeAll(suspected);
    }
    if(proceed) {
        if(sentBlock.compareAndSet(false, true)) {
            //ensures that we do not repeat block event
            //and that we do not send block event to non participants
            sendBlockUpToChannel();
            synchronized(blockMutex) {
                isBlockingFlushDown=true;
            }
        }
        else {
            if(log.isDebugEnabled())
                log.debug("Received START_FLUSH at " + localAddress
                    + " but not sending BLOCK up");
        }
        Digest digest=(Digest)down_prot.down(new Event(Event.GET_DIGEST));
        FlushHeader fhr=new FlushHeader(FlushHeader.FLUSH_COMPLETED, fh.viewID,fh.flushParticipants);
        fhr.addDigest(digest);
        Message msg=new Message(flushStarter);
        msg.putHeader(getName(), fhr);
        down_prot.down(new Event(Event.MSG, msg));
        if(log.isDebugEnabled())
            log.debug("Received START_FLUSH at " + localAddress
                + " responded with FLUSH_COMPLETED to "
                + flushStarter);
    }
}
/**
 * Coordinator side of FLUSH_COMPLETED: records the sender's digest and, once
 * all flush members have answered, either completes the flush or — when
 * reconciliation is enabled and the digests show virtual-synchrony gaps —
 * starts a FLUSH_RECONCILE round. A FAIL/OK collision aborts the flush.
 */
private void onFlushCompleted(Address address, final FlushHeader header) {
    Message msg = null;
    boolean needsReconciliationPhase = false;
    boolean collision = false;
    Digest digest = header.digest;
    synchronized(sharedLock){
        flushCompletedMap.put(address, digest);
        flushCompleted = flushCompletedMap.size() >= flushMembers.size()
            && !flushMembers.isEmpty()
            && flushCompletedMap.keySet().containsAll(flushMembers);
        collision = !flushNotCompletedMap.isEmpty();
        if(log.isDebugEnabled())
            log.debug("At " + localAddress
                + " FLUSH_COMPLETED from "
                + address
                + ",completed "
                + flushCompleted
                + ",flushMembers "
                + flushMembers
                + ",flushCompleted "
                + flushCompletedMap.keySet());
        needsReconciliationPhase = enable_reconciliation && flushCompleted
            && hasVirtualSynchronyGaps();
        if(needsReconciliationPhase){
            Digest d = findHighestSequences();
            msg = new Message();
            msg.setFlag(Message.OOB);
            FlushHeader fh = new FlushHeader(FlushHeader.FLUSH_RECONCILE,
                currentViewId(),
                flushMembers);
            reconcileOks.clear();
            fh.addDigest(d);
            msg.putHeader(getName(), fh);
            if(log.isDebugEnabled())
                log.debug("At "+ localAddress + " reconciling flush mebers due to virtual synchrony gap, digest is " + d
                    + " flush members are "
                    + flushMembers);
            flushCompletedMap.clear();
        } else if (flushCompleted){
            flushCompletedMap.clear();
        } else if (collision){
            flushNotCompletedMap.clear();
            flushCompletedMap.clear();
        }
    }
    // Act outside sharedLock to avoid sending/notifying while holding it.
    if(needsReconciliationPhase){
        down_prot.down(new Event(Event.MSG, msg));
    }else if(flushCompleted){
        flush_promise.setResult(Boolean.TRUE);
        if(log.isDebugEnabled())
            log.debug("All FLUSH_COMPLETED received at " + localAddress);
    }else if(collision){
        //reject flush if we have at least one OK and at least one FAIL
        Runnable r = new Runnable(){
            public void run() {
                //wait a bit so ABORTs do not get received before other possible FLUSH_COMPLETED
                Util.sleep(1000);
                rejectFlush(header.flushParticipants, header.viewID);
            }
        };
        new Thread(r).start();
    }
}
private boolean hasVirtualSynchronyGaps() {
ArrayList<Digest> digests = new ArrayList<Digest>();
digests.addAll(flushCompletedMap.values());
Digest firstDigest = digests.get(0);
List<Digest> remainingDigests = digests.subList(1, digests.size());
for(Digest digest:remainingDigests){
Digest diff = firstDigest.difference(digest);
if(diff != Digest.EMPTY_DIGEST){
return true;
}
}
return false;
}
private Digest findHighestSequences() {
Digest result = null;
List<Digest> digests = new ArrayList<Digest>(flushCompletedMap.values());
result = digests.get(0);
List<Digest> remainingDigests = digests.subList(1, digests.size());
for(Digest digestG:remainingDigests){
result = result.highestSequence(digestG);
}
return result;
}
private void onSuspect(Address address) {
//handles FlushTest#testFlushWithCrashedFlushCoordinator
boolean amINeighbourOfCrashedFlushCoordinator = false;
ArrayList<Address> flushMembersCopy = null;
synchronized(sharedLock){
boolean flushCoordinatorSuspected = address.equals(flushCoordinator);
if(flushCoordinatorSuspected && flushMembers != null){
int indexOfCoordinator = flushMembers.indexOf(flushCoordinator);
int myIndex = flushMembers.indexOf(localAddress);
int diff = myIndex - indexOfCoordinator;
amINeighbourOfCrashedFlushCoordinator = (diff == 1 || (myIndex==0 && indexOfCoordinator == flushMembers.size()));
if(amINeighbourOfCrashedFlushCoordinator){
flushMembersCopy = new ArrayList<Address>(flushMembers);
}
}
}
if(amINeighbourOfCrashedFlushCoordinator){
if(log.isDebugEnabled())
log.debug("Flush coordinator " + flushCoordinator + " suspected, " + localAddress + " is neighbour, completing flush ");
onResume(new Event(Event.RESUME, flushMembersCopy));
}
//handles FlushTest#testFlushWithCrashedNonCoordinators
boolean flushOkCompleted = false;
Message m = null;
long viewID = 0;
synchronized(sharedLock){
suspected.add(address);
flushMembers.removeAll(suspected);
viewID = currentViewId();
flushOkCompleted = !flushCompletedMap.isEmpty() && flushCompletedMap.keySet().containsAll(flushMembers);
if(flushOkCompleted){
m = new Message(flushCoordinator, localAddress, null);
}
if(log.isDebugEnabled())
log.debug("Suspect is " + address
+ ",completed "
+ flushOkCompleted
+ ", flushOkSet "
+ flushCompletedMap
+ " flushMembers "
+ flushMembers);
}
if(flushOkCompleted){
Digest digest = (Digest) down_prot.down(new Event(Event.GET_DIGEST));
FlushHeader fh = new FlushHeader(FlushHeader.FLUSH_COMPLETED, viewID);
fh.addDigest(digest);
m.putHeader(getName(), fh);
down_prot.down(new Event(Event.MSG, m));
if(log.isDebugEnabled())
log.debug(localAddress + " sent FLUSH_COMPLETED message to " + flushCoordinator);
}
}
public static class FlushHeader extends Header implements Streamable {
public static final byte START_FLUSH = 0;
public static final byte STOP_FLUSH = 2;
public static final byte FLUSH_COMPLETED = 3;
public static final byte ABORT_FLUSH = 5;
public static final byte FLUSH_BYPASS = 6;
public static final byte FLUSH_RECONCILE = 7;
public static final byte FLUSH_RECONCILE_OK = 8;
public static final byte FLUSH_NOT_COMPLETED = 9;
byte type;
long viewID;
Collection<? extends Address> flushParticipants;
Digest digest = null;
private static final long serialVersionUID=-6248843990215637687L;
public FlushHeader(){
this(START_FLUSH, 0);
} // used for externalization
public FlushHeader(byte type){
this(type, 0);
}
public FlushHeader(byte type,long viewID){
this(type, viewID, null);
}
public FlushHeader(byte type,long viewID,Collection<? extends Address> flushView){
this.type = type;
this.viewID = viewID;
if(flushView != null){
this.flushParticipants = new ArrayList<Address>(flushView);
}
}
@Override
public int size() {
int retval=Global.BYTE_SIZE; // type
retval+=Global.LONG_SIZE; // viewID
retval+= Util.size(flushParticipants);
retval+=Global.BYTE_SIZE; // presence for digest
if(digest != null){
retval += digest.serializedSize();
}
return retval;
}
public void addDigest(Digest digest) {
this.digest = digest;
}
public String toString() {
switch(type){
case START_FLUSH:
return "FLUSH[type=START_FLUSH,viewId=" + viewID
+ ",members="
+ flushParticipants
+ "]";
case STOP_FLUSH:
return "FLUSH[type=STOP_FLUSH,viewId=" + viewID + "]";
case ABORT_FLUSH:
return "FLUSH[type=ABORT_FLUSH,viewId=" + viewID + "]";
case FLUSH_COMPLETED:
return "FLUSH[type=FLUSH_COMPLETED,viewId=" + viewID + "]";
case FLUSH_BYPASS:
return "FLUSH[type=FLUSH_BYPASS,viewId=" + viewID + "]";
case FLUSH_RECONCILE:
return "FLUSH[type=FLUSH_RECONCILE,viewId=" + viewID + ",digest=" + digest + "]";
case FLUSH_RECONCILE_OK:
return "FLUSH[type=FLUSH_RECONCILE_OK,viewId=" + viewID + "]";
default:
return "[FLUSH: unknown type (" + type + ")]";
}
}
public void writeExternal(ObjectOutput out) throws IOException {
out.writeByte(type);
out.writeLong(viewID);
out.writeObject(flushParticipants);
out.writeObject(digest);
}
public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
type = in.readByte();
viewID = in.readLong();
flushParticipants = (Collection<Address>) in.readObject();
digest = (Digest) in.readObject();
}
public void writeTo(DataOutputStream out) throws IOException {
out.writeByte(type);
out.writeLong(viewID);
Util.writeAddresses(flushParticipants, out);
Util.writeStreamable(digest, out);
}
public void readFrom(DataInputStream in) throws IOException,
IllegalAccessException,
InstantiationException {
type = in.readByte();
viewID = in.readLong();
flushParticipants = Util.readAddresses(in, ArrayList.class);
digest = (Digest) Util.readStreamable(Digest.class, in);
}
}
}
|
package sagan;
import javax.sql.DataSource;
import org.springframework.cloud.Cloud;
import org.springframework.cloud.CloudFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Profile;
import org.springframework.util.Assert;
import com.googlecode.flyway.core.Flyway;
/**
 * Base configuration for the application {@link DataSource}. Concrete
 * subclasses supply the actual pool; shared pool tuning and Flyway schema
 * migration live here.
 */
public abstract class DatabaseConfig {

    public static final String CACHE_NAME = "cache.database";
    public static final String CACHE_TTL = "${cache.database.timetolive:60}";

    @Bean
    public abstract DataSource dataSource();

    /**
     * Applies shared pool settings and runs schema migration against the
     * given Tomcat pool.
     */
    protected void configureDataSource(org.apache.tomcat.jdbc.pool.DataSource dataSource) {
        // pool sizing
        dataSource.setMaxActive(20);
        dataSource.setMaxIdle(8);
        dataSource.setMinIdle(8);
        // skip validation on borrow/return
        dataSource.setTestOnBorrow(false);
        dataSource.setTestOnReturn(false);
        migrateSchema(dataSource);
    }

    /** Runs Flyway migrations from the "database" location. */
    protected void migrateSchema(DataSource dataSource) {
        Flyway migration = new Flyway();
        migration.setLocations("database");
        migration.setDataSource(dataSource);
        migration.migrate();
    }
}
@Configuration
@Profile(SaganProfiles.STANDALONE)
class StandaloneDatabaseConfig extends DatabaseConfig {

    /** In-memory H2 database emulating PostgreSQL, for standalone runs. */
    @Bean
    public DataSource dataSource() {
        org.apache.tomcat.jdbc.pool.DataSource pool = new org.apache.tomcat.jdbc.pool.DataSource();
        pool.setDriverClassName("org.h2.Driver");
        pool.setUrl("jdbc:h2:mem:sagan;MODE=PostgreSQL");
        pool.setUsername("sa");
        pool.setPassword("");
        pool.setValidationQuery("SELECT 1");
        configureDataSource(pool);
        return pool;
    }
}
@Configuration
@Profile(SaganProfiles.CLOUDFOUNDRY)
class CloudFoundryDatabaseConfig extends DatabaseConfig {

    @Bean
    public Cloud cloud() {
        return new CloudFactory().getCloud();
    }

    /** DataSource bound to the "sagan-db" Cloud Foundry service. */
    @Bean
    public DataSource dataSource() {
        DataSource bound = cloud().getServiceConnector("sagan-db", DataSource.class, null);
        // shared tuning requires the Tomcat pool implementation
        Assert.isInstanceOf(org.apache.tomcat.jdbc.pool.DataSource.class, bound);
        configureDataSource((org.apache.tomcat.jdbc.pool.DataSource) bound);
        return bound;
    }
}
|
package ca.cumulonimbus.pressurenetsdk;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.security.MessageDigest;
import java.util.ArrayList;
import java.util.Date;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.SharedPreferences;
import android.database.Cursor;
import android.location.Location;
import android.net.ConnectivityManager;
import android.os.AsyncTask;
import android.os.BatteryManager;
import android.os.Handler;
import android.os.IBinder;
import android.os.Message;
import android.os.Messenger;
import android.os.RemoteException;
import android.os.SystemClock;
import android.preference.PreferenceManager;
import android.provider.Settings.Secure;
import android.widget.Toast;
/**
 * Developer-facing pressureNET API background service: manages and runs all
 * data collection and submission, and handles incoming Intents.
 *
 * @author jacob
 *
 */
public class CbService extends Service {
	// Collaborators (created in onCreate / onStartCommand / collection runs)
	private CbDataCollector dataCollector;
	private CbLocationManager locationManager;
	private CbSettingsHandler settingsHandler;
	private CbDb db;
	// self-reference handed to CbApi calls as the callback target
	public CbService service = this;
	private String mAppDir;
	IBinder mBinder;
	// the currently scheduled periodic collection task
	ReadingSender sender;
	// most recent client message, kept so replies can be sent later
	Message recentMsg;
	String serverURL = "https://pressurenet.cumulonimbus.ca/";
	// guards against starting the collection loop more than once
	private int runningCount = 0;
	// Intent action requesting a single immediate measurement
	public static String ACTION_SEND_MEASUREMENT = "SendMeasurement";
	// Service Interaction API Messages
	public static final int MSG_OKAY = 0;
	public static final int MSG_STOP = 1;
	public static final int MSG_GET_BEST_LOCATION = 2;
	public static final int MSG_BEST_LOCATION = 3;
	public static final int MSG_GET_BEST_PRESSURE = 4;
	public static final int MSG_BEST_PRESSURE = 5;
	public static final int MSG_START_AUTOSUBMIT = 6;
	public static final int MSG_STOP_AUTOSUBMIT = 7;
	public static final int MSG_SET_SETTINGS = 8;
	public static final int MSG_GET_SETTINGS = 9;
	public static final int MSG_SETTINGS = 10;
	public static final int MSG_START_DATA_STREAM = 11;
	public static final int MSG_DATA_STREAM = 12;
	public static final int MSG_STOP_DATA_STREAM = 13;
	// pressureNET Live API
	public static final int MSG_GET_LOCAL_RECENTS = 14;
	public static final int MSG_LOCAL_RECENTS = 15;
	public static final int MSG_GET_API_RECENTS = 16;
	public static final int MSG_API_RECENTS = 17;
	public static final int MSG_MAKE_API_CALL = 18;
	public static final int MSG_API_RESULT_COUNT = 19;
	// pressureNET API Cache
	public static final int MSG_CLEAR_LOCAL_CACHE = 20;
	public static final int MSG_REMOVE_FROM_PRESSURENET = 21;
	public static final int MSG_CLEAR_API_CACHE = 22;
	// Current Conditions
	public static final int MSG_ADD_CURRENT_CONDITION = 23;
	public static final int MSG_GET_CURRENT_CONDITIONS = 24;
	public static final int MSG_CURRENT_CONDITIONS = 25;
	// Sending Data
	public static final int MSG_SEND_OBSERVATION = 26;
	public static final int MSG_SEND_CURRENT_CONDITION = 27;
	// Current Conditions API
	public static final int MSG_MAKE_CURRENT_CONDITIONS_API_CALL = 28;
	// Notifications
	public static final int MSG_CHANGE_NOTIFICATION = 31;
	// Data management
	public static final int MSG_COUNT_LOCAL_OBS = 32;
	public static final int MSG_COUNT_API_CACHE = 33;
	public static final int MSG_COUNT_LOCAL_OBS_TOTALS = 34;
	public static final int MSG_COUNT_API_CACHE_TOTALS = 35;
	// Graphing
	public static final int MSG_GET_API_RECENTS_FOR_GRAPH = 36;
	public static final int MSG_API_RECENTS_FOR_GRAPH = 37;
	// timestamp of the last live API call (used for throttling)
	long lastAPICall = System.currentTimeMillis();
	private CbObservation collectedObservation;
	private final Handler mHandler = new Handler();
	// receives messages from bound clients
	Messenger mMessenger = new Messenger(new IncomingHandler());
	// observations collected while offline; flushed on the next successful send
	ArrayList<CbObservation> offlineBuffer = new ArrayList<CbObservation>();
	// timestamp of the last pressure-trend notification (rate limiting)
	private long lastPressureChangeAlert = 0;
	// most recent client Messenger, used for notification delivery
	Messenger lastMessenger;
/**
* Find all the data for an observation.
*
* Location, Measurement values, etc.
*
* @return
*/
public CbObservation collectNewObservation() {
try {
CbObservation pressureObservation = new CbObservation();
log("cb collecting new observation");
// Location values
locationManager = new CbLocationManager(getApplicationContext());
locationManager.startGettingLocations();
// Measurement values
pressureObservation = dataCollector.getPressureObservation();
pressureObservation.setLocation(locationManager.getCurrentBestLocation());
// stop listening for locations
LocationStopper stop = new LocationStopper();
mHandler.postDelayed(stop, 1000 * 10);
return pressureObservation;
} catch (Exception e) {
return null;
}
}
	/** Runnable posted with a delay to stop location updates after collection. */
	private class LocationStopper implements Runnable {
		@Override
		public void run() {
			try {
				locationManager.stopGettingLocations();
			} catch(Exception e) {
				// best-effort: locationManager may be null or already stopped
			}
		}
	}
/**
* Send a single reading.
* TODO: This is ugly copy+paste from the original ReadingSender. Fix that.
*/
public class SingleReadingSender implements Runnable {
@Override
public void run() {
log("collecting and submitting single " + settingsHandler.getServerURL());
dataCollector.startCollectingData(null);
CbObservation singleObservation = new CbObservation();
if (settingsHandler.isCollectingData()) {
// Collect
singleObservation = collectNewObservation();
if (singleObservation.getObservationValue() != 0.0) {
// Store in database
db.open();
long count = db.addObservation(singleObservation);
db.close();
try {
if (settingsHandler.isSharingData()) {
// Send if we're online
if (isNetworkAvailable()) {
log("online and sending single");
singleObservation
.setClientKey(getApplicationContext()
.getPackageName());
sendCbObservation(singleObservation);
// also check and send the offline buffer
if (offlineBuffer.size() > 0) {
log("sending " + offlineBuffer.size() + " offline buffered obs");
for (CbObservation singleOffline : offlineBuffer) {
sendCbObservation(singleObservation);
}
offlineBuffer.clear();
}
} else {
log("didn't send, not sharing data; i.e., offline");
// / offline buffer variable
// TODO: put this in the DB to survive longer
offlineBuffer.add(singleObservation);
}
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
}
}
/**
* Collect and send data in a different thread. This runs itself every
* "settingsHandler.getDataCollectionFrequency()" milliseconds
*/
private class ReadingSender implements Runnable {
public void run() {
// retrieve updated settings
settingsHandler = settingsHandler.getSettings();
log("collecting and submitting " + settingsHandler.getServerURL());
int dataCollecting = dataCollector.startCollectingData(null);
long base = SystemClock.uptimeMillis();
boolean okayToGo = true;
// Check if we're supposed to be charging and if we are.
// Bail if appropriate
if(settingsHandler.isOnlyWhenCharging()) {
if(!isCharging()) {
okayToGo = false;
}
}
if (okayToGo && settingsHandler.isCollectingData()) {
// Collect
CbObservation singleObservation = new CbObservation();
singleObservation = collectNewObservation();
if(singleObservation != null) {
if (singleObservation.getObservationValue() != 0.0) {
// Store in database
db.open();
long count = db.addObservation(singleObservation);
db.close();
try {
if (settingsHandler.isSharingData()) {
// Send if we're online
if (isNetworkAvailable()) {
log("online and sending");
singleObservation
.setClientKey(getApplicationContext()
.getPackageName());
sendCbObservation(singleObservation);
// also check and send the offline buffer
if (offlineBuffer.size() > 0) {
log("sending " + offlineBuffer.size() + " offline buffered obs");
for (CbObservation singleOffline : offlineBuffer) {
sendCbObservation(singleObservation);
}
offlineBuffer.clear();
}
} else {
log("didn't send");
// / offline buffer variable
// TODO: put this in the DB to survive longer
offlineBuffer.add(singleObservation);
}
}
// If notifications are enabled,
log("is send notif " + settingsHandler.isSendNotifications());
if (settingsHandler.isSendNotifications()) {
// check for pressure local trend changes and notify
// the client
// ensure this only happens every once in a while
long rightNow = System.currentTimeMillis();
long sixHours = 1000 * 60 * 60 * 6;
if (rightNow - lastPressureChangeAlert > (sixHours)) {
long timeLength = 1000 * 60 * 60 * 3;
db.open();
Cursor localCursor = db.runLocalAPICall(-90,
90, -180, 180,
System.currentTimeMillis()
- (timeLength),
System.currentTimeMillis(), 1000);
ArrayList<CbObservation> recents = new ArrayList<CbObservation>();
while (localCursor.moveToNext()) {
// just need observation value, time, and
// location
CbObservation obs = new CbObservation();
obs.setObservationValue(localCursor.getDouble(8));
obs.setTime(localCursor.getLong(10));
Location location = new Location("network");
location.setLatitude(localCursor
.getDouble(1));
location.setLongitude(localCursor
.getDouble(2));
obs.setLocation(location);
recents.add(obs);
}
String tendencyChange = CbScience
.changeInTrend(recents);
db.close();
log("cbservice tendency changes: " + tendencyChange);
if (tendencyChange.contains(",")
&& (!tendencyChange.toLowerCase()
.contains("unknown"))) {
String[] tendencies = tendencyChange
.split(",");
if (!tendencies[0].equals(tendencies[1])) {
log("Trend change! " + tendencyChange);
// TODO: send message to deliver
// Android notification of tendency change
try {
if(lastMessenger!= null) {
lastMessenger.send(Message.obtain(null,
MSG_CHANGE_NOTIFICATION, tendencyChange));
} else {
log("readingsender didn't send notif, no lastMessenger");
}
} catch(Exception e) {
e.printStackTrace();
}
lastPressureChangeAlert = rightNow;
} else {
log("tendency equal");
}
}
} else {
// wait
log("tendency; hasn't been 6h, min wait time yet");
}
}
} catch (Exception e) {
e.printStackTrace();
}
}
} else {
log("singleobservation is null, not sending");
}
} else {
log("tried collecting, reading zero");
}
sender = this;
mHandler.postAtTime(sender,
base + (settingsHandler.getDataCollectionFrequency()));
}
}
public boolean isNetworkAvailable() {
log("is net available?");
ConnectivityManager cm = (ConnectivityManager) this
.getSystemService(Context.CONNECTIVITY_SERVICE);
// test for connection
if (cm.getActiveNetworkInfo() != null
&& cm.getActiveNetworkInfo().isAvailable()
&& cm.getActiveNetworkInfo().isConnected()) {
log("yes");
return true;
} else {
log("no");
return false;
}
}
/**
* Stop all listeners, active sensors, etc, and shut down.
*
*/
public void stopAutoSubmit() {
if (locationManager != null) {
locationManager.stopGettingLocations();
}
if (dataCollector != null) {
dataCollector.stopCollectingData();
}
mHandler.removeCallbacks(sender);
}
/**
* Send the observation to the server
*
* @param observation
* @return
*/
public boolean sendCbObservation(CbObservation observation) {
try {
CbDataSender sender = new CbDataSender(getApplicationContext());
sender.setSettings(settingsHandler, locationManager, dataCollector);
sender.execute(observation.getObservationAsParams());
return true;
} catch (Exception e) {
return false;
}
}
/**
* Send a new account to the server
*
* @param account
* @return
*/
public boolean sendCbAccount(CbAccount account) {
try {
CbDataSender sender = new CbDataSender(getApplicationContext());
sender.setSettings(settingsHandler, locationManager, dataCollector);
sender.execute(account.getAccountAsParams());
return true;
} catch (Exception e) {
return false;
}
}
/**
* Send the current condition to the server
*
* @param observation
* @return
*/
public boolean sendCbCurrentCondition(CbCurrentCondition condition) {
log("sending cbcurrent condition");
try {
CbDataSender sender = new CbDataSender(getApplicationContext());
sender.setSettings(settingsHandler, locationManager, dataCollector);
sender.execute(condition.getCurrentConditionAsParams());
return true;
} catch (Exception e) {
return false;
}
}
	/**
	 * Start the periodic data collection.
	 */
	public void startAutoSubmit() {
		log("CbService: Starting to auto-collect and submit data.");
		// remove any previously scheduled run so we never double-schedule
		mHandler.removeCallbacks(sender);
		sender = new ReadingSender();
		mHandler.post(sender);
	}
	/** Stop collection/submission before the service is torn down. */
	@Override
	public void onDestroy() {
		log("on destroy");
		stopAutoSubmit();
		super.onDestroy();
	}
	/** Initialize file storage and open a handle to the local database. */
	@Override
	public void onCreate() {
		setUpFiles();
		log("cb on create");
		db = new CbDb(getApplicationContext());
		super.onCreate();
	}
/**
* Check charge state for preferences.
* TODO: In future, adjust our collection and submission frequency
* based on battery level.
*/
public boolean isCharging() {
// Check battery and charging status
IntentFilter ifilter = new IntentFilter(Intent.ACTION_BATTERY_CHANGED);
Intent batteryStatus = getApplicationContext().registerReceiver(null, ifilter);
// Are we charging / charged?
int status = batteryStatus.getIntExtra(BatteryManager.EXTRA_STATUS, -1);
boolean isCharging = status == BatteryManager.BATTERY_STATUS_CHARGING ||
status == BatteryManager.BATTERY_STATUS_FULL;
// How are we charging?
/*
int chargePlug = batteryStatus.getIntExtra(BatteryManager.EXTRA_PLUGGED, -1);
boolean usbCharge = chargePlug == BatteryManager.BATTERY_PLUGGED_USB;
boolean acCharge = chargePlug == BatteryManager.BATTERY_PLUGGED_AC;
*/
return isCharging;
}
	/**
	 * Start running background data collection methods.
	 *
	 * Handles the single-measurement Intent action directly; otherwise starts
	 * the periodic collection loop (from the Intent, or from saved settings
	 * in the database when restarted with a null Intent).
	 */
	@Override
	public int onStartCommand(Intent intent, int flags, int startId) {
		log("cb onstartcommand");
		if(intent!= null) {
			if(intent.getAction() != null ) {
				if(intent.getAction().equals(ACTION_SEND_MEASUREMENT)) {
					// send just a single measurement
					log("sending single observation, request from intent");
					sendSingleObs();
					// NOTE(review): returns 0 (START_STICKY_COMPATIBILITY);
					// likely intended START_NOT_STICKY -- confirm
					return 0;
				}
			}
		}
		// Check the intent for Settings initialization
		dataCollector = new CbDataCollector(getID(), getApplicationContext());
		if(runningCount==0) {
			log("starting service code, run count 0");
			if (intent != null) {
				startWithIntent(intent);
				// NOTE(review): returning here means runningCount is never
				// incremented for intent-based starts -- confirm intended
				return START_STICKY;
			} else {
				log("INTENT NULL; checking db");
				startWithDatabase();
			}
			runningCount++;
		} else {
			log("not starting, run count " + runningCount);
		}
		super.onStartCommand(intent, flags, startId);
		return START_STICKY;
	}
/**
* Convert time ago text to ms. TODO: not this. values in xml.
*
* @param timeAgo
* @return
*/
public static long stringTimeToLongHack(String timeAgo) {
if (timeAgo.equals("1 minute")) {
return 1000 * 60;
} else if (timeAgo.equals("5 minutes")) {
return 1000 * 60 * 5;
} else if (timeAgo.equals("10 minutes")) {
return 1000 * 60 * 10;
} else if (timeAgo.equals("30 minutes")) {
return 1000 * 60 * 30;
} else if (timeAgo.equals("1 hour")) {
return 1000 * 60 * 60;
}
return 1000 * 60 * 10;
}
	/**
	 * Initialize settings from SharedPreferences (and the given Intent),
	 * persist them, optionally register a new account, and start the
	 * periodic collection loop.
	 *
	 * @param intent the starting Intent; may carry "add_account" extras
	 */
	public void startWithIntent(Intent intent) {
		try {
			settingsHandler = new CbSettingsHandler(getApplicationContext());
			settingsHandler.setServerURL(serverURL);
			settingsHandler.setAppID("ca.cumulonimbus.barometernetwork");
			// user preferences drive frequency, sharing and notifications
			SharedPreferences sharedPreferences = PreferenceManager
					.getDefaultSharedPreferences(this);
			String preferenceCollectionFrequency = sharedPreferences.getString(
					"autofrequency", "10 minutes");
			boolean preferenceShareData = sharedPreferences.getBoolean(
					"autoupdate", true);
			String preferenceShareLevel = sharedPreferences.getString(
					"sharing_preference", "Us, Researchers and Forecasters");
			boolean preferenceSendNotifications = sharedPreferences.getBoolean(
					"send_notifications", false);
			settingsHandler
					.setDataCollectionFrequency(stringTimeToLongHack(preferenceCollectionFrequency));
			settingsHandler.setSendNotifications(preferenceSendNotifications);
			// NOTE(review): preferenceShareData and preferenceShareLevel are
			// read but never applied to settingsHandler -- confirm intended
			boolean useGPS = sharedPreferences.getBoolean("use_gps", true);
			boolean onlyWhenCharging = sharedPreferences.getBoolean("only_when_charging", false);
			settingsHandler.setUseGPS(useGPS);
			settingsHandler.setOnlyWhenCharging(onlyWhenCharging);
			log("cbservice startwithintent " + settingsHandler);
			// Seems like new settings. Try adding to the db.
			settingsHandler.saveSettings();
			// are we creating a new user?
			if (intent.hasExtra("add_account")) {
				log("adding new user");
				CbAccount account = new CbAccount();
				account.setEmail(intent.getStringExtra("email"));
				account.setTimeRegistered(intent.getLongExtra("time", 0));
				account.setUserID(intent.getStringExtra("userID"));
				sendCbAccount(account);
			}
			// Start a new thread and return
			startAutoSubmit();
		} catch (Exception e) {
			// log the failing frames; there is no UI to surface errors to
			for (StackTraceElement ste : e.getStackTrace()) {
				log(ste.getMethodName() + ste.getLineNumber());
			}
		}
	}
	/**
	 * Initialize settings from the first row of the saved settings table
	 * (used when the service restarts with a null Intent) and start the
	 * periodic collection loop.
	 */
	public void startWithDatabase() {
		try {
			db.open();
			// Check the database for Settings initialization
			settingsHandler = new CbSettingsHandler(getApplicationContext());
			// db.clearDb();
			Cursor allSettings = db.fetchAllSettings();
			log("cb intent null; checking db, size " + allSettings.getCount());
			while (allSettings.moveToNext()) {
				// column indices follow the settings table layout in CbDb
				// -- TODO confirm against the schema
				settingsHandler.setAppID(allSettings.getString(1));
				settingsHandler.setDataCollectionFrequency(allSettings
						.getLong(2));
				settingsHandler.setServerURL(serverURL);
				settingsHandler.setShareLevel(allSettings.getString(7));
				// booleans are stored as ints; > 0 means enabled
				int onlyWhenCharging = allSettings.getInt(4);
				int useGPS = allSettings.getInt(9);
				int sendNotifications = allSettings.getInt(8);
				boolean boolCharging = (onlyWhenCharging > 0);
				boolean boolGPS = (useGPS > 0);
				boolean boolSendNotifications = (sendNotifications > 0);
				log("only when charging processed " + boolCharging + " gps " + boolGPS);
				settingsHandler.setSendNotifications(boolSendNotifications);
				settingsHandler.setOnlyWhenCharging(boolCharging);
				settingsHandler.setUseGPS(boolGPS);
				settingsHandler.saveSettings();
				log("cbservice startwithdb, " + settingsHandler);
				startAutoSubmit();
				// but just once
				break;
			}
			db.close();
		} catch (Exception e) {
			// log the failing frames; there is no UI to surface errors to
			for (StackTraceElement ste : e.getStackTrace()) {
				log(ste.getMethodName() + ste.getLineNumber());
			}
		}
	}
/**
* Handler of incoming messages from clients.
*/
class IncomingHandler extends Handler {
@Override
public void handleMessage(Message msg) {
switch (msg.what) {
case MSG_STOP:
log("message. bound service says stop");
stopAutoSubmit();
break;
case MSG_GET_BEST_LOCATION:
log("message. bound service requesting location");
if (locationManager != null) {
Location best = locationManager.getCurrentBestLocation();
try {
log("service sending best location");
msg.replyTo.send(Message.obtain(null,
MSG_BEST_LOCATION, best));
} catch (RemoteException re) {
re.printStackTrace();
}
} else {
log("error: location null, not returning");
}
break;
case MSG_GET_BEST_PRESSURE:
log("message. bound service requesting pressure");
if (dataCollector != null) {
CbObservation pressure = dataCollector
.getPressureObservation();
try {
log("service sending best pressure");
msg.replyTo.send(Message.obtain(null,
MSG_BEST_PRESSURE, pressure));
} catch (RemoteException re) {
re.printStackTrace();
}
} else {
log("error: data collector null, not returning");
}
break;
case MSG_START_AUTOSUBMIT:
//log("start autosubmit");
//startWithDatabase();
break;
case MSG_STOP_AUTOSUBMIT:
log("stop autosubmit");
stopAutoSubmit();
break;
case MSG_GET_SETTINGS:
log("get settings");
try {
msg.replyTo.send(Message.obtain(null, MSG_SETTINGS,
settingsHandler));
} catch (RemoteException re) {
re.printStackTrace();
}
break;
case MSG_START_DATA_STREAM:
startDataStream(msg.replyTo);
break;
case MSG_STOP_DATA_STREAM:
stopDataStream();
break;
case MSG_SET_SETTINGS:
log("set settings");
CbSettingsHandler newSettings = (CbSettingsHandler) msg.obj;
System.out.println("cbservice received new settings, " + newSettings);
newSettings.saveSettings();
break;
case MSG_GET_LOCAL_RECENTS:
log("get local recents");
recentMsg = msg;
CbApiCall apiCall = (CbApiCall) msg.obj;
if (apiCall == null) {
//log("apicall null, bailing");
break;
}
// run API call
db.open();
Cursor cursor = db.runLocalAPICall(apiCall.getMinLat(),
apiCall.getMaxLat(), apiCall.getMinLon(),
apiCall.getMaxLon(), apiCall.getStartTime(),
apiCall.getEndTime(), 2000);
ArrayList<CbObservation> results = new ArrayList<CbObservation>();
while (cursor.moveToNext()) {
// TODO: This is duplicated in CbDataCollector. Fix that
CbObservation obs = new CbObservation();
Location location = new Location("network");
location.setLatitude(cursor.getDouble(1));
location.setLongitude(cursor.getDouble(2));
location.setAltitude(cursor.getDouble(3));
location.setAccuracy(cursor.getInt(4));
location.setProvider(cursor.getString(5));
obs.setLocation(location);
obs.setObservationType(cursor.getString(6));
obs.setObservationUnit(cursor.getString(7));
obs.setObservationValue(cursor.getDouble(8));
obs.setSharing(cursor.getString(9));
obs.setTime(cursor.getLong(10));
obs.setTimeZoneOffset(cursor.getInt(11));
obs.setUser_id(cursor.getString(12));
obs.setTrend(cursor.getString(18));
// TODO: Add sensor information
results.add(obs);
}
db.close();
log("cbservice: " + results.size() + " local api results");
try {
msg.replyTo.send(Message.obtain(null, MSG_LOCAL_RECENTS,
results));
} catch (RemoteException re) {
re.printStackTrace();
}
break;
case MSG_GET_API_RECENTS:
CbApiCall apiCacheCall = (CbApiCall) msg.obj;
log("get api recents " + apiCacheCall.toString());
// run API call
try {
db.open();
Cursor cacheCursor = db.runAPICacheCall(
apiCacheCall.getMinLat(), apiCacheCall.getMaxLat(),
apiCacheCall.getMinLon(), apiCacheCall.getMaxLon(),
apiCacheCall.getStartTime(), apiCacheCall.getEndTime(),
apiCacheCall.getLimit());
ArrayList<CbObservation> cacheResults = new ArrayList<CbObservation>();
while (cacheCursor.moveToNext()) {
CbObservation obs = new CbObservation();
Location location = new Location("network");
location.setLatitude(cacheCursor.getDouble(1));
location.setLongitude(cacheCursor.getDouble(2));
obs.setLocation(location);
obs.setObservationValue(cacheCursor.getDouble(3));
obs.setTime(cacheCursor.getLong(4));
cacheResults.add(obs);
}
db.close();
try {
msg.replyTo.send(Message.obtain(null, MSG_API_RECENTS,
cacheResults));
} catch (RemoteException re) {
re.printStackTrace();
}
} catch(Exception e) {
}
break;
case MSG_GET_API_RECENTS_FOR_GRAPH:
// TODO: Put this in a method. It's a copy+paste from GET_API_RECENTS
CbApiCall apiCacheCallGraph = (CbApiCall) msg.obj;
log("get api recents " + apiCacheCallGraph.toString());
// run API call
db.open();
Cursor cacheCursorGraph = db.runAPICacheCall(
apiCacheCallGraph.getMinLat(), apiCacheCallGraph.getMaxLat(),
apiCacheCallGraph.getMinLon(), apiCacheCallGraph.getMaxLon(),
apiCacheCallGraph.getStartTime(), apiCacheCallGraph.getEndTime(),
apiCacheCallGraph.getLimit());
ArrayList<CbObservation> cacheResultsGraph = new ArrayList<CbObservation>();
while (cacheCursorGraph.moveToNext()) {
CbObservation obs = new CbObservation();
Location location = new Location("network");
location.setLatitude(cacheCursorGraph.getDouble(1));
location.setLongitude(cacheCursorGraph.getDouble(2));
obs.setLocation(location);
obs.setObservationValue(cacheCursorGraph.getDouble(3));
obs.setTime(cacheCursorGraph.getLong(4));
cacheResultsGraph.add(obs);
}
db.close();
try {
msg.replyTo.send(Message.obtain(null, MSG_API_RECENTS_FOR_GRAPH,
cacheResultsGraph));
} catch (RemoteException re) {
re.printStackTrace();
}
break;
case MSG_MAKE_API_CALL:
CbApi api = new CbApi(getApplicationContext());
CbApiCall liveApiCall = (CbApiCall) msg.obj;
liveApiCall.setCallType("Readings");
long timeDiff = System.currentTimeMillis() - lastAPICall;
deleteOldData();
lastAPICall = api.makeAPICall(liveApiCall, service,
msg.replyTo, "Readings");
break;
case MSG_MAKE_CURRENT_CONDITIONS_API_CALL:
CbApi conditionApi = new CbApi(getApplicationContext());
CbApiCall conditionApiCall = (CbApiCall) msg.obj;
conditionApiCall.setCallType("Conditions");
conditionApi.makeAPICall(conditionApiCall, service,
msg.replyTo, "Conditions");
break;
case MSG_CLEAR_LOCAL_CACHE:
db.open();
db.clearLocalCache();
long count = db.getUserDataCount();
db.close();
try {
msg.replyTo.send(Message.obtain(null,
MSG_COUNT_LOCAL_OBS_TOTALS, (int) count, 0));
} catch (RemoteException re) {
re.printStackTrace();
}
break;
case MSG_REMOVE_FROM_PRESSURENET:
// TODO: Implement
break;
case MSG_CLEAR_API_CACHE:
db.open();
db.clearAPICache();
db.open();
long countCache = db.getDataCacheCount();
db.close();
try {
msg.replyTo.send(Message.obtain(null,
MSG_COUNT_API_CACHE_TOTALS, (int) countCache, 0));
} catch (RemoteException re) {
re.printStackTrace();
}
break;
case MSG_ADD_CURRENT_CONDITION:
CbCurrentCondition cc = (CbCurrentCondition) msg.obj;
db.open();
db.addCondition(cc);
db.close();
break;
case MSG_GET_CURRENT_CONDITIONS:
recentMsg = msg;
db.open();
CbApiCall currentConditionAPI = (CbApiCall) msg.obj;
Cursor ccCursor = db.getCurrentConditions(
currentConditionAPI.getMinLat(),
currentConditionAPI.getMaxLat(),
currentConditionAPI.getMinLon(),
currentConditionAPI.getMaxLon(),
currentConditionAPI.getStartTime(),
currentConditionAPI.getEndTime(), 1000);
ArrayList<CbCurrentCondition> conditions = new ArrayList<CbCurrentCondition>();
while (ccCursor.moveToNext()) {
CbCurrentCondition cur = new CbCurrentCondition();
Location location = new Location("network");
location.setLatitude(ccCursor.getDouble(1));
location.setLongitude(ccCursor.getDouble(2));
location.setAltitude(ccCursor.getDouble(3));
location.setAccuracy(ccCursor.getInt(4));
location.setProvider(ccCursor.getString(5));
cur.setLocation(location);
cur.setTime(ccCursor.getLong(6));
cur.setTime(ccCursor.getLong(7));
cur.setUser_id(ccCursor.getString(9));
cur.setGeneral_condition(ccCursor.getString(10));
cur.setWindy(ccCursor.getString(11));
cur.setFog_thickness(ccCursor.getString(12));
cur.setCloud_type(ccCursor.getString(13));
cur.setPrecipitation_type(ccCursor.getString(14));
cur.setPrecipitation_amount(ccCursor.getDouble(15));
cur.setPrecipitation_unit(ccCursor.getString(16));
cur.setThunderstorm_intensity(ccCursor.getString(17));
cur.setUser_comment(ccCursor.getString(18));
conditions.add(cur);
}
db.close();
try {
msg.replyTo.send(Message.obtain(null,
MSG_CURRENT_CONDITIONS, conditions));
} catch (RemoteException re) {
re.printStackTrace();
}
break;
case MSG_SEND_CURRENT_CONDITION:
CbCurrentCondition condition = (CbCurrentCondition) msg.obj;
if(settingsHandler == null ) {
settingsHandler = new CbSettingsHandler(getApplicationContext());
settingsHandler.setServerURL(serverURL);
settingsHandler.setAppID("ca.cumulonimbus.barometernetwork");
}
try {
condition.setSharing_policy(settingsHandler.getShareLevel());
sendCbCurrentCondition(condition);
} catch (Exception e) {
e.printStackTrace();
}
break;
case MSG_SEND_OBSERVATION:
log("sending single observation, request from app");
sendSingleObs();
break;
case MSG_COUNT_LOCAL_OBS:
db.open();
long countLocalObsOnly = db.getUserDataCount();
db.close();
try {
msg.replyTo.send(Message.obtain(null,
MSG_COUNT_LOCAL_OBS_TOTALS,
(int) countLocalObsOnly, 0));
} catch (RemoteException re) {
re.printStackTrace();
}
break;
case MSG_COUNT_API_CACHE:
db.open();
long countCacheOnly = db.getDataCacheCount();
db.close();
try {
msg.replyTo.send(Message
.obtain(null, MSG_COUNT_API_CACHE_TOTALS,
(int) countCacheOnly, 0));
} catch (RemoteException re) {
re.printStackTrace();
}
break;
case MSG_CHANGE_NOTIFICATION:
if(msg.replyTo != null) {
lastMessenger = msg.replyTo;
} else {
}
break;
default:
super.handleMessage(msg);
}
}
}
/**
 * Sends a single observation to the server, on request from the app.
 * The work is posted to the service handler rather than run inline.
 */
public void sendSingleObs() {
    mHandler.post(new SingleReadingSender());
}
/**
 * Remove older data from the API cache to keep the database size reasonable.
 * Opens and closes the database around the delete; safe to call repeatedly.
 */
public void deleteOldData() {
    log("deleting old data");
    db.open();
    db.deleteOldCacheData();
    db.close();
}
/**
 * Notifies a client Messenger of the number of results from an API call.
 *
 * @param reply the Messenger to notify; if null, nothing is sent
 * @param count the result count to deliver
 * @return true if the notification was delivered, false otherwise
 */
public boolean notifyAPIResult(Messenger reply, int count) {
    try {
        if (reply == null) {
            log("cannot notify, reply is null");
        } else {
            reply.send(Message.obtain(null, MSG_API_RESULT_COUNT, count, 0));
            // Bug fix: previously this method returned false unconditionally,
            // making its boolean result meaningless to callers.
            return true;
        }
    } catch (RemoteException re) {
        re.printStackTrace();
    } catch (NullPointerException npe) {
        npe.printStackTrace();
    }
    return false;
}
/**
 * Reads the most recent pressure observation from the database.
 *
 * @return the observation, or null if the stored pressure is 0.0
 *         (treated as "no data"); on any error, returns an observation
 *         holding the last value read (best-effort, matches prior behavior)
 */
public CbObservation recentPressureFromDatabase() {
    CbObservation obs = new CbObservation();
    double pressure = 0.0;
    try {
        long rowId = db.fetchObservationMaxID();
        Cursor c = db.fetchObservation(rowId);
        try {
            while (c.moveToNext()) {
                // Column 8 holds the observation value (see runAPICacheCall usage).
                pressure = c.getDouble(8);
            }
        } finally {
            // Bug fix: the cursor was previously never closed (resource leak).
            c.close();
        }
        log(pressure + " pressure from db");
        if (pressure == 0.0) {
            log("returning null");
            return null;
        }
        obs.setObservationValue(pressure);
        return obs;
    } catch (Exception e) {
        // Deliberate best-effort fallback: return whatever was read so far.
        obs.setObservationValue(pressure);
        return obs;
    }
}
private class StreamObservation extends AsyncTask<Messenger, Void, String> {
@Override
protected String doInBackground(Messenger... m) {
try {
for (Messenger msgr : m) {
if (msgr != null) {
msgr.send(Message.obtain(null, MSG_DATA_STREAM,
recentPressureFromDatabase()));
} else {
log("messenger is null");
}
}
} catch (RemoteException re) {
re.printStackTrace();
}
return "
}
@Override
protected void onPostExecute(String result) {
}
@Override
protected void onPreExecute() {
}
@Override
protected void onProgressUpdate(Void... values) {
}
}
/**
 * Starts streaming sensor data to the given Messenger, if a data
 * collector is available.
 */
public void startDataStream(Messenger m) {
    log("cbService starting stream " + (m == null));
    if (dataCollector == null) {
        return;
    }
    dataCollector.startCollectingData(m);
    new StreamObservation().execute(m);
}
/** Stops the data stream, if a data collector is available. */
public void stopDataStream() {
    log("cbservice stopping stream");
    if (dataCollector == null) {
        return;
    }
    dataCollector.stopCollectingData();
}
/**
* Get a hash'd device ID
*
* @return
*/
public String getID() {
try {
MessageDigest md = MessageDigest.getInstance("MD5");
String actual_id = Secure.getString(getApplicationContext()
.getContentResolver(), Secure.ANDROID_ID);
byte[] bytes = actual_id.getBytes();
byte[] digest = md.digest(bytes);
StringBuffer hexString = new StringBuffer();
for (int i = 0; i < digest.length; i++) {
hexString.append(Integer.toHexString(0xFF & digest[i]));
}
return hexString.toString();
} catch (Exception e) {
return "
}
}
// Resolves the external-files directory used for the optional SD-card log.
// Not used unless logging is enabled.
public void setUpFiles() {
    try {
        final File homeDirectory = getExternalFilesDir(null);
        if (homeDirectory == null) {
            return; // external storage unavailable; keep previous mAppDir
        }
        mAppDir = homeDirectory.getAbsolutePath();
    } catch (Exception e) {
        e.printStackTrace();
    }
}
// Log data to SD card for debug purposes.
// To enable logging, ensure the Manifest allows writing to SD card.
// Appends a timestamped line to <mAppDir>/log.txt.
public void logToFile(String text) {
    OutputStream output = null;
    try {
        output = new FileOutputStream(mAppDir + "/log.txt", true);
        String logString = (new Date()).toString() + ": " + text + "\n";
        output.write(logString.getBytes());
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } catch (IOException ioe) {
        ioe.printStackTrace();
    } finally {
        // Bug fix: the stream was leaked if write() threw; always close it.
        if (output != null) {
            try {
                output.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}
/** Returns the binder of the service's Messenger so clients can send messages. */
@Override
public IBinder onBind(Intent intent) {
    log("on bind");
    final IBinder binder = mMessenger.getBinder();
    return binder;
}
// Logs rebind events; delegates all behavior to the superclass.
@Override
public void onRebind(Intent intent) {
    log("on rebind");
    super.onRebind(intent);
}
/** Logs a message to stdout and (when enabled) to the SD-card log file. */
public void log(String message) {
    System.out.println(message);
    logToFile(message);
}
/** @return the data collector used by this service (may be null). */
public CbDataCollector getDataCollector() {
    return dataCollector;
}

/** @param dataCollector the data collector to use. */
public void setDataCollector(CbDataCollector dataCollector) {
    this.dataCollector = dataCollector;
}

/** @return the location manager used by this service (may be null). */
public CbLocationManager getLocationManager() {
    return locationManager;
}

/** @param locationManager the location manager to use. */
public void setLocationManager(CbLocationManager locationManager) {
    this.locationManager = locationManager;
}
}
|
package com.opengamma.engine.view.server;
import org.fudgemsg.FudgeContextConfiguration;
import org.fudgemsg.mapping.FudgeObjectDictionary;
import com.opengamma.engine.position.Portfolio;
import com.opengamma.engine.position.PortfolioNode;
import com.opengamma.engine.position.Position;
import com.opengamma.engine.view.ViewCalculationResultModel;
import com.opengamma.engine.view.ViewComputationResultModel;
import com.opengamma.engine.view.ViewDefinition;
import com.opengamma.engine.view.ViewDeltaResultModel;
/**
* Registers custom builders for Portfolio, PortfolioNode, and Position with a FudgeContext
*/
public class EngineFudgeContextConfiguration extends FudgeContextConfiguration {
// REVIEW kirk 2010-05-22 -- Any reason this shouldn't be a pure singleton?
/**
* The singleton Fudge context for a running Engine.
*/
public static final FudgeContextConfiguration INSTANCE = new EngineFudgeContextConfiguration();
public void configureFudgeObjectDictionary(final FudgeObjectDictionary dictionary) {
dictionary.getDefaultBuilderFactory().addGenericBuilder(Portfolio.class, new PortfolioBuilder());
dictionary.getDefaultBuilderFactory().addGenericBuilder(PortfolioNode.class, new PortfolioNodeBuilder());
dictionary.getDefaultBuilderFactory().addGenericBuilder(Position.class, new PositionBuilder());
dictionary.addBuilder(ViewDefinition.class, new ViewDefinitionBuilder());
dictionary.getDefaultBuilderFactory().addGenericBuilder(ViewCalculationResultModel.class, new ViewCalculationResultModelBuilder());
dictionary.getDefaultBuilderFactory().addGenericBuilder(ViewComputationResultModel.class, new ViewComputationResultModelBuilder());
dictionary.getDefaultBuilderFactory().addGenericBuilder(ViewDeltaResultModel.class, new ViewDeltaResultModelBuilder());
}
}
|
package org.opencms.cmis;
import static org.opencms.cmis.CmsCmisUtil.checkResourceName;
import static org.opencms.cmis.CmsCmisUtil.ensureLock;
import static org.opencms.cmis.CmsCmisUtil.handleCmsException;
import static org.opencms.cmis.CmsCmisUtil.splitFilter;
import org.opencms.configuration.CmsConfigurationException;
import org.opencms.configuration.CmsParameterConfiguration;
import org.opencms.file.CmsFile;
import org.opencms.file.CmsObject;
import org.opencms.file.CmsProject;
import org.opencms.file.CmsProperty;
import org.opencms.file.CmsResource;
import org.opencms.file.CmsResourceFilter;
import org.opencms.file.CmsVfsResourceAlreadyExistsException;
import org.opencms.file.types.CmsResourceTypeFolder;
import org.opencms.file.types.I_CmsResourceType;
import org.opencms.main.CmsException;
import org.opencms.main.CmsLog;
import org.opencms.main.OpenCms;
import org.opencms.relations.CmsRelation;
import org.opencms.relations.CmsRelationFilter;
import org.opencms.repository.CmsRepositoryFilter;
import org.opencms.util.CmsFileUtil;
import org.opencms.util.CmsStringUtil;
import org.opencms.util.CmsUUID;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.chemistry.opencmis.commons.PropertyIds;
import org.apache.chemistry.opencmis.commons.data.Acl;
import org.apache.chemistry.opencmis.commons.data.AllowableActions;
import org.apache.chemistry.opencmis.commons.data.ContentStream;
import org.apache.chemistry.opencmis.commons.data.FailedToDeleteData;
import org.apache.chemistry.opencmis.commons.data.ObjectData;
import org.apache.chemistry.opencmis.commons.data.ObjectInFolderContainer;
import org.apache.chemistry.opencmis.commons.data.ObjectInFolderData;
import org.apache.chemistry.opencmis.commons.data.ObjectInFolderList;
import org.apache.chemistry.opencmis.commons.data.ObjectList;
import org.apache.chemistry.opencmis.commons.data.ObjectParentData;
import org.apache.chemistry.opencmis.commons.data.PermissionMapping;
import org.apache.chemistry.opencmis.commons.data.Properties;
import org.apache.chemistry.opencmis.commons.data.PropertyData;
import org.apache.chemistry.opencmis.commons.data.RepositoryInfo;
import org.apache.chemistry.opencmis.commons.definitions.PermissionDefinition;
import org.apache.chemistry.opencmis.commons.definitions.TypeDefinition;
import org.apache.chemistry.opencmis.commons.definitions.TypeDefinitionContainer;
import org.apache.chemistry.opencmis.commons.definitions.TypeDefinitionList;
import org.apache.chemistry.opencmis.commons.enums.AclPropagation;
import org.apache.chemistry.opencmis.commons.enums.CapabilityAcl;
import org.apache.chemistry.opencmis.commons.enums.CapabilityChanges;
import org.apache.chemistry.opencmis.commons.enums.CapabilityContentStreamUpdates;
import org.apache.chemistry.opencmis.commons.enums.CapabilityJoin;
import org.apache.chemistry.opencmis.commons.enums.CapabilityQuery;
import org.apache.chemistry.opencmis.commons.enums.CapabilityRenditions;
import org.apache.chemistry.opencmis.commons.enums.IncludeRelationships;
import org.apache.chemistry.opencmis.commons.enums.RelationshipDirection;
import org.apache.chemistry.opencmis.commons.enums.SupportedPermissions;
import org.apache.chemistry.opencmis.commons.enums.UnfileObject;
import org.apache.chemistry.opencmis.commons.enums.VersioningState;
import org.apache.chemistry.opencmis.commons.exceptions.CmisConstraintException;
import org.apache.chemistry.opencmis.commons.exceptions.CmisContentAlreadyExistsException;
import org.apache.chemistry.opencmis.commons.exceptions.CmisInvalidArgumentException;
import org.apache.chemistry.opencmis.commons.exceptions.CmisNameConstraintViolationException;
import org.apache.chemistry.opencmis.commons.exceptions.CmisNotSupportedException;
import org.apache.chemistry.opencmis.commons.exceptions.CmisObjectNotFoundException;
import org.apache.chemistry.opencmis.commons.exceptions.CmisPermissionDeniedException;
import org.apache.chemistry.opencmis.commons.exceptions.CmisRuntimeException;
import org.apache.chemistry.opencmis.commons.exceptions.CmisStreamNotSupportedException;
import org.apache.chemistry.opencmis.commons.impl.dataobjects.AclCapabilitiesDataImpl;
import org.apache.chemistry.opencmis.commons.impl.dataobjects.ContentStreamImpl;
import org.apache.chemistry.opencmis.commons.impl.dataobjects.FailedToDeleteDataImpl;
import org.apache.chemistry.opencmis.commons.impl.dataobjects.ObjectInFolderContainerImpl;
import org.apache.chemistry.opencmis.commons.impl.dataobjects.ObjectInFolderDataImpl;
import org.apache.chemistry.opencmis.commons.impl.dataobjects.ObjectInFolderListImpl;
import org.apache.chemistry.opencmis.commons.impl.dataobjects.ObjectListImpl;
import org.apache.chemistry.opencmis.commons.impl.dataobjects.ObjectParentDataImpl;
import org.apache.chemistry.opencmis.commons.impl.dataobjects.PermissionDefinitionDataImpl;
import org.apache.chemistry.opencmis.commons.impl.dataobjects.PermissionMappingDataImpl;
import org.apache.chemistry.opencmis.commons.impl.dataobjects.RepositoryCapabilitiesImpl;
import org.apache.chemistry.opencmis.commons.impl.dataobjects.RepositoryInfoImpl;
import org.apache.chemistry.opencmis.commons.spi.Holder;
import org.apache.commons.logging.Log;
/**
* Repository instance for CMIS repositories.<p>
*/
public class CmsCmisRepository extends A_CmsCmisRepository {
@SuppressWarnings("serial")
private static class PermissionMappings extends HashMap<String, PermissionMapping> {

    /** Default constructor.<p> */
    public PermissionMappings() {
        // nothing to initialize
    }

    /**
     * Adds a single-permission mapping under the given key and returns this
     * map, allowing fluent chaining.<p>
     *
     * @param key the permission-mapping key
     * @param permission the single permission to bind to the key
     * @return this map
     */
    public PermissionMappings add(String key, String permission) {
        put(key, createMapping(key, permission));
        return this;
    }

    /** Creates a mapping entry binding exactly one permission to a key.<p> */
    private static PermissionMapping createMapping(String key, String permission) {
        PermissionMappingDataImpl mapping = new PermissionMappingDataImpl();
        mapping.setKey(key);
        mapping.setPermissions(Collections.singletonList(permission));
        return mapping;
    }
}
/** The logger instance for this class. */
protected static final Log LOG = CmsLog.getLog(CmsCmisRepository.class);

/** The internal admin CMS context. */
private CmsObject m_adminCms;

/** The repository description (optional; falls back to project description or id). */
private String m_description;

/** The repository filter. */
private CmsRepositoryFilter m_filter;

/** The repository id. */
private String m_id;

/**
 * Readonly flag to prevent write operations on the repository.<p>
 */
private boolean m_isReadOnly;

/** The parameter configuration map. */
private CmsParameterConfiguration m_parameterConfiguration = new CmsParameterConfiguration();

/** The project of the repository. */
private CmsProject m_project;

/** The relation object helper. */
private CmsCmisRelationHelper m_relationHelper = new CmsCmisRelationHelper(this);

/** The resource object helper. */
private CmsCmisResourceHelper m_resourceHelper = new CmsCmisResourceHelper(this);

/** The root folder of the repository. */
private CmsResource m_root;
/**
 * Builds a CMIS permission definition from a permission name and its
 * human-readable description.<p>
 *
 * @param permission the permission name
 * @param description the description text
 * @return the permission definition
 */
private static PermissionDefinition createPermission(String permission, String description) {
    final PermissionDefinitionDataImpl definition = new PermissionDefinitionDataImpl();
    definition.setDescription(description);
    definition.setPermission(permission);
    return definition;
}
/**
 * @see org.opencms.cmis.I_CmsCmisRepository#addConfigurationParameter(java.lang.String, java.lang.String)
 */
public void addConfigurationParameter(String paramName, String paramValue) {
    // Raw parameters are collected here and exposed via getConfiguration().
    m_parameterConfiguration.add(paramName, paramValue);
}
/**
 * @see org.opencms.cmis.I_CmsCmisRepository#createDocument(org.opencms.cmis.CmsCmisCallContext, org.apache.chemistry.opencmis.commons.data.Properties, java.lang.String, org.apache.chemistry.opencmis.commons.data.ContentStream, org.apache.chemistry.opencmis.commons.enums.VersioningState, java.util.List, org.apache.chemistry.opencmis.commons.data.Acl, org.apache.chemistry.opencmis.commons.data.Acl)
 */
public synchronized String createDocument(
    CmsCmisCallContext context,
    Properties propertiesObj,
    String folderId,
    ContentStream contentStream,
    VersioningState versioningState,
    List<String> policies,
    Acl addAces,
    Acl removeAces) {

    // Fail fast if the repository is configured read-only.
    checkWriteAccess();
    // ACL manipulation is not supported on creation.
    if ((addAces != null) || (removeAces != null)) {
        throw new CmisConstraintException("createDocument: ACEs not allowed");
    }
    if (contentStream == null) {
        throw new CmisConstraintException("createDocument: no content stream given");
    }
    try {
        CmsObject cms = getCmsObject(context);
        Map<String, PropertyData<?>> properties = propertiesObj.getProperties();
        // NOTE(review): assumes cmis:name is always present in the properties;
        // a missing name would surface as a NullPointerException here.
        String newDocName = (String)properties.get(PropertyIds.NAME).getFirstValue();
        // Resource type: explicit property if given, otherwise derived from the file name.
        String defaultType = OpenCms.getResourceManager().getDefaultTypeForName(newDocName).getTypeName();
        String resTypeName = getResourceTypeFromProperties(properties, defaultType);
        I_CmsResourceType cmsResourceType = OpenCms.getResourceManager().getResourceType(resTypeName);
        if (cmsResourceType.isFolder()) {
            throw new CmisConstraintException("Not a document type: " + resTypeName);
        }
        List<CmsProperty> cmsProperties = getOpenCmsProperties(properties);
        checkResourceName(newDocName);
        // Read the full content up front; OpenCms createResource takes a byte array.
        InputStream stream = contentStream.getStream();
        byte[] content = CmsFileUtil.readFully(stream);
        CmsUUID parentFolderId = new CmsUUID(folderId);
        CmsResource parentFolder = cms.readResource(parentFolderId);
        // (The variable name says "folder" but this is the new document's path.)
        String newFolderPath = CmsStringUtil.joinPaths(parentFolder.getRootPath(), newDocName);
        try {
            CmsResource newDocument = cms.createResource(
                newFolderPath,
                cmsResourceType.getTypeId(),
                content,
                cmsProperties);
            // createResource leaves the new resource locked; release it for other users.
            cms.unlockResource(newDocument.getRootPath());
            return newDocument.getStructureId().toString();
        } catch (CmsVfsResourceAlreadyExistsException e) {
            // Map VFS name clashes to the CMIS-standard constraint violation.
            throw new CmisNameConstraintViolationException(e.getLocalizedMessage(), e);
        }
    } catch (CmsException e) {
        // Translates the OpenCms exception into the matching CMIS exception (always throws).
        handleCmsException(e);
        return null;
    } catch (IOException e) {
        throw new CmisRuntimeException(e.getLocalizedMessage(), e);
    }
}
/**
 * @see org.opencms.cmis.I_CmsCmisRepository#createDocumentFromSource(org.opencms.cmis.CmsCmisCallContext, java.lang.String, org.apache.chemistry.opencmis.commons.data.Properties, java.lang.String, org.apache.chemistry.opencmis.commons.enums.VersioningState, java.util.List, org.apache.chemistry.opencmis.commons.data.Acl, org.apache.chemistry.opencmis.commons.data.Acl)
 */
public synchronized String createDocumentFromSource(
    CmsCmisCallContext context,
    String sourceId,
    Properties propertiesObj,
    String folderId,
    VersioningState versioningState,
    List<String> policies,
    Acl addAces,
    Acl removeAces) {

    // Fail fast if the repository is configured read-only.
    checkWriteAccess();
    // ACL manipulation is not supported on creation.
    if ((addAces != null) || (removeAces != null)) {
        throw new CmisConstraintException("createDocument: ACEs not allowed");
    }
    try {
        CmsObject cms = getCmsObject(context);
        // Properties are optional for copy: default to an empty map.
        Map<String, PropertyData<?>> properties = new HashMap<String, PropertyData<?>>();
        if (propertiesObj != null) {
            properties = propertiesObj.getProperties();
        }
        List<CmsProperty> cmsProperties = getOpenCmsProperties(properties);
        CmsUUID parentFolderId = new CmsUUID(folderId);
        CmsResource parentFolder = cms.readResource(parentFolderId);
        CmsUUID sourceUuid = new CmsUUID(sourceId);
        CmsResource source = cms.readResource(sourceUuid);
        String sourcePath = source.getRootPath();
        // Target name: the cmis:name property if supplied, else the source's name.
        PropertyData<?> nameProp = properties.get(PropertyIds.NAME);
        String newDocName;
        if (nameProp != null) {
            newDocName = (String)nameProp.getFirstValue();
            checkResourceName(newDocName);
        } else {
            newDocName = CmsResource.getName(source.getRootPath());
        }
        String targetPath = CmsStringUtil.joinPaths(parentFolder.getRootPath(), newDocName);
        try {
            cms.copyResource(sourcePath, targetPath);
        } catch (CmsVfsResourceAlreadyExistsException e) {
            // Map VFS name clashes to the CMIS-standard constraint violation.
            throw new CmisNameConstraintViolationException(e.getLocalizedMessage(), e);
        }
        CmsResource targetResource = cms.readResource(targetPath);
        // Make the copy's modification date match its creation date.
        cms.setDateLastModified(targetResource.getRootPath(), targetResource.getDateCreated(), false);
        // NOTE(review): unlock-then-ensureLock looks redundant — the copy is
        // unlocked and immediately re-locked to write properties. Presumably
        // this normalizes the lock owner; confirm before simplifying.
        cms.unlockResource(targetResource);
        boolean wasLocked = ensureLock(cms, targetResource);
        cms.writePropertyObjects(targetResource, cmsProperties);
        if (wasLocked) {
            cms.unlockResource(targetResource);
        }
        return targetResource.getStructureId().toString();
    } catch (CmsException e) {
        // Translates the OpenCms exception into the matching CMIS exception (always throws).
        handleCmsException(e);
        return null;
    }
}
/**
 * @see org.opencms.cmis.I_CmsCmisRepository#createFolder(org.opencms.cmis.CmsCmisCallContext, org.apache.chemistry.opencmis.commons.data.Properties, java.lang.String, java.util.List, org.apache.chemistry.opencmis.commons.data.Acl, org.apache.chemistry.opencmis.commons.data.Acl)
 */
public synchronized String createFolder(
    CmsCmisCallContext context,
    Properties propertiesObj,
    String folderId,
    List<String> policies,
    Acl addAces,
    Acl removeAces) {

    // Fail fast if the repository is configured read-only.
    checkWriteAccess();
    // ACL manipulation is not supported on creation.
    if ((addAces != null) || (removeAces != null)) {
        throw new CmisConstraintException("createFolder: ACEs not allowed");
    }
    try {
        CmsObject cms = getCmsObject(context);
        Map<String, PropertyData<?>> properties = propertiesObj.getProperties();
        // Resource type: explicit property if given, otherwise the standard folder type.
        String resTypeName = getResourceTypeFromProperties(properties, CmsResourceTypeFolder.getStaticTypeName());
        I_CmsResourceType cmsResourceType = OpenCms.getResourceManager().getResourceType(resTypeName);
        if (!cmsResourceType.isFolder()) {
            throw new CmisConstraintException("Invalid folder type: " + resTypeName);
        }
        List<CmsProperty> cmsProperties = getOpenCmsProperties(properties);
        // NOTE(review): assumes cmis:name is always present in the properties.
        String newFolderName = (String)properties.get(PropertyIds.NAME).getFirstValue();
        checkResourceName(newFolderName);
        CmsUUID parentFolderId = new CmsUUID(folderId);
        CmsResource parentFolder = cms.readResource(parentFolderId);
        String newFolderPath = CmsStringUtil.joinPaths(parentFolder.getRootPath(), newFolderName);
        try {
            // Folders have no content, hence the null content argument.
            CmsResource newFolder = cms.createResource(
                newFolderPath,
                cmsResourceType.getTypeId(),
                null,
                cmsProperties);
            // createResource leaves the new resource locked; release it for other users.
            cms.unlockResource(newFolder);
            return newFolder.getStructureId().toString();
        } catch (CmsVfsResourceAlreadyExistsException e) {
            // Map VFS name clashes to the CMIS-standard constraint violation.
            throw new CmisNameConstraintViolationException(e.getLocalizedMessage(), e);
        }
    } catch (CmsException e) {
        // Translates the OpenCms exception into the matching CMIS exception (always throws).
        handleCmsException(e);
        return null;
    }
}
/**
 * @see org.opencms.cmis.I_CmsCmisRepository#createRelationship(org.opencms.cmis.CmsCmisCallContext, org.apache.chemistry.opencmis.commons.data.Properties, java.util.List, org.apache.chemistry.opencmis.commons.data.Acl, org.apache.chemistry.opencmis.commons.data.Acl)
 */
public synchronized String createRelationship(
    CmsCmisCallContext context,
    Properties properties,
    List<String> policies,
    Acl addAces,
    Acl removeAces) {

    try {
        CmsObject cms = getCmsObject(context);
        Map<String, PropertyData<?>> propertyMap = properties.getProperties();
        // NOTE(review): assumes cmis:sourceId / cmis:targetId / cmis:objectTypeId
        // are all present; a missing one would surface as a NullPointerException.
        String sourceProp = (String)(propertyMap.get(PropertyIds.SOURCE_ID).getFirstValue());
        String targetProp = (String)(propertyMap.get(PropertyIds.TARGET_ID).getFirstValue());
        String typeId = (String)(propertyMap.get(PropertyIds.OBJECT_TYPE_ID).getFirstValue());
        // Only OpenCms relation types ("opencms:<type>") can be created here.
        if (!typeId.startsWith("opencms:")) {
            throw new CmisConstraintException("Can't create this relationship type.");
        }
        String cmsTypeName = typeId.substring("opencms:".length());
        CmsUUID sourceId = new CmsUUID(sourceProp);
        CmsUUID targetId = new CmsUUID(targetProp);
        CmsResource sourceRes = cms.readResource(sourceId);
        // The relation is stored on the source resource, so it must be locked.
        boolean wasLocked = ensureLock(cms, sourceRes);
        try {
            CmsResource targetRes = cms.readResource(targetId);
            cms.addRelationToResource(sourceRes.getRootPath(), targetRes.getRootPath(), cmsTypeName);
            // Synthetic relationship object id: REL_<source>_<target>_<type>.
            return "REL_" + sourceRes.getStructureId() + "_" + targetRes.getStructureId() + "_" + cmsTypeName;
        } finally {
            // Only release the lock if this call acquired it.
            if (wasLocked) {
                cms.unlockResource(sourceRes);
            }
        }
    } catch (CmsException e) {
        // Translates the OpenCms exception into the matching CMIS exception (always throws).
        CmsCmisUtil.handleCmsException(e);
        return null;
    }
}
/**
 * @see org.opencms.cmis.I_CmsCmisRepository#deleteContentStream(org.opencms.cmis.CmsCmisCallContext, org.apache.chemistry.opencmis.commons.spi.Holder, org.apache.chemistry.opencmis.commons.spi.Holder)
 *
 * Always rejected: this repository does not support removing a document's
 * content stream.
 */
public synchronized void deleteContentStream(
    CmsCmisCallContext context,
    Holder<String> objectId,
    Holder<String> changeToken) {

    throw new CmisConstraintException("Content streams may not be deleted.");
}
/**
 * @see org.opencms.cmis.I_CmsCmisRepository#deleteObject(org.opencms.cmis.CmsCmisCallContext, java.lang.String, boolean)
 */
public synchronized void deleteObject(CmsCmisCallContext context, String objectId, boolean allVersions) {

    // Fail fast if the repository is configured read-only, then delegate to
    // the helper matching the object id (resource vs. relationship).
    checkWriteAccess();
    getHelper(objectId).deleteObject(context, objectId, allVersions);
}
/**
 * @see org.opencms.cmis.I_CmsCmisRepository#deleteTree(org.opencms.cmis.CmsCmisCallContext, java.lang.String, boolean, org.apache.chemistry.opencmis.commons.enums.UnfileObject, boolean)
 */
public synchronized FailedToDeleteData deleteTree(
    CmsCmisCallContext context,
    String folderId,
    boolean allVersions,
    UnfileObject unfileObjects,
    boolean continueOnFailure) {

    // Fail fast if the repository is configured read-only.
    checkWriteAccess();
    try {
        // The delete is all-or-nothing here, so the failed-id list is always
        // empty on success; failures surface as exceptions instead.
        // (allVersions / unfileObjects / continueOnFailure are not consulted.)
        FailedToDeleteDataImpl result = new FailedToDeleteDataImpl();
        result.setIds(new ArrayList<String>());
        CmsObject cms = getCmsObject(context);
        CmsUUID structureId = new CmsUUID(folderId);
        CmsResource folder = cms.readResource(structureId);
        if (!folder.isFolder()) {
            throw new CmisConstraintException("deleteTree can only be used on folders.");
        }
        // Lock the folder (no unlock needed — deleting removes the resource).
        ensureLock(cms, folder);
        cms.deleteResource(folder.getRootPath(), CmsResource.DELETE_PRESERVE_SIBLINGS);
        return result;
    } catch (CmsException e) {
        // Translates the OpenCms exception into the matching CMIS exception (always throws).
        handleCmsException(e);
        return null;
    }
}
/**
 * @see org.opencms.cmis.I_CmsCmisRepository#getAcl(org.opencms.cmis.CmsCmisCallContext, java.lang.String, boolean)
 */
public synchronized Acl getAcl(CmsCmisCallContext context, String objectId, boolean onlyBasicPermissions) {

    // Delegates to the helper matching the object id (resource vs. relationship).
    return getHelper(objectId).getAcl(context, objectId, onlyBasicPermissions);
}
/**
 * @see org.opencms.cmis.I_CmsCmisRepository#getAllowableActions(org.opencms.cmis.CmsCmisCallContext, java.lang.String)
 */
public synchronized AllowableActions getAllowableActions(CmsCmisCallContext context, String objectId) {

    // Delegates to the helper matching the object id (resource vs. relationship).
    return getHelper(objectId).getAllowableActions(context, objectId);
}
/**
 * @see org.opencms.cmis.I_CmsCmisRepository#getCheckedOutDocs(org.opencms.cmis.CmsCmisCallContext, java.lang.String, java.lang.String, java.lang.String, boolean, org.apache.chemistry.opencmis.commons.enums.IncludeRelationships, java.lang.String, java.math.BigInteger, java.math.BigInteger)
 *
 * Check-out is not supported by this repository, so the result is always
 * an empty list; all filter and paging arguments are ignored.
 */
public synchronized ObjectList getCheckedOutDocs(
    CmsCmisCallContext context,
    String folderId,
    String filter,
    String orderBy,
    boolean includeAllowableActions,
    IncludeRelationships includeRelationships,
    String renditionFilter,
    BigInteger maxItems,
    BigInteger skipCount) {

    final ObjectListImpl checkedOut = new ObjectListImpl();
    checkedOut.setObjects(new ArrayList<ObjectData>());
    return checkedOut;
}
/**
 * @see org.opencms.cmis.I_CmsCmisRepository#getChildren(org.opencms.cmis.CmsCmisCallContext, java.lang.String, java.lang.String, java.lang.String, boolean, org.apache.chemistry.opencmis.commons.enums.IncludeRelationships, java.lang.String, boolean, java.math.BigInteger, java.math.BigInteger)
 */
public synchronized ObjectInFolderList getChildren(
    CmsCmisCallContext context,
    String folderId,
    String filter,
    String orderBy,
    boolean includeAllowableActions,
    IncludeRelationships includeRelationships,
    String renditionFilter,
    boolean includePathSegment,
    BigInteger maxItems,
    BigInteger skipCount) {

    try {
        CmsCmisResourceHelper helper = getResourceHelper();
        // split filter
        Set<String> filterCollection = splitFilter(filter);
        // Paging (maxItems/skipCount) is handled entirely by the
        // CmsObjectListLimiter below. The previous local skip/max
        // normalization was dead code and has been removed.
        CmsObject cms = getCmsObject(context);
        CmsUUID structureId = new CmsUUID(folderId);
        CmsResource folder = cms.readResource(structureId);
        if (!folder.isFolder()) {
            throw new CmisObjectNotFoundException("Not a folder!");
        }
        // set object info of the folder
        if (context.isObjectInfoRequired()) {
            helper.collectObjectData(context, cms, folder, null, false, false, includeRelationships);
        }
        // prepare result
        ObjectInFolderListImpl result = new ObjectInFolderListImpl();
        String folderSitePath = cms.getRequestContext().getSitePath(folder);
        List<CmsResource> children = cms.getResourcesInFolder(folderSitePath, CmsResourceFilter.DEFAULT);
        CmsObjectListLimiter<CmsResource> limiter = new CmsObjectListLimiter<CmsResource>(
            children,
            maxItems,
            skipCount);
        List<ObjectInFolderData> resultObjects = new ArrayList<ObjectInFolderData>();
        for (CmsResource child : limiter) {
            // build and add child object
            ObjectInFolderDataImpl objectInFolder = new ObjectInFolderDataImpl();
            objectInFolder.setObject(helper.collectObjectData(
                context,
                cms,
                child,
                filterCollection,
                includeAllowableActions,
                false,
                includeRelationships));
            if (includePathSegment) {
                objectInFolder.setPathSegment(child.getName());
            }
            resultObjects.add(objectInFolder);
        }
        result.setObjects(resultObjects);
        // numItems is the total child count, not the size of the returned page.
        result.setNumItems(BigInteger.valueOf(children.size()));
        result.setHasMoreItems(Boolean.valueOf(limiter.hasMore()));
        return result;
    } catch (CmsException e) {
        // Translates the OpenCms exception into the matching CMIS exception (always throws).
        handleCmsException(e);
        return null;
    }
}
/**
 * @see org.opencms.cmis.I_CmsCmisRepository#getConfiguration()
 */
public CmsParameterConfiguration getConfiguration() {

    // Exposes the parameters collected via addConfigurationParameter().
    return m_parameterConfiguration;
}
/**
 * @see org.opencms.cmis.I_CmsCmisRepository#getContentStream(org.opencms.cmis.CmsCmisCallContext, java.lang.String, java.lang.String, java.math.BigInteger, java.math.BigInteger)
 */
public synchronized ContentStream getContentStream(
    CmsCmisCallContext context,
    String objectId,
    String streamId,
    BigInteger offset,
    BigInteger length) {

    try {
        // Partial reads are rejected up front...
        if ((offset != null) || (length != null)) {
            throw new CmisInvalidArgumentException("Offset and Length are not supported!");
        }
        CmsObject cms = getCmsObject(context);
        CmsResource resource = cms.readResource(new CmsUUID(objectId));
        if (resource.isFolder()) {
            throw new CmisStreamNotSupportedException("Not a file!");
        }
        CmsFile file = cms.readFile(resource);
        byte[] contents;
        contents = file.getContents();
        // ...so extractRange only ever sees (null, null) here and presumably
        // returns the full content unchanged — confirm against its definition.
        contents = extractRange(contents, offset, length);
        InputStream stream = new ByteArrayInputStream(contents);
        ContentStreamImpl result = new ContentStreamImpl();
        result.setFileName(file.getName());
        result.setLength(BigInteger.valueOf(contents.length));
        // Mime type is derived from the file name, defaulting to text/plain.
        result.setMimeType(OpenCms.getResourceManager().getMimeType(file.getRootPath(), null, "text/plain"));
        result.setStream(stream);
        return result;
    } catch (CmsException e) {
        // Translates the OpenCms exception into the matching CMIS exception (always throws).
        handleCmsException(e);
        return null;
    }
}
/**
 * @see org.opencms.cmis.I_CmsCmisRepository#getDescendants(org.opencms.cmis.CmsCmisCallContext, java.lang.String, java.math.BigInteger, java.lang.String, boolean, boolean, boolean)
 */
public synchronized List<ObjectInFolderContainer> getDescendants(
    CmsCmisCallContext context,
    String folderId,
    BigInteger depth,
    String filter,
    boolean includeAllowableActions,
    boolean includePathSegment,
    boolean foldersOnly) {

    try {
        CmsCmisResourceHelper helper = getResourceHelper();
        // check depth: null means 2 levels (CMIS default), -1 means unlimited.
        int d = (depth == null ? 2 : depth.intValue());
        if (d == 0) {
            throw new CmisInvalidArgumentException("Depth must not be 0!");
        }
        if (d < -1) {
            d = -1;
        }
        // split filter
        Set<String> filterCollection = splitFilter(filter);
        CmsObject cms = getCmsObject(context);
        CmsUUID folderStructureId = new CmsUUID(folderId);
        CmsResource folder = cms.readResource(folderStructureId);
        if (!folder.isFolder()) {
            throw new CmisObjectNotFoundException("Not a folder!");
        }
        // set object info of the folder
        if (context.isObjectInfoRequired()) {
            helper.collectObjectData(context, cms, folder, null, false, false, IncludeRelationships.NONE);
        }
        // get the tree: recursion into subfolders happens in gatherDescendants.
        List<ObjectInFolderContainer> result = new ArrayList<ObjectInFolderContainer>();
        gatherDescendants(
            context,
            cms,
            folder,
            result,
            foldersOnly,
            d,
            filterCollection,
            includeAllowableActions,
            includePathSegment);
        return result;
    } catch (CmsException e) {
        // Translates the OpenCms exception into the matching CMIS exception (always throws).
        handleCmsException(e);
        return null;
    }
}
/**
 * Returns the repository description, preferring the configured description,
 * then the project description, and finally the repository id.<p>
 *
 * @see org.opencms.cmis.I_CmsCmisRepository#getDescription()
 */
public String getDescription() {

    String result = m_description;
    if (result == null) {
        result = (m_project != null) ? m_project.getDescription() : m_id;
    }
    return result;
}
/**
 * Returns the repository filter (set via configuration, but not actually applied by CMIS repositories).<p>
 *
 * @see org.opencms.cmis.I_CmsCmisRepository#getFilter()
 */
public CmsRepositoryFilter getFilter() {
    return m_filter;
}
/**
 * Returns the parent object data of a folder.<p>
 *
 * Delegates to {@link #getObjectParents} and fails for the root folder,
 * which has no parent.<p>
 *
 * @see org.opencms.cmis.I_CmsCmisRepository#getFolderParent(org.opencms.cmis.CmsCmisCallContext, java.lang.String, java.lang.String)
 */
public synchronized ObjectData getFolderParent(CmsCmisCallContext context, String folderId, String filter) {

    List<ObjectParentData> parents = getObjectParents(context, folderId, filter, false, false);
    if (parents.isEmpty()) {
        throw new CmisInvalidArgumentException("The root folder has no parent!");
    }
    ObjectParentData first = parents.get(0);
    return first.getObject();
}
/**
 * Returns the repository id (also used as the repository name).<p>
 *
 * @see org.opencms.cmis.I_CmsCmisRepository#getId()
 */
public String getId() {
    return m_id;
}
/**
 * Returns the repository name; identical to the repository id for this implementation.<p>
 *
 * @see org.opencms.cmis.I_CmsCmisRepository#getName()
 */
public String getName() {
    return m_id;
}
/**
 * Returns the object data for a given object id.<p>
 *
 * Dispatches to either the resource helper (UUID ids) or the relation helper
 * (relation-pattern ids), depending on the id format.<p>
 *
 * @see org.opencms.cmis.I_CmsCmisRepository#getObject(org.opencms.cmis.CmsCmisCallContext, java.lang.String, java.lang.String, boolean, org.apache.chemistry.opencmis.commons.enums.IncludeRelationships, java.lang.String, boolean, boolean)
 */
public synchronized ObjectData getObject(
    CmsCmisCallContext context,
    String objectId,
    String filter,
    boolean includeAllowableActions,
    IncludeRelationships includeRelationships,
    String renditionFilter,
    boolean includePolicyIds,
    boolean includeAcl) {

    // select the helper matching the id format, then delegate unchanged
    I_CmsCmisObjectHelper helper = getHelper(objectId);
    return helper.getObject(
        context,
        objectId,
        filter,
        includeAllowableActions,
        includeRelationships,
        renditionFilter,
        includePolicyIds,
        includeAcl);
}
/**
 * Returns the object data for a resource identified by its VFS path.<p>
 *
 * @see org.opencms.cmis.I_CmsCmisRepository#getObjectByPath(org.opencms.cmis.CmsCmisCallContext, java.lang.String, java.lang.String, boolean, org.apache.chemistry.opencmis.commons.enums.IncludeRelationships, java.lang.String, boolean, boolean)
 */
public synchronized ObjectData getObjectByPath(
    CmsCmisCallContext context,
    String path,
    String filter,
    boolean includeAllowableActions,
    IncludeRelationships includeRelationships,
    String renditionFilter,
    boolean includePolicyIds,
    boolean includeAcl
) {

    try {
        CmsCmisResourceHelper helper = getResourceHelper();
        // split filter into the set of requested property query names
        Set<String> propertyFilter = splitFilter(filter);
        // reject empty/blank paths before touching the VFS
        if (CmsStringUtil.isEmptyOrWhitespaceOnly(path)) {
            throw new CmisInvalidArgumentException("Invalid folder path!");
        }
        CmsObject cms = getCmsObject(context);
        CmsResource resource = cms.readResource(path);
        ObjectData result = helper.collectObjectData(
            context,
            cms,
            resource,
            propertyFilter,
            includeAllowableActions,
            includeAcl,
            IncludeRelationships.NONE);
        return result;
    } catch (CmsException e) {
        handleCmsException(e);
        return null;
    }
}
/**
 * Returns the (single) parent of an object; the root folder has no parents.<p>
 *
 * @see org.opencms.cmis.I_CmsCmisRepository#getObjectParents(org.opencms.cmis.CmsCmisCallContext, java.lang.String, java.lang.String, boolean, boolean)
 */
public synchronized List<ObjectParentData> getObjectParents(
    CmsCmisCallContext context,
    String objectId,
    String filter,
    boolean includeAllowableActions,
    boolean includeRelativePathSegment) {

    try {
        CmsCmisResourceHelper helper = getResourceHelper();
        // split filter (comma separated property query names, null means "all")
        Set<String> filterCollection = splitFilter(filter);
        CmsObject cms = getCmsObject(context);
        CmsUUID structureId = new CmsUUID(objectId);
        CmsResource file = cms.readResource(structureId);
        // don't climb above the root folder of this repository
        if (m_root.equals(file)) {
            return Collections.emptyList();
        }
        // set object info of the the object (required by some CMIS bindings)
        if (context.isObjectInfoRequired()) {
            helper.collectObjectData(context, cms, file, null, false, false, IncludeRelationships.NONE);
        }
        // get parent folder; multifiling is not supported, so there is at most one parent
        CmsResource parent = cms.readParentFolder(file.getStructureId());
        ObjectData object = helper.collectObjectData(
            context,
            cms,
            parent,
            filterCollection,
            includeAllowableActions,
            false,
            IncludeRelationships.NONE);
        ObjectParentDataImpl result = new ObjectParentDataImpl();
        result.setObject(object);
        if (includeRelativePathSegment) {
            // the path segment of the child relative to this parent is just the file name
            result.setRelativePathSegment(file.getName());
        }
        return Collections.singletonList((ObjectParentData)result);
    } catch (CmsException e) {
        handleCmsException(e);
        return null;
    }
}
/**
 * Returns the relationships of an object, paged via maxItems/skipCount.<p>
 *
 * @see org.opencms.cmis.I_CmsCmisRepository#getObjectRelationships(org.opencms.cmis.CmsCmisCallContext, java.lang.String, boolean, org.apache.chemistry.opencmis.commons.enums.RelationshipDirection, java.lang.String, java.lang.String, boolean, java.math.BigInteger, java.math.BigInteger)
 */
public synchronized ObjectList getObjectRelationships(
    CmsCmisCallContext context,
    String objectId,
    boolean includeSubRelationshipTypes,
    RelationshipDirection relationshipDirection,
    String typeId,
    String filter,
    boolean includeAllowableActions,
    BigInteger maxItems,
    BigInteger skipCount) {

    try {
        CmsObject cms = getCmsObject(context);
        ObjectListImpl result = new ObjectListImpl();
        CmsUUID structureId = new CmsUUID(objectId);
        CmsResource resource = cms.readResource(structureId);
        // collect all relationship data, then apply paging below
        // (uses the unqualified splitFilter/handleCmsException like the rest of this class)
        List<ObjectData> resultObjects = getRelationshipObjectData(
            context,
            cms,
            resource,
            relationshipDirection,
            splitFilter(filter),
            includeAllowableActions);
        // the limiter applies skipCount / maxItems to the full result list
        CmsObjectListLimiter<ObjectData> limiter = new CmsObjectListLimiter<ObjectData>(
            resultObjects,
            maxItems,
            skipCount);
        List<ObjectData> limitedResults = new ArrayList<ObjectData>();
        for (ObjectData objectData : limiter) {
            limitedResults.add(objectData);
        }
        // numItems reports the total (unpaged) count, per the CMIS specification
        result.setNumItems(BigInteger.valueOf(resultObjects.size()));
        result.setHasMoreItems(Boolean.valueOf(limiter.hasMore()));
        result.setObjects(limitedResults);
        return result;
    } catch (CmsException e) {
        handleCmsException(e);
        return null;
    }
}
/**
 * Returns the properties of an object, restricted by the given CMIS property filter.<p>
 *
 * @see org.opencms.cmis.I_CmsCmisRepository#getProperties(org.opencms.cmis.CmsCmisCallContext, java.lang.String, java.lang.String)
 */
public synchronized Properties getProperties(CmsCmisCallContext context, String objectId, String filter) {

    // pass the caller's property filter through instead of ignoring it
    // (previously 'null' was passed, which returned all properties regardless of the filter)
    ObjectData object = getObject(context, objectId, filter, false, null, null, false, false);
    return object.getProperties();
}
/**
 * Builds the static repository info / capability descriptor for this repository.<p>
 *
 * @see org.opencms.cmis.I_CmsCmisRepository#getRepositoryInfo()
 */
public synchronized RepositoryInfo getRepositoryInfo() {

    // compile repository info: identity and product data
    RepositoryInfoImpl repositoryInfo = new RepositoryInfoImpl();
    repositoryInfo.setId(m_id);
    repositoryInfo.setName(getName());
    repositoryInfo.setDescription(getDescription());
    repositoryInfo.setCmisVersionSupported("1.0");
    repositoryInfo.setProductName("OpenCms");
    repositoryInfo.setProductVersion(OpenCms.getSystemInfo().getVersion());
    repositoryInfo.setVendorName("Alkacon Software GmbH");
    repositoryInfo.setRootFolder(m_root.getStructureId().toString());
    repositoryInfo.setThinClientUri("");
    repositoryInfo.setPrincipalAnonymous(OpenCms.getDefaultUsers().getUserGuest());
    repositoryInfo.setChangesIncomplete(Boolean.TRUE);
    // capabilities: read-mostly repository — no versioning, query, or change log support
    RepositoryCapabilitiesImpl capabilities = new RepositoryCapabilitiesImpl();
    capabilities.setCapabilityAcl(CapabilityAcl.DISCOVER);
    capabilities.setAllVersionsSearchable(Boolean.FALSE);
    capabilities.setCapabilityJoin(CapabilityJoin.NONE);
    capabilities.setSupportsMultifiling(Boolean.FALSE);
    capabilities.setSupportsUnfiling(Boolean.FALSE);
    capabilities.setSupportsVersionSpecificFiling(Boolean.FALSE);
    capabilities.setIsPwcSearchable(Boolean.FALSE);
    capabilities.setIsPwcUpdatable(Boolean.FALSE);
    capabilities.setCapabilityQuery(CapabilityQuery.NONE);
    capabilities.setCapabilityChanges(CapabilityChanges.NONE);
    capabilities.setCapabilityContentStreamUpdates(CapabilityContentStreamUpdates.ANYTIME);
    capabilities.setSupportsGetDescendants(Boolean.TRUE);
    capabilities.setSupportsGetFolderTree(Boolean.TRUE);
    capabilities.setCapabilityRendition(CapabilityRenditions.NONE);
    repositoryInfo.setCapabilities(capabilities);
    // ACL capabilities: basic CMIS permissions mapped to read/write semantics
    AclCapabilitiesDataImpl aclCapability = new AclCapabilitiesDataImpl();
    aclCapability.setSupportedPermissions(SupportedPermissions.BOTH);
    aclCapability.setAclPropagation(AclPropagation.REPOSITORYDETERMINED);
    List<PermissionDefinition> permissions = new ArrayList<PermissionDefinition>();
    permissions.add(createPermission("cmis:read", "Read"));
    permissions.add(createPermission("cmis:write", "Write"));
    permissions.add(createPermission("cmis:all", "All"));
    aclCapability.setPermissionDefinitionData(permissions);
    // mappings: which CMIS permission each operation requires
    PermissionMappings m = new PermissionMappings();
    m.add(PermissionMapping.CAN_CREATE_DOCUMENT_FOLDER, "cmis:write");
    m.add(PermissionMapping.CAN_CREATE_FOLDER_FOLDER, "cmis:write");
    m.add(PermissionMapping.CAN_DELETE_CONTENT_DOCUMENT, "cmis:write");
    m.add(PermissionMapping.CAN_DELETE_OBJECT, "cmis:write");
    m.add(PermissionMapping.CAN_DELETE_TREE_FOLDER, "cmis:write");
    m.add(PermissionMapping.CAN_GET_ACL_OBJECT, "cmis:read");
    m.add(PermissionMapping.CAN_GET_ALL_VERSIONS_VERSION_SERIES, "cmis:read");
    m.add(PermissionMapping.CAN_GET_CHILDREN_FOLDER, "cmis:read");
    m.add(PermissionMapping.CAN_GET_DESCENDENTS_FOLDER, "cmis:read");
    m.add(PermissionMapping.CAN_GET_FOLDER_PARENT_OBJECT, "cmis:read");
    m.add(PermissionMapping.CAN_GET_PARENTS_FOLDER, "cmis:read");
    m.add(PermissionMapping.CAN_GET_PROPERTIES_OBJECT, "cmis:read");
    m.add(PermissionMapping.CAN_MOVE_OBJECT, "cmis:write");
    m.add(PermissionMapping.CAN_MOVE_SOURCE, "cmis:write");
    m.add(PermissionMapping.CAN_MOVE_TARGET, "cmis:write");
    m.add(PermissionMapping.CAN_SET_CONTENT_DOCUMENT, "cmis:write");
    m.add(PermissionMapping.CAN_UPDATE_PROPERTIES_OBJECT, "cmis:write");
    m.add(PermissionMapping.CAN_VIEW_CONTENT_OBJECT, "cmis:read");
    aclCapability.setPermissionMappingData(m);
    repositoryInfo.setAclCapabilities(aclCapability);
    return repositoryInfo;
}
/**
 * Returns the direct children of a type definition; delegates to the type manager.<p>
 *
 * @see org.opencms.cmis.I_CmsCmisRepository#getTypeChildren(org.opencms.cmis.CmsCmisCallContext, java.lang.String, boolean, java.math.BigInteger, java.math.BigInteger)
 */
public synchronized TypeDefinitionList getTypeChildren(
    CmsCmisCallContext context,
    String typeId,
    boolean includePropertyDefinitions,
    BigInteger maxItems,
    BigInteger skipCount) {

    return m_typeManager.getTypeChildren(typeId, includePropertyDefinitions, maxItems, skipCount);
}
/**
 * Returns a single type definition; delegates to the type manager.<p>
 *
 * @see org.opencms.cmis.I_CmsCmisRepository#getTypeDefinition(org.opencms.cmis.CmsCmisCallContext, java.lang.String)
 */
public synchronized TypeDefinition getTypeDefinition(CmsCmisCallContext context, String typeId) {
    return m_typeManager.getTypeDefinition(typeId);
}
/**
 * Returns the descendant tree of a type definition; delegates to the type manager.<p>
 *
 * @see org.opencms.cmis.I_CmsCmisRepository#getTypeDescendants(org.opencms.cmis.CmsCmisCallContext, java.lang.String, java.math.BigInteger, boolean)
 */
public synchronized List<TypeDefinitionContainer> getTypeDescendants(
    CmsCmisCallContext context,
    String typeId,
    BigInteger depth,
    boolean includePropertyDefinitions) {

    return m_typeManager.getTypeDescendants(typeId, depth, includePropertyDefinitions);
}
/**
 * Finalizes the repository configuration: initializes the (unused) filter and
 * reads the optional "description" parameter.<p>
 *
 * @see org.opencms.cmis.I_CmsCmisRepository#initConfiguration()
 */
public void initConfiguration() throws CmsConfigurationException {

    if (m_filter != null) {
        m_filter.initConfiguration();
    }
    // null default: getDescription() then falls back to the project description or the id
    m_description = m_parameterConfiguration.getString("description", null);
}
/**
 * Initializes this repository with an admin CMS context, switching it to the
 * configured project (parameter "project", default "Online").<p>
 *
 * The repository is read-only when the configured project is the Online project.<p>
 *
 * @see org.opencms.cmis.I_CmsCmisRepository#initializeCms(org.opencms.file.CmsObject)
 */
public void initializeCms(CmsObject cms) throws CmsException {

    m_adminCms = cms;
    m_typeManager = CmsCmisTypeManager.getDefaultInstance(m_adminCms);
    String projectName = m_parameterConfiguration.getString("project", "Online");
    // the repository root is the site root of the admin context
    CmsResource root = m_adminCms.readResource("/");
    // clone the admin context so the project switch does not affect the caller's CmsObject
    CmsObject offlineCms = OpenCms.initCmsObject(m_adminCms);
    CmsProject project = m_adminCms.readProject(projectName);
    m_project = project;
    offlineCms.getRequestContext().setCurrentProject(project);
    // from here on, all repository operations run in the configured project
    m_adminCms = offlineCms;
    m_root = root;
    m_isReadOnly = project.isOnlineProject();
}
/**
 * Moves an object into a target folder, keeping its name.<p>
 *
 * The resource is locked for the move if necessary and unlocked again afterwards.<p>
 *
 * @see org.opencms.cmis.I_CmsCmisRepository#moveObject(org.opencms.cmis.CmsCmisCallContext, org.apache.chemistry.opencmis.commons.spi.Holder, java.lang.String, java.lang.String)
 */
public synchronized void moveObject(
    CmsCmisCallContext context,
    Holder<String> objectId,
    String targetFolderId,
    String sourceFolderId) {

    // fail fast on read-only repositories
    checkWriteAccess();
    try {
        CmsObject cms = getCmsObject(context);
        CmsUUID structureId = new CmsUUID(objectId.getValue());
        CmsUUID targetStructureId = new CmsUUID(targetFolderId);
        CmsResource targetFolder = cms.readResource(targetStructureId);
        CmsResource resourceToMove = cms.readResource(structureId);
        String name = CmsResource.getName(resourceToMove.getRootPath());
        String newPath = CmsStringUtil.joinPaths(targetFolder.getRootPath(), name);
        // ensureLock returns true if the lock was newly acquired (and must be released by us)
        boolean wasLocked = ensureLock(cms, resourceToMove);
        try {
            cms.moveResource(resourceToMove.getRootPath(), newPath);
        } finally {
            if (wasLocked) {
                // re-read by structure id: the root path changed, but the id is stable
                CmsResource movedResource = cms.readResource(resourceToMove.getStructureId());
                cms.unlockResource(movedResource);
            }
        }
    } catch (CmsException e) {
        handleCmsException(e);
    }
}
/**
 * Replaces the content of a document with the given content stream.<p>
 *
 * The overwrite flag must be set, since OpenCms files always have (possibly empty)
 * content. The resource is locked if necessary and reliably unlocked afterwards.<p>
 *
 * @see org.opencms.cmis.I_CmsCmisRepository#setContentStream(org.opencms.cmis.CmsCmisCallContext, org.apache.chemistry.opencmis.commons.spi.Holder, boolean, org.apache.chemistry.opencmis.commons.spi.Holder, org.apache.chemistry.opencmis.commons.data.ContentStream)
 */
public synchronized void setContentStream(
    CmsCmisCallContext context,
    Holder<String> objectId,
    boolean overwriteFlag,
    Holder<String> changeToken,
    ContentStream contentStream) {

    // fail fast on read-only repositories
    checkWriteAccess();
    try {
        CmsObject cms = getCmsObject(context);
        CmsUUID structureId = new CmsUUID(objectId.getValue());
        if (!overwriteFlag) {
            // a content stream always exists, so "no overwrite" must fail per CMIS
            throw new CmisContentAlreadyExistsException();
        }
        CmsResource resource = cms.readResource(structureId);
        if (resource.isFolder()) {
            throw new CmisStreamNotSupportedException("Folders may not have content streams.");
        }
        CmsFile file = cms.readFile(resource);
        InputStream contentInput = contentStream.getStream();
        byte[] newContent = CmsFileUtil.readFully(contentInput);
        file.setContents(newContent);
        boolean wasLocked = ensureLock(cms, resource);
        try {
            cms.writeFile(file);
        } finally {
            // release the lock even if writeFile fails; previously a failed write left
            // the resource locked (now consistent with moveObject/updateProperties)
            if (wasLocked) {
                cms.unlockResource(resource);
            }
        }
    } catch (CmsException e) {
        handleCmsException(e);
    } catch (IOException e) {
        throw new CmisRuntimeException(e.getLocalizedMessage(), e);
    }
}
/**
 * Stores the configured repository filter, but warns that CMIS repositories ignore it.<p>
 *
 * @see org.opencms.cmis.I_CmsCmisRepository#setFilter(org.opencms.repository.CmsRepositoryFilter)
 */
public void setFilter(CmsRepositoryFilter filter) {

    // kept only so initConfiguration() can validate it; never applied to CMIS operations
    m_filter = filter;
    LOG.warn("Filters not supported by CMIS repositories, ignoring configuration...");
}
/**
 * Sets the repository name, which doubles as the repository id.<p>
 *
 * @see org.opencms.cmis.I_CmsCmisRepository#setName(java.lang.String)
 */
public void setName(String name) {
    m_id = name;
}
/**
 * Updates the properties of an object; a cmis:name property additionally renames
 * (moves) the resource within its parent folder.<p>
 *
 * The resource is locked for the update if necessary and unlocked again afterwards.<p>
 *
 * @see org.opencms.cmis.I_CmsCmisRepository#updateProperties(org.opencms.cmis.CmsCmisCallContext, org.apache.chemistry.opencmis.commons.spi.Holder, org.apache.chemistry.opencmis.commons.spi.Holder, org.apache.chemistry.opencmis.commons.data.Properties)
 */
public synchronized void updateProperties(
    CmsCmisCallContext context,
    Holder<String> objectId,
    Holder<String> changeToken,
    Properties properties) {

    // fail fast on read-only repositories
    checkWriteAccess();
    try {
        CmsObject cms = getCmsObject(context);
        CmsUUID structureId = new CmsUUID(objectId.getValue());
        CmsResource resource = cms.readResource(structureId);
        Map<String, PropertyData<?>> propertyMap = properties.getProperties();
        // translate CMIS properties to OpenCms property objects
        List<CmsProperty> cmsProperties = getOpenCmsProperties(propertyMap);
        boolean wasLocked = ensureLock(cms, resource);
        try {
            cms.writePropertyObjects(resource, cmsProperties);
            @SuppressWarnings("unchecked")
            PropertyData<String> nameProperty = (PropertyData<String>)propertyMap.get(PropertyIds.NAME);
            if (nameProperty != null) {
                // a new cmis:name means a rename: move within the same parent folder
                String newName = nameProperty.getFirstValue();
                checkResourceName(newName);
                String parentFolder = CmsResource.getParentFolder(resource.getRootPath());
                String newPath = CmsStringUtil.joinPaths(parentFolder, newName);
                cms.moveResource(resource.getRootPath(), newPath);
                // re-read so the unlock below targets the renamed resource
                resource = cms.readResource(resource.getStructureId());
            }
        } finally {
            if (wasLocked) {
                cms.unlockResource(resource);
            }
        }
    } catch (CmsException e) {
        handleCmsException(e);
    }
}
/**
 * Guard for write operations: throws a CMIS "not supported" exception when this
 * repository is read-only (i.e. backed by the Online project).<p>
 */
protected void checkWriteAccess() {

    if (!m_isReadOnly) {
        return;
    }
    throw new CmisNotSupportedException("Readonly repository '" + m_id + "' does not allow write operations.");
}
/**
 * Initializes a CMS context for the authentication data contained in a call context.<p>
 *
 * Anonymous calls (null user name) run as the configured guest user; authenticated
 * calls log in with the supplied credentials. In both cases the resulting context
 * runs in the repository's configured project.<p>
 *
 * @param context the call context
 * @return the initialized CMS context
 *
 * @throws CmisPermissionDeniedException if login or initialization fails
 */
protected CmsObject getCmsObject(CmsCmisCallContext context) {

    try {
        if (context.getUsername() == null) {
            // user name can be null: fall back to the guest user
            CmsObject cms = OpenCms.initCmsObject(OpenCms.getDefaultUsers().getUserGuest());
            cms.getRequestContext().setCurrentProject(m_adminCms.getRequestContext().getCurrentProject());
            return cms;
        } else {
            CmsObject cms = OpenCms.initCmsObject(m_adminCms);
            // loginUser may switch the project, so restore the repository project afterwards
            CmsProject projectBeforeLogin = cms.getRequestContext().getCurrentProject();
            cms.loginUser(context.getUsername(), context.getPassword());
            cms.getRequestContext().setCurrentProject(projectBeforeLogin);
            return cms;
        }
    } catch (CmsException e) {
        // any failure here (bad credentials etc.) maps to "permission denied" in CMIS
        throw new CmisPermissionDeniedException(e.getLocalizedMessage(), e);
    }
}
/**
 * Gets the relationship data for a given resource.<p>
 *
 * @param context the call context
 * @param cms the CMS context
 * @param resource the resource
 * @param relationshipDirection the relationship direction
 * @param filterSet the property filter
 * @param includeAllowableActions true if allowable actions should be included
 * @return the list of relationship data
 *
 * @throws CmsException if something goes wrong
 */
protected List<ObjectData> getRelationshipObjectData(
    CmsCmisCallContext context,
    CmsObject cms,
    CmsResource resource,
    RelationshipDirection relationshipDirection,
    Set<String> filterSet,
    boolean includeAllowableActions) throws CmsException {

    // map the CMIS direction to the matching OpenCms relation filter
    // (SOURCE -> outgoing targets, TARGET -> incoming sources, anything else -> both)
    CmsRelationFilter relationFilter = CmsRelationFilter.ALL;
    if (relationshipDirection == RelationshipDirection.SOURCE) {
        relationFilter = CmsRelationFilter.TARGETS;
    } else if (relationshipDirection == RelationshipDirection.TARGET) {
        relationFilter = CmsRelationFilter.SOURCES;
    }
    CmsCmisRelationHelper helper = getRelationHelper();
    List<ObjectData> resultObjects = new ArrayList<ObjectData>();
    for (CmsRelation relation : cms.getRelationsForResource(resource.getRootPath(), relationFilter)) {
        // skip dangling relations where either endpoint is missing
        if (relation.getSourceId().isNullUUID() || relation.getTargetId().isNullUUID()) {
            continue;
        }
        ObjectData objData = helper.collectObjectData(
            context,
            cms,
            resource,
            relation,
            filterSet,
            includeAllowableActions,
            false);
        resultObjects.add(objData);
    }
    return resultObjects;
}
/**
 * Extracts the resource type from a set of CMIS properties.<p>
 *
 * @param properties the CMIS properties
 * @param defaultValue the default value
 *
 * @return the resource type property, or the default value if the property was not found
 */
protected String getResourceTypeFromProperties(Map<String, PropertyData<?>> properties, String defaultValue) {

    PropertyData<?> typeProp = properties.get(CmsCmisTypeManager.PROPERTY_RESOURCE_TYPE);
    return (typeProp == null) ? defaultValue : (String)typeProp.getFirstValue();
}
/**
 * Gets the type manager instance.<p>
 *
 * @return the type manager instance
 */
protected CmsCmisTypeManager getTypeManager() {
    return m_typeManager;
}
/**
 * Gets the correct helper object for a given object id to perform operations on
 * the corresponding object.<p>
 *
 * UUIDs map to the resource helper, relation-pattern ids to the relation helper,
 * and anything else to <code>null</code>.<p>
 *
 * @param objectId the object id
 *
 * @return the helper object to use for the given object id
 */
I_CmsCmisObjectHelper getHelper(String objectId) {

    if (CmsUUID.isValidUUID(objectId)) {
        return getResourceHelper();
    }
    if (CmsCmisRelationHelper.RELATION_PATTERN.matcher(objectId).matches()) {
        return getRelationHelper();
    }
    return null;
}
/**
 * Helper method to collect the descendants of a given folder.<p>
 *
 * Recurses until the requested depth is exhausted; a depth of -1 (unlimited) never
 * reaches 1 when decremented, so it recurses through the whole subtree.<p>
 *
 * @param context the call context
 * @param cms the CMS context
 * @param folder the parent folder
 * @param list the list to which the descendants should be added
 * @param foldersOnly flag to exclude files from the result
 * @param depth the maximum depth
 * @param filter the property filter
 * @param includeAllowableActions flag to include allowable actions
 * @param includePathSegments flag to include path segments
 */
private void gatherDescendants(
    CmsCmisCallContext context,
    CmsObject cms,
    CmsResource folder,
    List<ObjectInFolderContainer> list,
    boolean foldersOnly,
    int depth,
    Set<String> filter,
    boolean includeAllowableActions,
    boolean includePathSegments) {

    try {
        CmsCmisResourceHelper helper = getResourceHelper();
        List<CmsResource> children = cms.getResourcesInFolder(cms.getSitePath(folder), CmsResourceFilter.DEFAULT);
        // deterministic output: sort children by resource name
        Collections.sort(children, new Comparator<CmsResource>() {
            public int compare(CmsResource a, CmsResource b) {
                return a.getName().compareTo(b.getName());
            }
        });
        // iterate through children
        for (CmsResource child : children) {
            // folders only?
            if (foldersOnly && !child.isFolder()) {
                continue;
            }
            // add to list
            ObjectInFolderDataImpl objectInFolder = new ObjectInFolderDataImpl();
            objectInFolder.setObject(helper.collectObjectData(
                context,
                cms,
                child,
                filter,
                includeAllowableActions,
                false,
                IncludeRelationships.NONE));
            if (includePathSegments) {
                objectInFolder.setPathSegment(child.getName());
            }
            ObjectInFolderContainerImpl container = new ObjectInFolderContainerImpl();
            container.setObject(objectInFolder);
            list.add(container);
            // move to next level (depth == 1 stops; -1 means unlimited and never hits 1)
            if ((depth != 1) && child.isFolder()) {
                container.setChildren(new ArrayList<ObjectInFolderContainer>());
                gatherDescendants(
                    context,
                    cms,
                    child,
                    container.getChildren(),
                    foldersOnly,
                    depth - 1,
                    filter,
                    includeAllowableActions,
                    includePathSegments);
            }
        }
    } catch (CmsException e) {
        handleCmsException(e);
    }
}
/**
 * Gets the relation object helper.<p>
 *
 * @return the relation object helper
 */
private CmsCmisRelationHelper getRelationHelper() {
    return m_relationHelper;
}
/**
 * Gets the resource object helper.<p>
 *
 * @return the resource object helper
 */
private CmsCmisResourceHelper getResourceHelper() {
    return m_resourceHelper;
}
}
|
package org.opencms.flex;
import org.opencms.file.CmsObject;
import org.opencms.file.CmsResource;
import org.opencms.main.CmsLog;
import org.opencms.util.CmsRequestUtil;
import java.util.List;
import java.util.Vector;
import javax.servlet.ServletRequest;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.logging.Log;
/**
* Controller for getting access to the CmsObject, should be used as a
* request attribute.<p>
*
* @author Alexander Kandzior
*
* @version $Revision: 1.32 $
*
* @since 6.0.0
*/
public class CmsFlexController {
/** Constant for the controller request attribute name. */
public static final String ATTRIBUTE_NAME = "org.opencms.flex.CmsFlexController";
/** The log object for this class. */
private static final Log LOG = CmsLog.getLog(CmsFlexController.class);
/** The CmsFlexCache where the result will be cached in, required for the dispatcher. */
private CmsFlexCache m_cache;
/** The wrapped CmsObject provides JSP with access to the core system. */
private CmsObject m_cmsObject;
/** List of wrapped RequestContext info objects. */
private List m_flexContextInfoList;
/** List of wrapped CmsFlexRequests. */
private List m_flexRequestList;
/** List of wrapped CmsFlexResponses. */
private List m_flexResponseList;
/** Indicates if this controller is currently in "forward" mode. */
private boolean m_forwardMode;
/** Wrapped top request. */
private HttpServletRequest m_req;
/** Wrapped top response. */
private HttpServletResponse m_res;
/** The CmsResource that was initialized by the original request, required for URI actions. */
private CmsResource m_resource;
/** Indicates if the response should be streamed. */
private boolean m_streaming;
/** Exception that was caught during inclusion of sub elements. */
private Throwable m_throwable;
/** URI of a VFS resource that caused the exception. */
private String m_throwableResourceUri;
/** Indicates if the request is the top request. */
private boolean m_top;
/**
 * Creates a new controller from the old one, exchanging just the provided OpenCms user context.<p>
 *
 * All other state (resource, cache, request/response wrappers, flags) is shared
 * with the base controller, not copied.<p>
 *
 * @param cms the OpenCms user context for this controller
 * @param base the base controller
 */
public CmsFlexController(CmsObject cms, CmsFlexController base) {

    m_cmsObject = cms;
    m_resource = base.m_resource;
    m_cache = base.m_cache;
    m_req = base.m_req;
    m_res = base.m_res;
    m_streaming = base.m_streaming;
    m_top = base.m_top;
    // the wrapper lists are shared by reference, so both controllers see the same stacks
    m_flexRequestList = base.m_flexRequestList;
    m_flexResponseList = base.m_flexResponseList;
    m_flexContextInfoList = base.m_flexContextInfoList;
    m_forwardMode = base.m_forwardMode;
    // NOTE(review): m_throwable is not carried over from the base controller,
    // only m_throwableResourceUri — confirm this is intentional
    m_throwableResourceUri = base.m_throwableResourceUri;
}
/**
 * Default constructor.<p>
 *
 * @param cms the initial CmsObject to wrap in the controller
 * @param resource the file requested
 * @param cache the instance of the flex cache
 * @param req the current request
 * @param res the current response
 * @param streaming indicates if the response is streaming
 * @param top indicates if the response is the top response
 */
public CmsFlexController(
    CmsObject cms,
    CmsResource resource,
    CmsFlexCache cache,
    HttpServletRequest req,
    HttpServletResponse res,
    boolean streaming,
    boolean top) {

    m_cmsObject = cms;
    m_resource = resource;
    m_cache = cache;
    m_req = req;
    m_res = res;
    m_streaming = streaming;
    m_top = top;
    // Vector: synchronized legacy collection, kept for thread-safe stack access
    m_flexRequestList = new Vector();
    m_flexResponseList = new Vector();
    m_flexContextInfoList = new Vector();
    m_forwardMode = false;
    m_throwableResourceUri = null;
}
/**
 * Returns the wrapped CmsObject from the provided request, or <code>null</code> if the
 * request is not running inside OpenCms.<p>
 *
 * @param req the current request
 * @return the wrapped CmsObject
 */
public static CmsObject getCmsObject(ServletRequest req) {

    CmsFlexController controller = getController(req);
    return (controller == null) ? null : controller.getCmsObject();
}
/**
 * Returns the controller from the given request, or <code>null</code> if the
 * request is not running inside OpenCms.<p>
 *
 * @param req the request to get the controller from
 *
 * @return the controller from the given request, or <code>null</code> if the request is not running inside OpenCms
 */
public static CmsFlexController getController(ServletRequest req) {
    // the controller travels with the request as a request attribute
    return (CmsFlexController)req.getAttribute(ATTRIBUTE_NAME);
}
/**
 * Provides access to a root cause Exception that might have occurred in a complex include scenario.<p>
 *
 * @param req the current request
 * @return the root cause exception or null if no root cause exception is available
 * @see #getThrowable()
 */
public static Throwable getThrowable(ServletRequest req) {

    CmsFlexController controller = (CmsFlexController)req.getAttribute(ATTRIBUTE_NAME);
    return (controller == null) ? null : controller.getThrowable();
}
/**
 * Provides access to the URI of a VFS resource that caused an exception that might
 * have occurred in a complex include scenario.<p>
 *
 * @param req the current request
 * @return the URI of a VFS resource that caused an exception, or null
 * @see #getThrowableResourceUri()
 */
public static String getThrowableResourceUri(ServletRequest req) {

    CmsFlexController controller = (CmsFlexController)req.getAttribute(ATTRIBUTE_NAME);
    return (controller == null) ? null : controller.getThrowableResourceUri();
}
/**
 * Checks if the provided request is running in OpenCms.<p>
 *
 * A request counts as an OpenCms request when a controller attribute is present.<p>
 *
 * @param req the current request
 * @return true if the request is running in OpenCms, false otherwise
 */
public static boolean isCmsRequest(ServletRequest req) {

    if (req == null) {
        return false;
    }
    return req.getAttribute(ATTRIBUTE_NAME) != null;
}
/**
 * Checks if the request has the "If-Modified-Since" header set, and if so,
 * if the header date value is equal to the provided last modification date.<p>
 *
 * The "expires" information is automatically also checked since if a page recently
 * expired, the date of last modification is set to the expiration date.<p>
 *
 * @param req the request to set the "If-Modified-Since" date header from
 * @param dateLastModified the date to compare the header with
 *
 * @return <code>true</code> if the header is set and the header date is equal to the provided date
 */
public static boolean isNotModifiedSince(HttpServletRequest req, long dateLastModified) {

    // check if the request contains a last modified header (-1 if absent)
    long lastModifiedHeader = req.getDateHeader(CmsRequestUtil.HEADER_IF_MODIFIED_SINCE);
    if (lastModifiedHeader < 0) {
        return false;
    }
    // HTTP dates have second precision, so truncate milliseconds before comparing
    long truncated = (dateLastModified / 1000L) * 1000L;
    return truncated == lastModifiedHeader;
}
/**
 * Removes the controller attribute from a request.<p>
 *
 * Clears the controller state, which also removes the request attribute.<p>
 *
 * @param req the request to remove the controller from
 */
public static void removeController(ServletRequest req) {

    CmsFlexController controller = getController(req);
    if (controller != null) {
        // clear() tears down the controller and removes the request attribute
        controller.clear();
    }
}
/**
 * Stores the given controller in the given request (using a request attribute).<p>
 *
 * @param req the request where to store the controller in
 * @param controller the controller to store
 */
public static void setController(ServletRequest req, CmsFlexController controller) {
    req.setAttribute(CmsFlexController.ATTRIBUTE_NAME, controller);
}
/**
 * Sets the "expires" date header for a given http request.<p>
 *
 * Only applies when the expiration date lies in the future and is not the
 * "never expires" default; the header is capped by the given max age.<p>
 *
 * @param res the reponse to set the "expires" date header for
 * @param maxAge maximum amount of time in milliseconds the response remains valid
 * @param dateExpires the date to set (if this is not in the future, it is ignored)
 */
public static void setDateExpiresHeader(HttpServletResponse res, long dateExpires, long maxAge) {

    long now = System.currentTimeMillis();
    if ((dateExpires > now) && (dateExpires != CmsResource.DATE_EXPIRED_DEFAULT)) {
        // important: many caches (browsers or proxy) use the "Expires" header
        // to avoid re-loading of pages that are not expired
        // while this is right in general, no changes before the expiration date
        // will be displayed
        // therefore it is better to not use an expiration to far in the future
        // if no valid max age is set, restrict it to 24 hrs
        if (maxAge < 0L) {
            // 86400000 ms = 24 hours
            maxAge = 86400000;
        }
        if ((dateExpires - now) > maxAge) {
            // set "Expires" header max one day into the future
            dateExpires = now + maxAge;
        }
        res.setDateHeader(CmsRequestUtil.HEADER_EXPIRES, dateExpires);
        // Cache-Control max-age is expressed in seconds, not milliseconds
        res.setHeader(CmsRequestUtil.HEADER_CACHE_CONTROL, CmsRequestUtil.HEADER_VALUE_MAX_AGE + (maxAge/1000L));
    }
}
/**
 * Sets the "last modified" date header for a given http request.<p>
 *
 * @param res the reponse to set the "last modified" date header for
 * @param dateLastModified the date to set (if this is lower then 0, the current time is set)
 */
public static void setDateLastModifiedHeader(HttpServletResponse res, long dateLastModified) {

    long headerValue;
    if (dateLastModified > -1) {
        // HTTP dates have second precision only, so truncate the milliseconds
        headerValue = (dateLastModified / 1000) * 1000;
    } else {
        // this resource can not be optimized for "last modified", use current time as header
        headerValue = System.currentTimeMillis();
    }
    res.setDateHeader(CmsRequestUtil.HEADER_LAST_MODIFIED, headerValue);
}
/**
 * Clears all data of this controller.<p>
 *
 * After this call the controller is unusable; it also removes itself from the
 * wrapped request's attributes.<p>
 */
public void clear() {

    if (m_flexRequestList != null) {
        m_flexRequestList.clear();
    }
    m_flexRequestList = null;
    if (m_flexResponseList != null) {
        m_flexResponseList.clear();
    }
    m_flexResponseList = null;
    // also release the context info list, which was previously left populated
    // (kept symmetric with the request/response wrapper lists above)
    if (m_flexContextInfoList != null) {
        m_flexContextInfoList.clear();
    }
    m_flexContextInfoList = null;
    if (m_req != null) {
        m_req.removeAttribute(ATTRIBUTE_NAME);
    }
    m_req = null;
    m_res = null;
    m_cmsObject = null;
    m_resource = null;
    m_cache = null;
    m_throwable = null;
    // release the throwable resource URI together with the throwable itself
    m_throwableResourceUri = null;
}
/**
 * Returns the CmsFlexCache instance where all results from this request will be cached in.<p>
 *
 * This is public so that pages like the Flex Cache Administration page
 * have a way to access the cache object.<p>
 *
 * @return the CmsFlexCache instance where all results from this request will be cached in
 */
public CmsFlexCache getCmsCache() {
    return m_cache;
}
/**
* Returns the wrapped CmsObject.<p>
*
* @return the wrapped CmsObject
*/
public CmsObject getCmsObject() {
return m_cmsObject;
}
/**
* This method provides access to the top-level CmsResource of the request
* which is of a type that supports the FlexCache,
* i.e. usually the CmsFile that is identical to the file uri requested by the user,
* not he current included element.<p>
*
* @return the requested top-level CmsFile
*/
public CmsResource getCmsResource() {
return m_resource;
}
/**
 * Returns the current flex request, i.e. the top of the request stack.<p>
 *
 * NOTE(review): assumes at least one request has been pushed - calling this
 * on an empty stack throws an IndexOutOfBoundsException.
 *
 * @return the current flex request
 */
public CmsFlexRequest getCurrentRequest() {
    return (CmsFlexRequest)m_flexRequestList.get(m_flexRequestList.size() - 1);
}
/**
 * Returns the current flex response, i.e. the top of the response stack.<p>
 *
 * NOTE(review): assumes at least one response has been pushed - calling this
 * on an empty stack throws an IndexOutOfBoundsException.
 *
 * @return the current flex response
 */
public CmsFlexResponse getCurrentResponse() {
    return (CmsFlexResponse)m_flexResponseList.get(m_flexResponseList.size() - 1);
}
/**
 * Returns the combined "expires" date for all resources read during this request.<p>
 *
 * @return the combined "expires" date, or CmsResource.DATE_EXPIRED_DEFAULT if no context info is available
 */
public long getDateExpires() {
    int pos = m_flexContextInfoList.size() - 1;
    if (pos < 0) {
        // ensure a valid position is used
        return CmsResource.DATE_EXPIRED_DEFAULT;
    }
    return ((CmsFlexRequestContextInfo)m_flexContextInfoList.get(pos)).getDateExpires();
}
/**
 * Returns the combined "last modified" date for all resources read during this request.<p>
 *
 * NOTE(review): the empty-stack fallback is DATE_RELEASED_DEFAULT (a "released"
 * constant, not a "last modified" one) - confirm this default is intended.
 *
 * @return the combined "last modified" date for all resources read during this request
 */
public long getDateLastModified() {
    int pos = m_flexContextInfoList.size() - 1;
    if (pos < 0) {
        // ensure a valid position is used
        return CmsResource.DATE_RELEASED_DEFAULT;
    }
    return ((CmsFlexRequestContextInfo)m_flexContextInfoList.get(pos)).getDateLastModified();
}
/**
 * Returns the size of the response stack.<p>
 *
 * @return the size of the response stack
 */
public int getResponseStackSize() {
    return m_flexResponseList.size();
}
/**
 * Returns an exception (Throwable) that was caught during inclusion of sub elements,
 * or null if no exceptions were thrown in sub elements.<p>
 *
 * @return an exception (Throwable) that was caught during inclusion of sub elements
 */
public Throwable getThrowable() {
    return m_throwable;
}
/**
 * Returns the URI of a VFS resource that caused the exception that was caught during inclusion of sub elements,
 * might return null if no URI information was available for the exception.<p>
 *
 * @return the URI of a VFS resource that caused the exception that was caught during inclusion of sub elements
 */
public String getThrowableResourceUri() {
    return m_throwableResourceUri;
}
/**
 * Returns the current (top-level) http request this controller was created for.<p>
 *
 * @return the current http request
 */
public HttpServletRequest getTopRequest() {
    return m_req;
}
/**
 * Returns the current (top-level) http response this controller was created for.<p>
 *
 * @return the current http response
 */
public HttpServletResponse getTopResponse() {
    return m_res;
}
/**
 * Returns <code>true</code> if the controller does not yet contain any requests.<p>
 *
 * NOTE(review): returns <code>false</code> when the request list is null
 * (e.g. after clear()), even though no requests are contained then - confirm
 * callers rely on that distinction.
 *
 * @return <code>true</code> if the controller does not yet contain any requests
 */
public boolean isEmptyRequestList() {
    return (m_flexRequestList != null) && m_flexRequestList.isEmpty();
}
/**
 * Returns <code>true</code> if this controller is currently in "forward" mode.<p>
 *
 * @return <code>true</code> if this controller is currently in "forward" mode
 */
public boolean isForwardMode() {
    return m_forwardMode;
}
/**
 * Returns <code>true</code> if the generated output of the response should
 * be written to the stream directly.<p>
 *
 * @return <code>true</code> if the generated output of the response should be written to the stream directly
 */
public boolean isStreaming() {
    return m_streaming;
}
/**
 * Returns <code>true</code> if this controller was generated as top level controller.<p>
 *
 * If a resource (e.g. a JSP) is processed and it's content is included in
 * another resource, then this will be <code>false</code>.
 *
 * @return <code>true</code> if this controller was generated as top level controller
 * @see org.opencms.loader.I_CmsResourceLoader#dump(CmsObject, CmsResource, String, java.util.Locale, HttpServletRequest, HttpServletResponse)
 * @see org.opencms.jsp.CmsJspActionElement#getContent(String)
 */
public boolean isTop() {
    return m_top;
}
/**
 * Removes the topmost request/response pair from the stack.<p>
 *
 * The context info of the removed entry is merged back into the stack so the
 * "expires" / "last modified" dates collected for included elements are not lost.
 */
public void pop() {
    if (m_flexRequestList.size() > 0) {
        m_flexRequestList.remove(m_flexRequestList.size() - 1);
    }
    if (m_flexResponseList.size() > 0) {
        m_flexResponseList.remove(m_flexResponseList.size() - 1);
    }
    if (m_flexContextInfoList.size() > 0) {
        CmsFlexRequestContextInfo info = (CmsFlexRequestContextInfo)m_flexContextInfoList.remove(m_flexContextInfoList.size() - 1);
        if (m_flexContextInfoList.size() > 0) {
            // NOTE(review): the popped info is merged into the BOTTOM element (index 0),
            // not the new top (size() - 1), while updateRequestContextInfo() publishes
            // the top element - verify this index is intended
            ((CmsFlexRequestContextInfo)m_flexContextInfoList.get(0)).merge(info);
            updateRequestContextInfo();
        }
    }
}
/**
 * Adds another flex request/response pair to the stack,
 * together with a fresh context info entry for it.<p>
 *
 * @param req the request to add
 * @param res the response to add
 */
public void push(CmsFlexRequest req, CmsFlexResponse res) {
    m_flexRequestList.add(req);
    m_flexResponseList.add(res);
    m_flexContextInfoList.add(new CmsFlexRequestContextInfo());
    // publish the new top context info on the request context
    updateRequestContextInfo();
}
/**
 * Sets the value of the "forward mode" flag.<p>
 *
 * @param value the forward mode to set
 */
public void setForwardMode(boolean value) {
    m_forwardMode = value;
}
/**
 * Sets an exception (Throwable) that was caught during inclusion of sub elements.<p>
 *
 * If another exception is already set in this controller, the additional
 * exception is ignored (only debug-logged).
 *
 * @param throwable the exception (Throwable) to set
 * @param resource the URI of the VFS resource the error occurred on (might be null if unknown)
 * @return the exception stored in the controller
 */
public Throwable setThrowable(Throwable throwable, String resource) {
    if (m_throwable != null) {
        // a Throwable is already stored - keep it and only debug-log the ignored one
        if (LOG.isDebugEnabled()) {
            LOG.debug(resource != null
                ? Messages.get().key(Messages.LOG_FLEXCONTROLLER_IGNORED_EXCEPTION_1, resource)
                : Messages.get().key(Messages.LOG_FLEXCONTROLLER_IGNORED_EXCEPTION_0));
        }
        return m_throwable;
    }
    m_throwable = throwable;
    m_throwableResourceUri = resource;
    return m_throwable;
}
/**
 * Puts every response currently on the stack into a suspended state.<p>
 */
public void suspendFlexResponse() {
    // suspend all stacked responses, bottom to top
    for (Object element : m_flexResponseList) {
        ((CmsFlexResponse)element).setSuspended(true);
    }
}
/**
 * Updates the "last modified" date and the "expires" date
 * for all resources read during this request with the given values.<p>
 *
 * The currently stored value for "last modified" is only updated with the new value if
 * the new value is either larger (i.e. newer) than the stored value,
 * or if the new value is less than zero, which indicates that the "last modified"
 * optimization can not be used because the element is dynamic.<p>
 *
 * The stored "expires" value is only updated if the new value is smaller
 * than the stored value.<p>
 *
 * @param dateLastModified the value to update the "last modified" date with
 * @param dateExpires the value to update the "expires" date with
 */
public void updateDates(long dateLastModified, long dateExpires) {
    if (m_flexContextInfoList.isEmpty()) {
        // no active context info - nothing to update
        return;
    }
    // delegate to the topmost context info entry
    CmsFlexRequestContextInfo topInfo =
        (CmsFlexRequestContextInfo)m_flexContextInfoList.get(m_flexContextInfoList.size() - 1);
    topInfo.updateDates(dateLastModified, dateExpires);
}
/**
 * Publishes the topmost context info entry as an attribute
 * on the request context of the wrapped CmsObject.<p>
 */
private void updateRequestContextInfo() {
    if (!m_flexContextInfoList.isEmpty()) {
        Object topInfo = m_flexContextInfoList.get(m_flexContextInfoList.size() - 1);
        m_cmsObject.getRequestContext().setAttribute(CmsRequestUtil.HEADER_LAST_MODIFIED, topInfo);
    }
}
}
|
// This file is part of the Ouzel engine.
package lv.elviss.ouzel;
import android.content.Context;
import android.opengl.GLSurfaceView;
import android.os.Build;
import android.view.KeyEvent;
import android.view.MotionEvent;
/**
 * GL surface that forwards touch and key input to the native Ouzel engine
 * through {@code OuzelLibJNIWrapper}.
 */
public class SurfaceView extends GLSurfaceView implements InputDeviceListener
{
    private final InputManager inputManager;

    public SurfaceView(Context context)
    {
        super(context);
        inputManager = new InputManager();
        inputManager.registerInputDeviceListener(this, null);
        this.setFocusableInTouchMode(true);
        // RGBA8888 surface, no depth / stencil buffer
        this.setEGLConfigChooser(new ConfigChooser(8, 8, 8, 8, 0, 0));
        //this.setPreserveEGLContextOnPause(true);
        this.setEGLContextFactory(new ContextFactory());
        this.setEGLContextClientVersion(2);
        this.setRenderer(new RendererWrapper());
    }

    @Override
    public boolean onTouchEvent(final MotionEvent event)
    {
        switch (event.getAction() & MotionEvent.ACTION_MASK)
        {
            case MotionEvent.ACTION_POINTER_DOWN:
            {
                // Fix: mask out the action code before shifting - the original shifted the
                // raw action value, which relies on no bits being set above the index field.
                final int index = (event.getAction() & MotionEvent.ACTION_POINTER_INDEX_MASK)
                    >> MotionEvent.ACTION_POINTER_INDEX_SHIFT;
                OuzelLibJNIWrapper.handleActionDown(event.getPointerId(index),
                    event.getX(index), event.getY(index));
                return true;
            }
            case MotionEvent.ACTION_DOWN:
            {
                // first finger down - pointer data is always at index 0
                OuzelLibJNIWrapper.handleActionDown(event.getPointerId(0),
                    event.getX(0), event.getY(0));
                return true;
            }
            case MotionEvent.ACTION_MOVE:
            {
                // Fix: a single MOVE event carries the positions of ALL active pointers;
                // the original only reported pointer index 0, losing multi-touch drags.
                for (int index = 0; index < event.getPointerCount(); index++)
                {
                    OuzelLibJNIWrapper.handleActionMove(event.getPointerId(index),
                        event.getX(index), event.getY(index));
                }
                return true;
            }
            case MotionEvent.ACTION_POINTER_UP:
            {
                final int index = (event.getAction() & MotionEvent.ACTION_POINTER_INDEX_MASK)
                    >> MotionEvent.ACTION_POINTER_INDEX_SHIFT;
                OuzelLibJNIWrapper.handleActionUp(event.getPointerId(index),
                    event.getX(index), event.getY(index));
                return true;
            }
            case MotionEvent.ACTION_UP:
            {
                // last finger up
                OuzelLibJNIWrapper.handleActionUp(event.getPointerId(0),
                    event.getX(0), event.getY(0));
                return true;
            }
            case MotionEvent.ACTION_CANCEL:
            {
                OuzelLibJNIWrapper.handleActionCancel(event.getPointerId(0),
                    event.getX(0), event.getY(0));
                return true;
            }
        }

        return super.onTouchEvent(event);
    }

    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event)
    {
        inputManager.onInputEvent(event);
        // TODO: forward key presses to the native engine
        return super.onKeyDown(keyCode, event);
    }

    @Override
    public boolean onKeyUp(int keyCode, KeyEvent event)
    {
        inputManager.onInputEvent(event);
        // TODO: forward key releases to the native engine
        return super.onKeyUp(keyCode, event);
    }

    @Override
    public void onInputDeviceAdded(int deviceId)
    {
        // TODO: implement
    }

    @Override
    public void onInputDeviceChanged(int deviceId)
    {
        // TODO: implement
    }

    @Override
    public void onInputDeviceRemoved(int deviceId)
    {
        // TODO: implement
    }
}
|
package org.pentaho.di.trans.step;
import java.io.IOException;
import java.net.ServerSocket;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.ResultFile;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.RowSet;
import org.pentaho.di.core.config.ConfigManager;
import org.pentaho.di.core.config.KettleConfig;
import org.pentaho.di.core.exception.KettleConfigException;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleRowException;
import org.pentaho.di.core.exception.KettleStepException;
import org.pentaho.di.core.exception.KettleStepLoaderException;
import org.pentaho.di.core.exception.KettleValueException;
import org.pentaho.di.core.logging.LogWriter;
import org.pentaho.di.core.row.RowDataUtil;
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMeta;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.variables.Variables;
import org.pentaho.di.partition.PartitionSchema;
import org.pentaho.di.trans.SlaveStepCopyPartitionDistribution;
import org.pentaho.di.trans.StepLoader;
import org.pentaho.di.trans.StepPlugin;
import org.pentaho.di.trans.StepPluginMeta;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.cluster.TransSplitter;
public class BaseStep extends Thread implements VariableSpace, StepInterface
{
private VariableSpace variables = new Variables();

/** All registered step plugins (annotated + XML configured), loaded once at class load time. */
public static StepPluginMeta[] steps = null;

static
{
    //TODO: Move this out of this class
    synchronized(BaseStep.class)
    {
        try
        {
            //annotated classes first
            ConfigManager<?> stepsAnntCfg = KettleConfig.getInstance().getManager("steps-annotation-config");
            Collection<StepPluginMeta> mainSteps = stepsAnntCfg.loadAs(StepPluginMeta.class);
            ConfigManager<?> stepsCfg = KettleConfig.getInstance().getManager("steps-xml-config");
            Collection<StepPluginMeta> csteps = stepsCfg.loadAs(StepPluginMeta.class);
            mainSteps.addAll(csteps);
            steps = mainSteps.toArray(new StepPluginMeta[mainSteps.size()]);
        }
        catch(KettleConfigException e)
        {
            // Fix: chain the original exception as the cause instead of discarding it
            // (was printStackTrace() + new RuntimeException(e.getMessage())).
            throw new RuntimeException(e.getMessage(), e);
        }
    }
}
/* public static final StepPluginMeta[] steps =
{
    TODO: port these steps
    new StepPluginMeta(WebServiceMeta.class, "WebServiceLookup", Messages.getString("BaseStep.TypeLongDesc.WebServiceLookup"), Messages.getString("BaseStep.TypeTooltipDesc.WebServiceLookup"), "WSL.png", CATEGORY_EXPERIMENTAL),
    new StepPluginMeta(FormulaMeta.class, "Formula", Messages.getString("BaseStep.TypeLongDesc.Formula"), Messages.getString("BaseStep.TypeTooltipDesc.Formula"), "FRM.png", CATEGORY_EXPERIMENTAL),
};*/

/** Display order of the step categories. */
public static final String category_order[] =
{
    StepCategory.INPUT.getName(),
    StepCategory.OUTPUT.getName(),
    StepCategory.LOOKUP.getName(),
    StepCategory.TRANSFORM.getName(),
    StepCategory.JOINS.getName(),
    StepCategory.SCRIPTING.getName(),
    StepCategory.DATA_WAREHOUSE.getName(),
    StepCategory.MAPPING.getName(),
    StepCategory.JOB.getName(),
    StepCategory.INLINE.getName(),
    StepCategory.EXPERIMENTAL.getName(),
    StepCategory.DEPRECATED.getName(),
    StepCategory.BULK.getName(),
};

/** Localized step status descriptions, indexed by the value returned from getStatus(). */
public static final String[] statusDesc = {
    Messages.getString("BaseStep.status.Empty"),
    Messages.getString("BaseStep.status.Init"),
    Messages.getString("BaseStep.status.Running"),
    Messages.getString("BaseStep.status.Idle"),
    Messages.getString("BaseStep.status.Finished"),
    Messages.getString("BaseStep.status.Stopped"),
    Messages.getString("BaseStep.status.Disposed"),
    Messages.getString("BaseStep.status.Halted"),
    Messages.getString("BaseStep.status.Paused"),
};
private TransMeta transMeta;
private StepMeta stepMeta;
private String stepname;
protected LogWriter log;
private Trans trans;

/** Guards all of the (deprecated) public "lines..." counters below. */
private Object statusCountersLock = new Object();

/** nr of lines read from previous step(s)
 * @deprecated please use the supplied getters, setters and increment/decrement methods
 */
public long linesRead;

/** nr of lines written to next step(s)
 * @deprecated please use the supplied getters, setters and increment/decrement methods
 */
public long linesWritten;

/** nr of lines read from file or database
 * @deprecated please use the supplied getters, setters and increment/decrement methods
 */
public long linesInput;

/** nr of lines written to file or database
 * @deprecated please use the supplied getters, setters and increment/decrement methods
 */
public long linesOutput;

/** nr of updates in a database table or file
 * @deprecated please use the supplied getters, setters and increment/decrement methods
 */
public long linesUpdated;

/** nr of lines skipped
 * @deprecated please use the supplied getters, setters and increment/decrement methods
 */
public long linesSkipped;

/** nr of lines rejected to an error handling step
 * (NOTE(review): the original comment described a sleep-time counter, which did not match this field)
 * @deprecated please use the supplied getters, setters and increment/decrement methods
 */
public long linesRejected;

private boolean distributed;
private long errors;

private StepMeta nextSteps[];
private StepMeta prevSteps[];

private int currentInputRowSetNr, currentOutputRowSetNr;

// NOTE(review): undocumented and apparently unused in this chunk - verify before removing
public List<BaseStep> thr;

/** The rowsets on the input, size() == nr of source steps */
public ArrayList<RowSet> inputRowSets;

/** the rowsets on the output, size() == nr of target steps */
public ArrayList<RowSet> outputRowSets;

/** The remote input steps. */
public List<RemoteStep> remoteInputSteps;

/** The remote output steps. */
public List<RemoteStep> remoteOutputSteps;

/** the rowset for the error rows */
public RowSet errorRowSet;

public AtomicBoolean stopped;
public AtomicBoolean paused;

public boolean waiting;
public boolean init;

/** the copy number of this thread */
private int stepcopy;

private Date start_time, stop_time;

public boolean first;
public boolean terminator;
public List<Object[]> terminator_rows;

private StepMetaInterface stepMetaInterface;
private StepDataInterface stepDataInterface;

/** The list of RowListener interfaces */
private List<RowListener> rowListeners;

/**
 * Map of files that are generated or used by this step. After execution, these can be added to result.
 * The entry to the map is the filename
 */
private Map<String,ResultFile> resultFiles;

/**
 * Set this to true if you want to have extra checking enabled on the rows that are entering this step. All too
 * often people send in bugs when it is really the mixing of different types of rows that is causing the problem.
 */
private boolean safeModeEnabled;

/**
 * This contains the first row received and will be the reference row. We used it to perform extra checking: see if
 * we don't get rows with "mixed" contents.
 */
private RowMetaInterface inputReferenceRow;

/**
 * This field tells the putRow() method that we are in partitioned mode
 */
private boolean partitioned;

/**
 * The partition ID at which this step copy runs, or null if this step is not running partitioned.
 */
private String partitionID;

/**
 * This field tells the putRow() method to re-partition the incoming data, See also StepPartitioningMeta.PARTITIONING_METHOD_*
 */
private int repartitioning;

/**
 * The partitionID to rowset mapping
 */
private Map<String,RowSet> partitionTargets;

private RowMetaInterface inputRowMeta;

/**
 * step partitioning information of the NEXT step
 */
private StepPartitioningMeta nextStepPartitioningMeta;

/** The metadata information of the error output row. There is only one per step so we cache it */
private RowMetaInterface errorRowMeta = null;

private RowMetaInterface previewRowMeta;

private boolean checkTransRunning;

private int slaveNr;
private int clusterSize;
private int uniqueStepNrAcrossSlaves;
private int uniqueStepCountAcrossSlaves;

private boolean remoteOutputStepsInitialized;
private boolean remoteInputStepsInitialized;

private RowSet[] partitionNrRowSetList;

/** A list of server sockets that need to be closed during transformation cleanup. */
private List<ServerSocket> serverSockets;

// NOTE(review): mutable static without final - verify it is never reassigned at runtime
private static int NR_OF_ROWS_IN_BLOCK = 500;

private int blockPointer;

/**
 * A flag to indicate that clustered partitioning was not yet initialized
 */
private boolean clusteredPartitioningFirst;

/**
 * A flag to determine whether or not we are doing local or clustered (remote) partitioning
 */
private boolean clusteredPartitioning;

private boolean usingThreadPriorityManagment;

private List<StepListener> stepListeners;
/**
 * This is the base step that forms the basis for all steps. You can derive from this class to implement your own
 * steps.
 *
 * @param stepMeta The StepMeta object to run.
 * @param stepDataInterface the data object to store temporary data, database connections, caches, result sets,
 * hashtables etc.
 * @param copyNr The copynumber for this step.
 * @param transMeta The TransMeta of which the step stepMeta is part of.
 * @param trans The (running) transformation to obtain information shared among the steps.
 */
public BaseStep(StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans)
{
    log = LogWriter.getInstance();
    this.stepMeta = stepMeta;
    this.stepDataInterface = stepDataInterface;
    this.stepcopy = copyNr;
    this.transMeta = transMeta;
    this.trans = trans;
    this.stepname = stepMeta.getName();

    // Set the name of the thread
    if (stepMeta.getName() != null)
    {
        setName(toString() + " (" + super.getName() + ")");
    }
    else
    {
        throw new RuntimeException("A step in transformation [" + transMeta.toString()
            + "] doesn't have a name. A step should always have a name to identify it by.");
    }

    first = true;
    clusteredPartitioningFirst = true;

    stopped = new AtomicBoolean(false); // stray ";;" removed
    paused = new AtomicBoolean(false);
    init = false;

    // reset all statistics counters (guarded by the shared counters lock)
    synchronized (statusCountersLock) {
        linesRead = 0L;
        linesWritten = 0L;
        linesUpdated = 0L;
        linesSkipped = 0L;
        linesRejected = 0L;
        linesInput = 0L;
        linesOutput = 0L;
    }

    inputRowSets = null;
    outputRowSets = null;
    nextSteps = null;

    terminator = stepMeta.hasTerminator();
    if (terminator)
    {
        terminator_rows = new ArrayList<Object[]>();
    }
    else
    {
        terminator_rows = null;
    }

    start_time = null;
    stop_time = null;

    distributed = stepMeta.isDistributes();

    // Braces added to fix a dangling-else bug: the original "else" bound to the
    // inner "if (log.isDetailed())", so "DistributionDeactivated" was never logged.
    if (distributed)
    {
        if (log.isDetailed()) logDetailed(Messages.getString("BaseStep.Log.DistributionActivated")); //$NON-NLS-1$
    }
    else
    {
        if (log.isDetailed()) logDetailed(Messages.getString("BaseStep.Log.DistributionDeactivated")); //$NON-NLS-1$
    }

    rowListeners = new ArrayList<RowListener>();
    resultFiles = new Hashtable<String,ResultFile>();

    repartitioning = StepPartitioningMeta.PARTITIONING_METHOD_NONE;
    partitionTargets = new Hashtable<String,RowSet>();

    serverSockets = new ArrayList<ServerSocket>();

    checkTransRunning = false;
    blockPointer = 0;

    stepListeners = new ArrayList<StepListener>();

    // register this step copy with the transformation
    dispatch();
}
/**
 * Initializes the step before execution: determines the slave/cluster context,
 * sets the internal partitioning variables and prepares the remote input and
 * output step connections (opening one server socket per remote output step).
 *
 * @param smi the step meta interface (not used here, available for overrides)
 * @param sdi the step data interface; its status is set to STATUS_INIT
 * @return true if initialisation was successful, false otherwise
 */
public boolean init(StepMetaInterface smi, StepDataInterface sdi)
{
    sdi.setStatus(StepDataInterface.STATUS_INIT);

    // determine on which slave server (if any) this step copy is running
    String slaveNr = transMeta.getVariable(Const.INTERNAL_VARIABLE_SLAVE_SERVER_NUMBER);
    String clusterSize = transMeta.getVariable(Const.INTERNAL_VARIABLE_CLUSTER_SIZE);
    boolean master = "Y".equalsIgnoreCase(transMeta.getVariable(Const.INTERNAL_VARIABLE_CLUSTER_MASTER));
    if (!Const.isEmpty(slaveNr) && !Const.isEmpty(clusterSize) && !master)
    {
        this.slaveNr = Integer.parseInt(slaveNr);
        this.clusterSize = Integer.parseInt(clusterSize);
        if (log.isDetailed()) logDetailed("Running on slave server #"+slaveNr+"/"+clusterSize+".");
    }
    else
    {
        this.slaveNr = 0;
        this.clusterSize = 0;
    }

    // Also set the internal variable for the partition
    SlaveStepCopyPartitionDistribution partitionDistribution = transMeta.getSlaveStepCopyPartitionDistribution();
    if (stepMeta.isPartitioned())
    {
        // See if we are partitioning remotely
        if (partitionDistribution!=null && !partitionDistribution.getDistribution().isEmpty())
        {
            String slaveServerName = getVariable(Const.INTERNAL_VARIABLE_SLAVE_SERVER_NAME);
            int stepCopyNr = stepcopy;

            // Look up the partition nr...
            // Set the partition ID (string) as well as the partition nr [0..size[
            PartitionSchema partitionSchema = stepMeta.getStepPartitioningMeta().getPartitionSchema();
            int partitionNr = partitionDistribution.getPartition(slaveServerName, partitionSchema.getName(), stepCopyNr);
            if (partitionNr>=0) {
                String partitionNrString = new DecimalFormat("000").format(partitionNr);
                setVariable(Const.INTERNAL_VARIABLE_STEP_PARTITION_NR, partitionNrString);

                if (partitionDistribution.getOriginalPartitionSchemas()!=null) {
                    // What is the partition schema name?
                    String partitionSchemaName = stepMeta.getStepPartitioningMeta().getPartitionSchema().getName();

                    // Search the original partition schema in the distribution...
                    for (PartitionSchema originalPartitionSchema : partitionDistribution.getOriginalPartitionSchemas()) {
                        String slavePartitionSchemaName = TransSplitter.createSlavePartitionSchemaName(originalPartitionSchema.getName());
                        if (slavePartitionSchemaName.equals(partitionSchemaName)) {
                            PartitionSchema schema = (PartitionSchema) originalPartitionSchema.clone();

                            // This is the one...
                            if (schema.isDynamicallyDefined()) {
                                schema.expandPartitionsDynamically(this.clusterSize, this);
                            }
                            String partID = schema.getPartitionIDs().get(partitionNr);
                            setVariable(Const.INTERNAL_VARIABLE_STEP_PARTITION_ID, partID);
                            break;
                        }
                    }
                }
            }
        }
        else
        {
            // This is a locally partitioned step...
            int partitionNr = stepcopy;
            String partitionNrString = new DecimalFormat("000").format(partitionNr);
            setVariable(Const.INTERNAL_VARIABLE_STEP_PARTITION_NR, partitionNrString);
            String partitionID = stepMeta.getStepPartitioningMeta().getPartitionSchema().getPartitionIDs().get(partitionNr);
            setVariable(Const.INTERNAL_VARIABLE_STEP_PARTITION_ID, partitionID);
        }
    }
    else if (!Const.isEmpty(partitionID))
    {
        setVariable(Const.INTERNAL_VARIABLE_STEP_PARTITION_ID, partitionID);
    }

    // Set a unique step number across all slave servers:
    //   slaveNr * nrCopies + copyNr
    uniqueStepNrAcrossSlaves = this.slaveNr * getStepMeta().getCopies() + stepcopy;
    uniqueStepCountAcrossSlaves = this.clusterSize<=1 ? getStepMeta().getCopies() : this.clusterSize * getStepMeta().getCopies();
    if (uniqueStepCountAcrossSlaves==0) uniqueStepCountAcrossSlaves = 1;

    setVariable(Const.INTERNAL_VARIABLE_STEP_UNIQUE_NUMBER, Integer.toString(uniqueStepNrAcrossSlaves));
    setVariable(Const.INTERNAL_VARIABLE_STEP_UNIQUE_COUNT, Integer.toString(uniqueStepCountAcrossSlaves));
    setVariable(Const.INTERNAL_VARIABLE_STEP_COPYNR, Integer.toString(stepcopy));

    // Now that these things have been done, we also need to start a number of server sockets.
    // One for each of the remote output steps that we're going to write to.
    try
    {
        remoteOutputSteps = new ArrayList<RemoteStep>();
        for (int i=0;i<stepMeta.getRemoteOutputSteps().size();i++) {
            RemoteStep remoteStep = stepMeta.getRemoteOutputSteps().get(i);

            // If the step runs in multiple copies, we only want to open every socket once.
            if (stepMeta.getCopies()==1 || (stepMeta.getCopies()>1 && getCopy()==i ) ) {
                // Open a server socket to allow the remote output step to connect.
                RemoteStep copy = (RemoteStep) remoteStep.clone();
                try {
                    copy.openServerSocket(this);
                    if (log.isDetailed()) logDetailed("Opened a server socket connection to "+copy);
                }
                catch(Exception e) {
                    log.logError(toString(), "Unable to open server socket during step initialisation: "+copy.toString(), e);
                    // re-throw so the outer catch performs the socket cleanup below
                    throw new Exception(e);
                }
                remoteOutputSteps.add(copy);
            }
        }
    }
    catch(Exception e) {
        // close any server sockets that were already opened before the failure
        for (RemoteStep remoteStep : remoteOutputSteps) {
            if (remoteStep.getServerSocket()!=null) {
                try {
                    remoteStep.getServerSocket().close();
                } catch (IOException e1) {
                    // Fix: log the close failure itself (the original logged the unrelated outer exception "e")
                    log.logError(toString(), "Unable to close server socket after error during step initialisation", e1);
                }
            }
        }
        return false;
    }

    // For the remote input steps to read from, we do the same: make a list and initialize what we can...
    try
    {
        remoteInputSteps = new ArrayList<RemoteStep>();
        if ((stepMeta.isPartitioned() && getClusterSize()>1) || stepMeta.getCopies() > 1) {
            // If the step is partitioned or has multiple copies and clustered, we only want to take one remote input step per copy.
            // This is where we make that selection...
            for (int i=0;i<stepMeta.getRemoteInputSteps().size();i++) {
                RemoteStep remoteStep = stepMeta.getRemoteInputSteps().get(i);
                if (remoteStep.getTargetStepCopyNr()==stepcopy) {
                    RemoteStep copy = (RemoteStep) remoteStep.clone();
                    remoteInputSteps.add(copy);
                }
            }
        }
        else {
            for (RemoteStep remoteStep : stepMeta.getRemoteInputSteps()) {
                RemoteStep copy = (RemoteStep) remoteStep.clone();
                remoteInputSteps.add(copy);
            }
        }
    }
    catch(Exception e) {
        log.logError(toString(), "Unable to initialize remote input steps during step initialisation", e);
        return false;
    }

    return true;
}
/**
 * Disposes of the step: marks the step data as STATUS_DISPOSED.
 *
 * @param smi the step meta interface (unused here)
 * @param sdi the step data interface whose status is updated
 */
public void dispose(StepMetaInterface smi, StepDataInterface sdi)
{
    sdi.setStatus(StepDataInterface.STATUS_DISPOSED);
}
/**
 * Closes all server sockets registered for transformation cleanup.
 * A failure to close one socket is logged and does not stop the
 * cleanup of the remaining sockets.
 */
public void cleanup()
{
    for (int i = 0; i < serverSockets.size(); i++)
    {
        ServerSocket serverSocket = serverSockets.get(i);
        try
        {
            serverSocket.close();
        }
        catch (IOException e)
        {
            log.logError(toString(), "Cleanup: Unable to close server socket ("+serverSocket.getLocalPort()+")", e);
        }
    }
}
/** @return the number of rows processed so far (alias for getLinesRead()) */
public long getProcessed()
{
    return getLinesRead();
}

/** @param cop the copy number to assign to this step */
public void setCopy(int cop)
{
    stepcopy = cop;
}

/**
 * @return The steps copy number (default 0)
 */
public int getCopy()
{
    return stepcopy;
}

/** @return the number of errors counted by this step */
public long getErrors()
{
    return errors;
}

/** @param e the new error count */
public void setErrors(long e)
{
    errors = e;
}
/**
 * @return Returns the number of lines read from previous steps
 */
public long getLinesRead()
{
    synchronized (statusCountersLock) {
        return linesRead;
    }
}

/**
 * Increments the number of lines read from previous steps by one
 * @return Returns the new value
 */
public long incrementLinesRead()
{
    synchronized (statusCountersLock) {
        return ++linesRead;
    }
}

/**
 * Decrements the number of lines read from previous steps by one
 * @return Returns the new value
 */
public long decrementLinesRead()
{
    synchronized (statusCountersLock) {
        return --linesRead;
    }
}

/**
 * @param newLinesReadValue the new number of lines read from previous steps
 */
public void setLinesRead(long newLinesReadValue)
{
    synchronized (statusCountersLock) {
        linesRead = newLinesReadValue;
    }
}

/**
 * @return Returns the number of lines read from an input source: database, file, socket, etc.
 */
public long getLinesInput()
{
    synchronized (statusCountersLock) {
        return linesInput;
    }
}

/**
 * Increments the number of lines read from an input source: database, file, socket, etc.
 * @return the new incremented value
 */
public long incrementLinesInput()
{
    synchronized (statusCountersLock) {
        return ++linesInput;
    }
}

/**
 * @param newLinesInputValue the new number of lines read from an input source: database, file, socket, etc.
 */
public void setLinesInput(long newLinesInputValue)
{
    synchronized (statusCountersLock) {
        linesInput = newLinesInputValue;
    }
}

/**
 * @return Returns the number of lines written to an output target: database, file, socket, etc.
 */
public long getLinesOutput()
{
    synchronized (statusCountersLock) {
        return linesOutput;
    }
}

/**
 * Increments the number of lines written to an output target: database, file, socket, etc.
 * @return the new incremented value
 */
public long incrementLinesOutput()
{
    synchronized (statusCountersLock) {
        return ++linesOutput;
    }
}

/**
 * @param newLinesOutputValue the new number of lines written to an output target: database, file, socket, etc.
 */
public void setLinesOutput(long newLinesOutputValue)
{
    synchronized (statusCountersLock) {
        linesOutput = newLinesOutputValue;
    }
}

/**
 * @return Returns the number of lines written to next steps
 */
public long getLinesWritten()
{
    synchronized (statusCountersLock) {
        return linesWritten;
    }
}

/**
 * Increments the number of lines written to next steps by one
 * @return Returns the new value
 */
public long incrementLinesWritten()
{
    synchronized (statusCountersLock) {
        return ++linesWritten;
    }
}

/**
 * Decrements the number of lines written to next steps by one
 * @return Returns the new value
 */
public long decrementLinesWritten()
{
    synchronized (statusCountersLock) {
        return --linesWritten;
    }
}

/**
 * @param newLinesWrittenValue the new number of lines written to next steps
 */
public void setLinesWritten(long newLinesWrittenValue)
{
    synchronized (statusCountersLock) {
        linesWritten = newLinesWrittenValue;
    }
}

/**
 * @return Returns the number of lines updated in an output target: database, file, socket, etc.
 */
public long getLinesUpdated()
{
    synchronized (statusCountersLock) {
        return linesUpdated;
    }
}

/**
 * Increments the number of lines updated in an output target: database, file, socket, etc.
 * @return the new incremented value
 */
public long incrementLinesUpdated()
{
    synchronized (statusCountersLock) {
        return ++linesUpdated;
    }
}

/**
 * @param newLinesUpdatedValue the new number of lines updated in an output target: database, file, socket, etc.
 */
public void setLinesUpdated(long newLinesUpdatedValue)
{
    synchronized (statusCountersLock) {
        linesUpdated = newLinesUpdatedValue;
    }
}

/**
 * @return the number of lines rejected to an error handling step
 */
public long getLinesRejected()
{
    synchronized (statusCountersLock) {
        return linesRejected;
    }
}

/**
 * Increments the number of lines rejected to an error handling step
 * @return the new incremented value
 */
public long incrementLinesRejected()
{
    synchronized (statusCountersLock) {
        return ++linesRejected;
    }
}

/**
 * @param newLinesRejectedValue the new number of lines rejected to an error handling step
 */
public void setLinesRejected(long newLinesRejectedValue)
{
    synchronized (statusCountersLock) {
        linesRejected = newLinesRejectedValue;
    }
}

/**
 * @return the number of lines skipped
 */
public long getLinesSkipped()
{
    synchronized (statusCountersLock) {
        return linesSkipped;
    }
}

/**
 * Increments the number of lines skipped
 * @return the new incremented value
 */
public long incrementLinesSkipped()
{
    synchronized (statusCountersLock) {
        return ++linesSkipped;
    }
}

/**
 * @param newLinesSkippedValue the new number of lines skipped
 */
public void setLinesSkipped(long newLinesSkippedValue)
{
    synchronized (statusCountersLock) {
        linesSkipped = newLinesSkippedValue;
    }
}
/**
 * @return the name of this step
 */
public String getStepname()
{
    return stepname;
}

/**
 * @param stepname the step name to set
 */
public void setStepname(String stepname)
{
    this.stepname = stepname;
}

/**
 * @return the transformation that dispatches rows to this step (same object as {@link #getTrans()})
 */
public Trans getDispatcher()
{
    return trans;
}

/**
 * @return the human readable description of the current status, looked up in the statusDesc table
 */
public String getStatusDescription()
{
    return statusDesc[getStatus()];
}

/**
 * @return Returns the stepMetaInterface.
 */
public StepMetaInterface getStepMetaInterface()
{
    return stepMetaInterface;
}

/**
 * @param stepMetaInterface The stepMetaInterface to set.
 */
public void setStepMetaInterface(StepMetaInterface stepMetaInterface)
{
    this.stepMetaInterface = stepMetaInterface;
}

/**
 * @return Returns the stepDataInterface.
 */
public StepDataInterface getStepDataInterface()
{
    return stepDataInterface;
}

/**
 * @param stepDataInterface The stepDataInterface to set.
 */
public void setStepDataInterface(StepDataInterface stepDataInterface)
{
    this.stepDataInterface = stepDataInterface;
}

/**
 * @return Returns the stepMeta.
 */
public StepMeta getStepMeta()
{
    return stepMeta;
}

/**
 * @param stepMeta The stepMeta to set.
 */
public void setStepMeta(StepMeta stepMeta)
{
    this.stepMeta = stepMeta;
}

/**
 * @return Returns the transMeta.
 */
public TransMeta getTransMeta()
{
    return transMeta;
}

/**
 * @param transMeta The transMeta to set.
 */
public void setTransMeta(TransMeta transMeta)
{
    this.transMeta = transMeta;
}

/**
 * @return Returns the trans.
 */
public Trans getTrans()
{
    return trans;
}
/**
 * putRow is used to copy a row, to the alternate rowset(s) This should get priority over everything else!
 * (synchronized) If distribute is true, a row is copied only once to the output rowsets, otherwise copies are sent
 * to each rowset!
 *
 * @param rowMeta The row meta-data describing the layout of the row.
 * @param row The row to put to the destination rowset(s).
 * @throws KettleStepException when interrupted while paused, when a row can't be cloned,
 *         or when the repartitioning type is invalid
 */
public void putRow(RowMetaInterface rowMeta, Object[] row) throws KettleStepException
{
    // Are we pausing the step? If so, stall forever...
    while (paused.get() && !stopped.get()) {
        try {
            Thread.sleep(1);
        } catch (InterruptedException e) {
            throw new KettleStepException(e);
        }
    }

    // Right after the pause loop we have to check if this thread is stopped or not.
    if (stopped.get())
    {
        if (log.isDebug()) logDebug(Messages.getString("BaseStep.Log.StopPuttingARow")); //$NON-NLS-1$
        stopAll();
        return;
    }

    // Have all threads started?
    // Are we running yet? If not, wait a bit until all threads have been started.
    // This check is only performed once per step instance (checkTransRunning flag).
    if(this.checkTransRunning == false){
        while (!trans.isRunning() && !stopped.get())
        {
            try { Thread.sleep(1); } catch (InterruptedException e) { }
        }
        this.checkTransRunning = true;
    }

    // call all row listeners...
    synchronized (this) {
        for (int i = 0; i < rowListeners.size(); i++)
        {
            RowListener rowListener = (RowListener) rowListeners.get(i);
            rowListener.rowWrittenEvent(rowMeta, row);
        }
    }

    // Keep adding to terminator_rows buffer...
    if (terminator && terminator_rows != null)
    {
        try
        {
            terminator_rows.add(rowMeta.cloneRow(row));
        }
        catch (KettleValueException e)
        {
            throw new KettleStepException("Unable to clone row while adding rows to the terminator rows.", e);
        }
    }

    if (outputRowSets.isEmpty())
    {
        // No more output rowsets!
        // Still update the nr of lines written.
        incrementLinesWritten();
        return; // we're done here!
    }

    // Repartitioning happens when the current step is not partitioned, but the next one is.
    // That means we need to look up the partitioning information in the next step..
    // If there are multiple steps, we need to look at the first (they should be all the same)
    switch(repartitioning)
    {
    case StepPartitioningMeta.PARTITIONING_METHOD_NONE:
    {
        if (distributed)
        {
            // Round-robin distribution: each row goes to exactly one output rowset.
            // Copy the row to the "next" output rowset.
            // We keep the next one in out_handling
            RowSet rs = outputRowSets.get(currentOutputRowSetNr);

            // Loop until we find room in the target rowset
            while (!rs.putRow(rowMeta, row) && !isStopped())
                ;
            incrementLinesWritten();

            // Now determine the next output rowset!
            // Only if we have more then one output...
            if (outputRowSets.size() > 1)
            {
                currentOutputRowSetNr++;
                if (currentOutputRowSetNr >= outputRowSets.size()) currentOutputRowSetNr = 0;
            }
        }
        else
        // Copy the row to all output rowsets
        {
            // Copy to the row in the other output rowsets...
            // Rowsets 1..n-1 receive clones; rowset 0 receives the original row object.
            for (int i = 1; i < outputRowSets.size(); i++) // start at 1
            {
                RowSet rs = outputRowSets.get(i);
                try
                {
                    // Loop until we find room in the target rowset
                    while (!rs.putRow(rowMeta, rowMeta.cloneRow(row)) && !isStopped())
                        ;
                    incrementLinesWritten();
                }
                catch (KettleValueException e)
                {
                    throw new KettleStepException("Unable to clone row while copying rows to multiple target steps", e);
                }
            }

            // set row in first output rowset
            RowSet rs = outputRowSets.get(0);
            while (!rs.putRow(rowMeta, row) && !isStopped())
                ;
            incrementLinesWritten();
        }
    }
    break;

    case StepPartitioningMeta.PARTITIONING_METHOD_SPECIAL:
    {
        if( nextStepPartitioningMeta == null )
        {
            // Look up the partitioning of the next step.
            // This is the case for non-clustered partitioning...
            StepMeta[] nextSteps = transMeta.getNextSteps(stepMeta);
            if (nextSteps.length>0) {
                nextStepPartitioningMeta = nextSteps[0].getStepPartitioningMeta();
            }

            // TODO: throw exception if we're not partitioning yet.
            // For now it throws a NP Exception.
        }

        int partitionNr;
        try
        {
            partitionNr = nextStepPartitioningMeta.getPartition(rowMeta, row);
        }
        catch (KettleException e)
        {
            throw new KettleStepException("Unable to convert a value to integer while calculating the partition number", e);
        }

        RowSet selectedRowSet = null;

        // Determine once whether we run in clustered-partitioning mode.
        if (clusteredPartitioningFirst) {
            clusteredPartitioningFirst=false;

            // We are only running remotely if both the distribution is there AND if the distribution is actually contains something.
            clusteredPartitioning = transMeta.getSlaveStepCopyPartitionDistribution()!=null && !transMeta.getSlaveStepCopyPartitionDistribution().getDistribution().isEmpty();
        }

        // OK, we have a SlaveStepCopyPartitionDistribution in the transformation...
        // We want to pre-calculate what rowset we're sending data to for which partition...
        // It is only valid in clustering / partitioning situations.
        // When doing a local partitioning, it is much simpler.
        if (clusteredPartitioning) {

            // This next block is only performed once for speed...
            if (partitionNrRowSetList==null) {
                partitionNrRowSetList = new RowSet[outputRowSets.size()];

                // The distribution is calculated during transformation split
                // The slave-step-copy distribution is passed onto the slave transformation
                SlaveStepCopyPartitionDistribution distribution = transMeta.getSlaveStepCopyPartitionDistribution();

                String nextPartitionSchemaName = TransSplitter.createPartitionSchemaNameFromTarget( nextStepPartitioningMeta.getPartitionSchema().getName() );

                for (RowSet outputRowSet : outputRowSets) {
                    try
                    {
                        // Look at the pre-determined distribution, decided at "transformation split" time.
                        int partNr = distribution.getPartition(outputRowSet.getRemoteSlaveServerName(), nextPartitionSchemaName, outputRowSet.getDestinationStepCopy());
                        if (partNr<0) {
                            throw new KettleStepException("Unable to find partition using rowset data, slave="+outputRowSet.getRemoteSlaveServerName()+", partition schema="+nextStepPartitioningMeta.getPartitionSchema().getName()+", copy="+outputRowSet.getDestinationStepCopy());
                        }
                        partitionNrRowSetList[partNr] = outputRowSet;
                    }
                    catch(NullPointerException e) {
                        throw(e);
                    }
                }
            }

            // OK, now get the target partition based on the partition nr...
            // This should be very fast
            selectedRowSet = partitionNrRowSetList[partitionNr];
        }
        else {
            // Local partitioning...
            // Put the row forward to the next step according to the partition rule.
            selectedRowSet = outputRowSets.get(partitionNr);
        }

        if (selectedRowSet==null) {
            logBasic("Target rowset is not available for target partition, partitionNr="+partitionNr);
        }

        // logBasic("Putting row to partition #"+partitionNr);

        while (!selectedRowSet.putRow(rowMeta, row) && !isStopped())
            ;
        incrementLinesWritten();

        if (log.isRowLevel())
            try {
                logRowlevel("Partitioned #"+partitionNr+" to "+selectedRowSet+", row="+rowMeta.getString(row));
            } catch (KettleValueException e) {
                throw new KettleStepException(e);
            }
    }
    break;
    case StepPartitioningMeta.PARTITIONING_METHOD_MIRROR:
    {
        // Copy always to all target steps/copies.
        // NOTE(review): unlike the other branches, the same row object (no clone) is sent to
        // every rowset and linesWritten is not incremented here.
        for (int r = 0; r < outputRowSets.size(); r++)
        {
            RowSet rowSet = outputRowSets.get(r);
            while (!rowSet.putRow(rowMeta, row) && !isStopped())
                ;
        }
    }
    break;
    default:
        throw new KettleStepException("Internal error: invalid repartitioning type: " + repartitioning);
    }
}
/**
 * putRowTo is used to put a row in a certain specific RowSet.
 *
 * @param rowMeta The row meta-data to put to the destination RowSet.
 * @param row the data to put in the RowSet
 * @param rowSet the RoWset to put the row into.
 * @throws KettleStepException In case something unexpected goes wrong
 */
public void putRowTo(RowMetaInterface rowMeta, Object[] row, RowSet rowSet) throws KettleStepException
{
    // Are we pausing the step? If so, stall forever...
    while (paused.get() && !stopped.get()) {
        try {
            Thread.sleep(1);
        } catch (InterruptedException e) {
            throw new KettleStepException(e);
        }
    }

    // call all row listeners...
    for (int i = 0; i < rowListeners.size(); i++)
    {
        RowListener rowListener = rowListeners.get(i);
        rowListener.rowWrittenEvent(rowMeta, row);
    }

    // Keep adding to terminator_rows buffer...
    if (terminator && terminator_rows != null)
    {
        try
        {
            terminator_rows.add(rowMeta.cloneRow(row));
        }
        catch (KettleValueException e)
        {
            throw new KettleStepException("Unable to clone row while adding rows to the terminator buffer", e);
        }
    }

    // NOTE(review): unlike putRow(), the stopped check happens after the listeners and
    // terminator buffer have already seen the row.
    if (stopped.get())
    {
        if (log.isDebug()) logDebug(Messages.getString("BaseStep.Log.StopPuttingARow")); //$NON-NLS-1$
        stopAll();
        return;
    }

    // Don't distribute or anything, only go to this rowset!
    // Spin until the target rowset accepts the row or the step is stopped.
    while (!rowSet.putRow(rowMeta, row) && !isStopped())
        ;
    incrementLinesWritten();
}
/**
 * Sends a row to the error-handling rowset. The error row layout (the incoming row meta
 * plus the error fields) is built lazily on the first call and cached in errorRowMeta.
 *
 * @param rowMeta the meta-data of the row that caused the error
 * @param row the row data that caused the error (may be null; then only error fields are filled in)
 * @param nrErrors the number of errors encountered for this row
 * @param errorDescriptions the error description(s)
 * @param fieldNames the name(s) of the field(s) in error
 * @param errorCodes the error code(s)
 * @throws KettleStepException in case something goes wrong writing the error row
 */
public void putError(RowMetaInterface rowMeta, Object[] row, long nrErrors, String errorDescriptions, String fieldNames, String errorCodes) throws KettleStepException
{
    StepErrorMeta stepErrorMeta = stepMeta.getStepErrorMeta();

    // Build the error row layout once: incoming fields followed by the error fields.
    if (errorRowMeta==null)
    {
        errorRowMeta = rowMeta.clone();
        RowMetaInterface add = stepErrorMeta.getErrorRowMeta(nrErrors, errorDescriptions, fieldNames, errorCodes);
        errorRowMeta.addRowMeta(add);
    }

    Object[] errorRowData = RowDataUtil.allocateRowData(errorRowMeta.size());
    if (row!=null) System.arraycopy(row, 0, errorRowData, 0, rowMeta.size());

    // Also add the error fields...
    stepErrorMeta.addErrorRowData(errorRowData, rowMeta.size(), nrErrors, errorDescriptions, fieldNames, errorCodes);

    // call all rowlisteners...
    for (int i = 0; i < rowListeners.size(); i++)
    {
        RowListener rowListener = (RowListener) rowListeners.get(i);
        rowListener.errorRowWrittenEvent(rowMeta, row);
    }

    if (errorRowSet!=null)
    {
        // Spin until the error rowset accepts the row or the step is stopped.
        while (!errorRowSet.putRow(errorRowMeta, errorRowData) && !isStopped())
            ;
        incrementLinesRejected();
    }

    // Stop the step/transformation if the configured error limits are exceeded.
    verifyRejectionRates();
}
/**
 * Checks the configured error limits (absolute maximum and maximum percentage of
 * rejected rows) and stops the whole transformation with an error when one of them
 * is exceeded. Does nothing when no error handling is configured for this step.
 */
private void verifyRejectionRates()
{
    StepErrorMeta stepErrorMeta = stepMeta.getStepErrorMeta();
    if (stepErrorMeta==null) return; // nothing to verify.

    // Was this one error too much? (absolute maximum)
    if (stepErrorMeta.getMaxErrors()>0 && getLinesRejected()>stepErrorMeta.getMaxErrors())
    {
        logError(Messages.getString("BaseStep.Log.TooManyRejectedRows", Long.toString(stepErrorMeta.getMaxErrors()), Long.toString(getLinesRejected())));
        setErrors(1L);
        stopAll();
    }

    // Percentage-based maximum.
    // The getLinesRead()>0 guard avoids an ArithmeticException (division by zero) when rows
    // were rejected before any input row was counted.
    if ( stepErrorMeta.getMaxPercentErrors()>0 && getLinesRejected()>0 && getLinesRead()>0 &&
        ( stepErrorMeta.getMinPercentRows()<=0 || getLinesRead()>=stepErrorMeta.getMinPercentRows())
        )
    {
        int pct = (int) (100 * getLinesRejected() / getLinesRead() );
        if (pct>stepErrorMeta.getMaxPercentErrors())
        {
            logError(Messages.getString("BaseStep.Log.MaxPercentageRejectedReached", Integer.toString(pct) ,Long.toString(getLinesRejected()), Long.toString(getLinesRead())));
            setErrors(1L);
            stopAll();
        }
    }
}
/**
 * @return the input rowset this step is currently reading from (indexed by currentInputRowSetNr)
 */
private RowSet currentInputStream()
{
    return inputRowSets.get(currentInputRowSetNr);
}
/**
 * Find the next not-finished input-stream... in_handling says which one...
 * Advances currentInputRowSetNr to the next input rowset (round-robin) and resets
 * the block pointer used by getRow()'s block-based reading.
 */
private void nextInputStream()
{
    synchronized(inputRowSets) {
        blockPointer=0;

        int streams = inputRowSets.size();

        // No more streams left: exit!
        if (streams == 0) return;

        // Just the one rowSet (common case)
        if (streams == 1) currentInputRowSetNr = 0;

        // If we have some left: take the next!
        // NOTE: the explicit >=size reset (rather than a modulo) also re-normalizes the index
        // when it ran out of range after rowsets were removed from inputRowSets.
        currentInputRowSetNr++;
        if (currentInputRowSetNr >= inputRowSets.size()) currentInputRowSetNr = 0;
    }
}
/**
 * In case of getRow, we receive data from previous steps through the input rowset. In case we split the stream, we
 * have to copy the data to the alternate splits: rowsets 1 through n.
 *
 * Reads rows in blocks of NR_OF_ROWS_IN_BLOCK from the current input rowset before switching to
 * the next one (round-robin). Exhausted rowsets are removed from inputRowSets.
 *
 * @return the next input row, or null when the step is stopped or all input rowsets are exhausted
 * @throws KettleException when interrupted while paused or when safe-mode checking fails
 */
public Object[] getRow() throws KettleException
{
    // Are we pausing the step? If so, stall forever...
    while (paused.get() && !stopped.get()) {
        try {
            Thread.sleep(100);
        } catch (InterruptedException e) {
            throw new KettleStepException(e);
        }
    }

    if (stopped.get())
    {
        if (log.isDebug()) logDebug(Messages.getString("BaseStep.Log.StopLookingForMoreRows")); //$NON-NLS-1$
        stopAll();
        return null;
    }

    // Have all threads started?
    // Are we running yet? If not, wait a bit until all threads have been started.
    // This check is only performed once per step instance (checkTransRunning flag).
    if (this.checkTransRunning == false) {
        while (!trans.isRunning() && !stopped.get())
        {
            try { Thread.sleep(1); } catch (InterruptedException e) { }
        }
        this.checkTransRunning = true;
    }

    // See if we need to open sockets to remote input steps...
    openRemoteInputStepSocketsOnce();

    // If everything is finished, we can stop immediately!
    if (inputRowSets.isEmpty())
    {
        return null;
    }

    RowSet inputRowSet = null;
    Object[] row=null;

    // Do we need to switch to the next input stream?
    if (blockPointer>=NR_OF_ROWS_IN_BLOCK) {

        // Take a peek at the next input stream.
        // If there is no data, process another NR_OF_ROWS_IN_BLOCK on the next input stream.
        for (int r=0;r<inputRowSets.size() && row==null;r++) {
            nextInputStream();
            inputRowSet = currentInputStream();
            row = inputRowSet.getRowImmediate();
        }
        if (row!=null) incrementLinesRead();
    }
    else {
        // What's the current input stream?
        inputRowSet = currentInputStream();
    }

    // To reduce stress on the locking system we are going to allow
    // The buffer to grow beyond "a few" entries.
    // We'll only do that if the previous step has not ended...
    if (isUsingThreadPriorityManagment() && !inputRowSet.isDone() && inputRowSet.size()<= ( transMeta.getSizeRowset()>>6 ) && !isStopped())
    {
        try { Thread.sleep(0,1); } catch (InterruptedException e) { }
    }

    // See if this step is receiving partitioned data...
    // In that case it might be the case that one input row set is receiving all data and
    // the other rowsets nothing. (repartitioning on the same key would do that)
    // We never guaranteed that the input rows would be read one by one alternatively.
    // So in THIS particular case it is safe to just read 100 rows from one rowset, then switch to another etc.
    // We can use timeouts to switch from one to another...

    while (row==null && !isStopped()) {
        // Get a row from the input in row set ...
        // Timeout almost immediately if nothing is there to read.
        // We will then switch to the next row set to read from...
        row = inputRowSet.getRowWait(1, TimeUnit.MILLISECONDS);
        if (row!=null) {
            incrementLinesRead();
            blockPointer++;
        }
        else {
            // Try once more...
            // If row is still empty and the row set is done, we remove the row set from
            // the input stream and move on to the next one...
            if (inputRowSet.isDone()) {
                row = inputRowSet.getRowWait(1, TimeUnit.MILLISECONDS);
                if (row==null) {
                    inputRowSets.remove(currentInputRowSetNr);
                    if (inputRowSets.isEmpty()) return null; // We're completely done.
                }
                else {
                    incrementLinesRead();
                }
            }
            nextInputStream();
            inputRowSet = currentInputStream();
        }
    }

    // This rowSet is perhaps no longer giving back rows?
    while (row==null && !stopped.get()) {
        // Try the next input row set(s) until we find a row set that still has rows...
        // The getRowFrom() method removes row sets from the input row sets list.
        if (inputRowSets.isEmpty()) return null; // We're done.

        nextInputStream();
        inputRowSet = currentInputStream();
        row = getRowFrom(inputRowSet);
    }

    // Also set the meta data on the first occurrence.
    if (inputRowMeta==null) {
        inputRowMeta=inputRowSet.getRowMeta();
    }

    if ( row != null )
    {
        // OK, before we return the row, let's see if we need to check on mixing row compositions...
        if (safeModeEnabled)
        {
            safeModeChecking(inputRowSet.getRowMeta(), inputRowMeta); // Extra checking
            if (row.length<inputRowMeta.size()) {
                throw new KettleException("Safe mode check noticed that the length of the row data is smaller ("+row.length+") than the row metadata size ("+inputRowMeta.size()+")");
            }
        }

        // Notify all row listeners that a row was read.
        for (int i = 0; i < rowListeners.size(); i++)
        {
            RowListener rowListener = (RowListener) rowListeners.get(i);
            rowListener.rowReadEvent(inputRowMeta, row);
        }
    }

    // Check the rejection rates etc. as well.
    verifyRejectionRates();

    return row;
}
/**
 * Opens socket connections to the remote input steps of this step.
 * <br>This method should be used by steps that don't call getRow() first in which it is executed automatically.
 * <br><b>This method should be called before any data is read from previous steps.</b>
 * <br>This action is executed only once.
 * @throws KettleStepException when a reader socket to a remote step cannot be opened
 */
protected void openRemoteInputStepSocketsOnce() throws KettleStepException {
    // Nothing to do without remote input steps, and never do it twice.
    if (remoteInputSteps.isEmpty() || remoteInputStepsInitialized) {
        return;
    }

    // Loop over the remote steps and open client sockets to them
    // Just be careful in case we're dealing with a partitioned clustered step.
    // A partitioned clustered step has only one. (see dispatch())
    for (RemoteStep remoteStep : remoteInputSteps) {
        try {
            inputRowSets.add(remoteStep.openReaderSocket(this));
        } catch (Exception e) {
            throw new KettleStepException("Error opening reader socket to remote step '"+remoteStep+"'", e);
        }
    }
    remoteInputStepsInitialized = true;
}
/**
 * Opens socket connections to the remote output steps of this step.
 * <br>This method is called in method initBeforeStart() because it needs to connect to the server sockets (remote steps) as soon as possible to avoid time-out situations.
 * <br>This action is executed only once.
 * @throws KettleStepException when the slave server name variable is undefined, a target slave
 *         server name is missing, or a writer socket cannot be opened
 */
protected void openRemoteOutputStepSocketsOnce() throws KettleStepException {
    if (!remoteOutputSteps.isEmpty()) {
        if (!remoteOutputStepsInitialized) {

            // Look up the slave server name once and fail fast when it is undefined,
            // BEFORE assigning it to any rowset (the original assigned null first and
            // only then threw, leaving a rowset with a null slave server name).
            String slaveServerName = getVariable(Const.INTERNAL_VARIABLE_SLAVE_SERVER_NAME);
            if (slaveServerName==null) {
                throw new KettleStepException("Variable '"+Const.INTERNAL_VARIABLE_SLAVE_SERVER_NAME+"' is not defined.");
            }

            // Set the current slave target name on all the current output steps (local)
            for (int c=0;c<outputRowSets.size();c++) {
                RowSet rowSet = outputRowSets.get(c);
                rowSet.setRemoteSlaveServerName(slaveServerName);
            }

            // Start threads: one per remote step to funnel the data through...
            for (int i=0;i<remoteOutputSteps.size();i++) {
                RemoteStep remoteStep = remoteOutputSteps.get(i);
                try {
                    if (remoteStep.getTargetSlaveServerName()==null) {
                        throw new KettleStepException("The target slave server name is not defined for remote output step: "+remoteStep);
                    }
                    RowSet rowSet = remoteStep.openWriterSocket();
                    if (log.isDetailed()) logDetailed("Opened a writer socket to remote step: "+remoteStep);
                    outputRowSets.add(rowSet);
                } catch (IOException e) {
                    throw new KettleStepException("Error opening writer socket to remote step '"+remoteStep+"'", e);
                }
            }

            remoteOutputStepsInitialized = true;
        }
    }
}
/**
 * Safe-mode check against the first row layout seen by this step.
 * On the first call the given row meta becomes the reference layout (after checking it for
 * duplicate field names); subsequent calls are compared against that reference.
 *
 * @param row the row meta-data to check; ignored when null
 * @throws KettleRowException when duplicate field names are found or the layout differs
 *         from the reference layout
 */
protected void safeModeChecking(RowMetaInterface row) throws KettleRowException
{
    if (row==null) {
        return;
    }

    if (inputReferenceRow == null)
    {
        inputReferenceRow = row.clone(); // copy it!

        // Check for double field names.
        // Sorting puts duplicates next to each other so an adjacent-pair scan finds them.
        String[] fieldnames = row.getFieldNames();
        Arrays.sort(fieldnames);
        for (int i=0;i<fieldnames.length-1;i++)
        {
            if (fieldnames[i].equals(fieldnames[i+1]))
            {
                throw new KettleRowException(Messages.getString("BaseStep.SafeMode.Exception.DoubleFieldnames", fieldnames[i]));
            }
        }
    }
    else
    {
        safeModeChecking(inputReferenceRow, row);
    }
}
/**
 * Compares the layout of a row against a reference layout and throws when they differ
 * in size, field name (case-insensitive, positional) or field type.
 *
 * @param referenceRowMeta the reference row layout
 * @param rowMeta the row layout to verify
 * @throws KettleRowException when the layouts differ
 */
public static void safeModeChecking(RowMetaInterface referenceRowMeta, RowMetaInterface rowMeta) throws KettleRowException
{
    // See if the row we got has the same layout as the reference row.
    // First check the number of fields.
    if (referenceRowMeta.size() != rowMeta.size())
    {
        throw new KettleRowException(Messages.getString("BaseStep.SafeMode.Exception.VaryingSize", ""+referenceRowMeta.size(), ""+rowMeta.size(), rowMeta.toString()));
    }

    // Same size: check field by field for the position of the names and the types...
    for (int idx = 0; idx < referenceRowMeta.size(); idx++)
    {
        ValueMetaInterface expected = referenceRowMeta.getValueMeta(idx);
        ValueMetaInterface actual   = rowMeta.getValueMeta(idx);

        if (!expected.getName().equalsIgnoreCase(actual.getName()))
        {
            throw new KettleRowException(Messages.getString("BaseStep.SafeMode.Exception.MixingLayout", ""+(idx+1), expected.getName()+" "+expected.toStringMeta(), actual.getName()+" "+actual.toStringMeta()));
        }

        if (expected.getType()!=actual.getType())
        {
            throw new KettleRowException(Messages.getString("BaseStep.SafeMode.Exception.MixingTypes", ""+(idx+1), expected.getName()+" "+expected.toStringMeta(), actual.getName()+" "+actual.toStringMeta()));
        }
    }
}
/**
 * Reads one row from a specific rowset, blocking (with retries) until a row is available,
 * the rowset is done, or the step is stopped. An exhausted rowset is removed from inputRowSets.
 *
 * @param rowSet the rowset to read a row from
 * @return the row data, or null when the step was stopped or the rowset is exhausted
 * @throws KettleStepException when interrupted while paused
 */
public Object[] getRowFrom(RowSet rowSet) throws KettleStepException {
    // Are we pausing the step? If so, stall forever...
    while (paused.get() && !stopped.get()) {
        try {
            Thread.sleep(10);
        } catch (InterruptedException e) {
            throw new KettleStepException(e);
        }
    }

    // To reduce stress on the locking system we are going to allow
    // The buffer to grow beyond "a few" entries.
    // We'll only do that if the previous step has not ended...
    if (isUsingThreadPriorityManagment() && !rowSet.isDone() && rowSet.size()<= ( transMeta.getSizeRowset()>>6 ) && !isStopped())
    {
        try { Thread.sleep(0,1); } catch (InterruptedException e) { }
    }

    // Grab a row... If nothing received after a timeout, try again.
    Object[] rowData = rowSet.getRow();
    while (rowData==null && !rowSet.isDone() && !stopped.get())
    {
        rowData=rowSet.getRow();
    }

    // Still nothing: no more rows to be had?
    if (rowData==null && rowSet.isDone()) {
        // Try one more time to get a row to make sure we don't get a race-condition between the get and the isDone()
        rowData = rowSet.getRow();
    }

    if (stopped.get())
    {
        if (log.isDebug()) logDebug(Messages.getString("BaseStep.Log.StopLookingForMoreRows")); //$NON-NLS-1$
        stopAll();
        return null;
    }

    // The deliberately repeated isDone()/getRow() check below guards against the race
    // between the producer finishing and the last row arriving.
    if (rowData==null && rowSet.isDone())
    {
        // Try one more time...
        rowData = rowSet.getRow();
        if (rowData==null) {
            inputRowSets.remove(rowSet);
            return null;
        }
    }
    incrementLinesRead();

    // call all rowlisteners...
    for (int i = 0; i < rowListeners.size(); i++)
    {
        RowListener rowListener = (RowListener) rowListeners.get(i);
        rowListener.rowReadEvent(rowSet.getRowMeta(), rowData);
    }

    return rowData;
}
/**
 * Looks up the input rowset coming from the given source step (copy 0) into this step.
 *
 * @param sourceStep the name of the originating step
 * @return the matching input rowset, or null when none matches
 */
public RowSet findInputRowSet(String sourceStep) {
    return findInputRowSet(sourceStep, 0, getStepname(), getCopy());
}

/**
 * Looks up an input rowset by origin/destination step name and copy number (names compared
 * case-insensitively).
 *
 * @return the matching input rowset, or null when none matches
 */
public RowSet findInputRowSet(String from, int fromcopy, String to, int tocopy)
{
    for (RowSet candidate : inputRowSets)
    {
        boolean namesMatch = candidate.getOriginStepName().equalsIgnoreCase(from)
                && candidate.getDestinationStepName().equalsIgnoreCase(to);
        boolean copiesMatch = candidate.getOriginStepCopy() == fromcopy
                && candidate.getDestinationStepCopy() == tocopy;
        if (namesMatch && copiesMatch)
        {
            return candidate;
        }
    }
    return null;
}

/**
 * Looks up the output rowset going from this step to the given target step (copy 0).
 *
 * @param targetStep the name of the destination step
 * @return the matching output rowset, or null when none matches
 */
public RowSet findOutputRowSet(String targetStep) {
    return findOutputRowSet(getStepname(), getCopy(), targetStep, 0);
}

/**
 * Looks up an output rowset by origin/destination step name and copy number (names compared
 * case-insensitively).
 *
 * @return the matching output rowset, or null when none matches
 */
public RowSet findOutputRowSet(String from, int fromcopy, String to, int tocopy)
{
    for (RowSet candidate : outputRowSets)
    {
        boolean namesMatch = candidate.getOriginStepName().equalsIgnoreCase(from)
                && candidate.getDestinationStepName().equalsIgnoreCase(to);
        boolean copiesMatch = candidate.getOriginStepCopy() == fromcopy
                && candidate.getDestinationStepCopy() == tocopy;
        if (namesMatch && copiesMatch)
        {
            return candidate;
        }
    }
    return null;
}
/**
 * Tells the next step(s) we're finished writing: marks every output rowset (and the
 * error rowset, when present) as done.
 */
public void setOutputDone()
{
    if (log.isDebug()) logDebug(Messages.getString("BaseStep.Log.OutputDone", String.valueOf(outputRowSets.size()))); //$NON-NLS-1$
    synchronized(outputRowSets)
    {
        for (RowSet outputRowSet : outputRowSets)
        {
            outputRowSet.setDone();
        }
        if (errorRowSet!=null) errorRowSet.setDone();
    }
}
/**
 * This method finds the surrounding steps and rowsets for this base step. This steps keeps it's own list of rowsets
 * (etc.) to prevent it from having to search every time.
 *
 * For every previous and next step it determines the dispatch type (1-1, 1-N, N-1, N-N, N-M)
 * from the copy counts and collects the matching rowsets from the transformation into
 * inputRowSets / outputRowSets. On failure to find a required rowset (non-mapping steps),
 * it sets an error and stops everything.
 */
public void dispatch()
{
    if (transMeta == null) { // for preview reasons, no dispatching is done!
        return;
    }

    StepMeta stepMeta = transMeta.findStep(stepname);

    if (log.isDetailed()) logDetailed(Messages.getString("BaseStep.Log.StartingBuffersAllocation")); //$NON-NLS-1$

    // How many next steps are there? 0, 1 or more??
    // How many steps do we send output to?
    List<StepMeta> previousSteps = transMeta.findPreviousSteps(stepMeta, true);
    int nrInput = previousSteps.size();
    int nrOutput = transMeta.findNrNextSteps(stepMeta);

    inputRowSets = new ArrayList<RowSet>();
    outputRowSets = new ArrayList<RowSet>();
    errorRowSet = null;
    prevSteps = new StepMeta[nrInput];
    nextSteps = new StepMeta[nrOutput];

    currentInputRowSetNr = 0; // we start with input[0];

    if (log.isDetailed()) logDetailed(Messages.getString("BaseStep.Log.StepInfo", String.valueOf(nrInput), String.valueOf(nrOutput))); //$NON-NLS-1$ //$NON-NLS-2$

    // Collect the input rowsets, one pass per previous step.
    for (int i = 0; i < previousSteps.size(); i++)
    {
        prevSteps[i] = previousSteps.get(i);
        if (log.isDetailed()) logDetailed(Messages.getString("BaseStep.Log.GotPreviousStep", stepname, String.valueOf(i), prevSteps[i].getName())); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$

        // Looking at the previous step, you can have either 1 rowset to look at or more then one.
        int prevCopies = prevSteps[i].getCopies();
        int nextCopies = stepMeta.getCopies();
        if (log.isDetailed()) logDetailed(Messages.getString("BaseStep.Log.InputRowInfo", String.valueOf(prevCopies), String.valueOf(nextCopies))); //$NON-NLS-1$ //$NON-NLS-2$

        // Determine the dispatch type and how many rowsets to pick up from the copy counts.
        int nrCopies;
        int dispatchType;

        if (prevCopies == 1 && nextCopies == 1)
        {
            dispatchType = Trans.TYPE_DISP_1_1;
            nrCopies = 1;
        }
        else
        {
            if (prevCopies == 1 && nextCopies > 1)
            {
                dispatchType = Trans.TYPE_DISP_1_N;
                nrCopies = 1;
            }
            else
            {
                if (prevCopies > 1 && nextCopies == 1)
                {
                    dispatchType = Trans.TYPE_DISP_N_1;
                    nrCopies = prevCopies;
                }
                else
                {
                    if (prevCopies == nextCopies)
                    {
                        dispatchType = Trans.TYPE_DISP_N_N;
                        nrCopies = 1;
                    }
                    else
                    {
                        dispatchType = Trans.TYPE_DISP_N_M;
                        nrCopies = prevCopies;
                    }
                }
            }
        }

        for (int c = 0; c < nrCopies; c++)
        {
            RowSet rowSet = null;
            switch (dispatchType)
            {
            case Trans.TYPE_DISP_1_1:
                rowSet = trans.findRowSet(prevSteps[i].getName(), 0, stepname, 0);
                break;
            case Trans.TYPE_DISP_1_N:
                rowSet = trans.findRowSet(prevSteps[i].getName(), 0, stepname, getCopy());
                break;
            case Trans.TYPE_DISP_N_1:
                rowSet = trans.findRowSet(prevSteps[i].getName(), c, stepname, 0);
                break;
            case Trans.TYPE_DISP_N_N:
                rowSet = trans.findRowSet(prevSteps[i].getName(), getCopy(), stepname, getCopy());
                break;
            case Trans.TYPE_DISP_N_M:
                rowSet = trans.findRowSet(prevSteps[i].getName(), c, stepname, getCopy());
                break;
            }
            if (rowSet != null)
            {
                inputRowSets.add(rowSet);
                if (log.isDetailed()) logDetailed(Messages.getString("BaseStep.Log.FoundInputRowset", rowSet.getName())); //$NON-NLS-1$ //$NON-NLS-2$
            }
            else
            {
                // Mapping steps legitimately have no regular rowset; anything else is an error.
                if (!prevSteps[i].isMapping() && !stepMeta.isMapping()) {
                    logError(Messages.getString("BaseStep.Log.UnableToFindInputRowset")); //$NON-NLS-1$
                    setErrors(1);
                    stopAll();
                    return;
                }
            }
        }
    }

    // And now the output part!
    for (int i = 0; i < nrOutput; i++)
    {
        nextSteps[i] = transMeta.findNextStep(stepMeta, i);

        int prevCopies = stepMeta.getCopies();
        int nextCopies = nextSteps[i].getCopies();

        if (log.isDetailed()) logDetailed(Messages.getString("BaseStep.Log.OutputRowInfo", String.valueOf(prevCopies), String.valueOf(nextCopies))); //$NON-NLS-1$ //$NON-NLS-2$

        // Same dispatch-type determination as above, but nrCopies now follows the NEXT step's copies.
        int nrCopies;
        int dispatchType;

        if (prevCopies == 1 && nextCopies == 1)
        {
            dispatchType = Trans.TYPE_DISP_1_1;
            nrCopies = 1;
        }
        else
        {
            if (prevCopies == 1 && nextCopies > 1)
            {
                dispatchType = Trans.TYPE_DISP_1_N;
                nrCopies = nextCopies;
            }
            else
            {
                if (prevCopies > 1 && nextCopies == 1)
                {
                    dispatchType = Trans.TYPE_DISP_N_1;
                    nrCopies = 1;
                }
                else
                {
                    if (prevCopies == nextCopies)
                    {
                        dispatchType = Trans.TYPE_DISP_N_N;
                        nrCopies = 1;
                    }
                    else
                    {
                        dispatchType = Trans.TYPE_DISP_N_M;
                        nrCopies = nextCopies;
                    }
                }
            }
        }

        for (int c = 0; c < nrCopies; c++)
        {
            RowSet rowSet = null;
            switch (dispatchType)
            {
            case Trans.TYPE_DISP_1_1:
                rowSet = trans.findRowSet(stepname, 0, nextSteps[i].getName(), 0);
                break;
            case Trans.TYPE_DISP_1_N:
                rowSet = trans.findRowSet(stepname, 0, nextSteps[i].getName(), c);
                break;
            case Trans.TYPE_DISP_N_1:
                rowSet = trans.findRowSet(stepname, getCopy(), nextSteps[i].getName(), 0);
                break;
            case Trans.TYPE_DISP_N_N:
                rowSet = trans.findRowSet(stepname, getCopy(), nextSteps[i].getName(), getCopy());
                break;
            case Trans.TYPE_DISP_N_M:
                rowSet = trans.findRowSet(stepname, getCopy(), nextSteps[i].getName(), c);
                break;
            }
            if (rowSet != null)
            {
                outputRowSets.add(rowSet);
                if (log.isDetailed()) logDetailed(Messages.getString("BaseStep.Log.FoundOutputRowset", rowSet.getName())); //$NON-NLS-1$ //$NON-NLS-2$
            }
            else
            {
                if (!stepMeta.isMapping() && !nextSteps[i].isMapping()) {
                    logError(Messages.getString("BaseStep.Log.UnableToFindOutputRowset")); //$NON-NLS-1$
                    setErrors(1);
                    stopAll();
                    return;
                }
            }
        }
    }

    if (stepMeta.getTargetStepPartitioningMeta()!=null) {
        nextStepPartitioningMeta = stepMeta.getTargetStepPartitioningMeta();
    }

    if (log.isDetailed()) logDetailed(Messages.getString("BaseStep.Log.FinishedDispatching")); //$NON-NLS-1$
}
/**
 * Logs a message at MINIMAL level, prefixed with "stepname.stepcopy".
 */
public void logMinimal(String s)
{
    log.println(LogWriter.LOG_LEVEL_MINIMAL, stepname + "." + stepcopy, s); //$NON-NLS-1$
}

/**
 * Logs a message at BASIC level, prefixed with "stepname.stepcopy".
 */
public void logBasic(String s)
{
    log.println(LogWriter.LOG_LEVEL_BASIC, stepname + "." + stepcopy, s); //$NON-NLS-1$
}

/**
 * Logs a message at ERROR level, prefixed with "stepname.stepcopy".
 */
public void logError(String s)
{
    log.println(LogWriter.LOG_LEVEL_ERROR, stepname + "." + stepcopy, s); //$NON-NLS-1$
}

/**
 * Logs a message and the accompanying throwable at ERROR level, prefixed with "stepname.stepcopy".
 */
public void logError(String s, Throwable e)
{
    log.logError(stepname + "." + stepcopy, s, e); //$NON-NLS-1$
}

/**
 * Logs a message at DETAILED level, prefixed with "stepname.stepcopy".
 */
public void logDetailed(String s)
{
    log.println(LogWriter.LOG_LEVEL_DETAILED, stepname + "." + stepcopy, s); //$NON-NLS-1$
}

/**
 * Logs a message at DEBUG level, prefixed with "stepname.stepcopy".
 */
public void logDebug(String s)
{
    log.println(LogWriter.LOG_LEVEL_DEBUG, stepname + "." + stepcopy, s); //$NON-NLS-1$
}

/**
 * Logs a message at ROWLEVEL level, prefixed with "stepname.stepcopy".
 */
public void logRowlevel(String s)
{
    log.println(LogWriter.LOG_LEVEL_ROWLEVEL, stepname + "." + stepcopy, s); //$NON-NLS-1$
}
/**
 * Returns the current class number of the transformation and increments it.
 * NOTE(review): the read-then-increment of trans.class_nr is not synchronized here;
 * concurrent callers could observe the same value — confirm whether callers serialize access.
 *
 * @return the class number before incrementing
 */
public int getNextClassNr()
{
    int ret = trans.class_nr;
    trans.class_nr++;

    return ret;
}
/**
 * @return true when every output rowset reports done (also true when there are no
 *         output rowsets at all)
 */
public boolean outputIsDone()
{
    int notDone = 0;
    for (RowSet outputRowSet : outputRowSets)
    {
        if (!outputRowSet.isDone()) notDone++;
    }
    return notDone == 0;
}
/**
 * Flags this step as stopped and asks the transformation to stop all other steps too.
 */
public void stopAll()
{
    stopped.set(true);
    trans.stopAll();
}

/**
 * @return true when this step has been flagged to stop
 */
public boolean isStopped()
{
    return stopped.get();
}

/**
 * @return true when this step is currently paused
 */
public boolean isPaused()
{
    return paused.get();
}

/**
 * @param stopped the stopped flag value to set
 */
public void setStopped(boolean stopped) {
    this.stopped.set(stopped);
}

/**
 * Replaces the stopped flag holder itself (used to share one flag across steps).
 */
public void setStopped(AtomicBoolean stopped) {
    this.stopped = stopped;
}

/**
 * Pauses the step (convenience for setPaused(true)).
 */
public void pauseRunning() {
    setPaused(true);
}

/**
 * Resumes the step (convenience for setPaused(false)).
 */
public void resumeRunning() {
    setPaused(false);
}

/**
 * @param paused the paused flag value to set
 */
public void setPaused(boolean paused) {
    this.paused.set(paused);
}

/**
 * Replaces the paused flag holder itself (used to share one flag across steps).
 */
public void setPaused(AtomicBoolean paused) {
    this.paused = paused;
}

/**
 * @return true while the step is initialising
 */
public boolean isInitialising()
{
    return init;
}
/**
 * Records the step's start time and publishes the step-scoped internal
 * variables (step name and copy number).
 */
public void markStart()
{
    start_time = Calendar.getInstance().getTime();
    setInternalVariables();
}
/** Publishes the internal step-scoped variables: the step name and the copy number. */
public void setInternalVariables()
{
    setVariable(Const.INTERNAL_VARIABLE_STEP_NAME, stepname);
    setVariable(Const.INTERNAL_VARIABLE_STEP_COPYNR, Integer.toString(getCopy()));
}
/**
 * Records the step's stop time and notifies all attached step listeners
 * that this step has finished.
 */
public void markStop()
{
    stop_time = Calendar.getInstance().getTime();
    // The step is completely done: tell every registered listener.
    for (StepListener listener : stepListeners) {
        listener.stepFinished(trans, stepMeta, this);
    }
}
/**
 * Computes the elapsed runtime of this step in milliseconds:
 * "now minus start" while still running, "stop minus start" once finished,
 * or 0 when the step was never started.
 */
public long getRuntime()
{
    if (start_time == null)
    {
        return 0L;
    }
    if (stop_time == null)
    {
        // Still running: measure against the current time.
        return Calendar.getInstance().getTimeInMillis() - start_time.getTime();
    }
    return stop_time.getTime() - start_time.getTime();
}
/**
 * Builds a single log record (row metadata plus data) describing this step's results.
 * The row layout matches {@link #getLogFields(String)}: step name, copy number,
 * the various line counters, the error count and the start/end dates.
 *
 * @param sname         the step name
 * @param copynr        the step copy number
 * @param lines_read    number of lines read from previous steps
 * @param lines_written number of lines written to following steps
 * @param lines_updated number of lines updated
 * @param lines_skipped number of lines skipped
 * @param errors        number of errors
 * @param start_date    the start date of the step
 * @param end_date      the end date of the step
 * @return the assembled row metadata and data
 */
public RowMetaAndData buildLog(String sname, int copynr, long lines_read, long lines_written, long lines_updated, long lines_skipped, long errors, Date start_date, Date end_date)
{
    RowMetaInterface r = new RowMeta();
    Object[] data = new Object[9];
    int nr = 0;
    r.addValueMeta(new ValueMeta(Messages.getString("BaseStep.ColumnName.Stepname"), ValueMetaInterface.TYPE_STRING)); //$NON-NLS-1$
    data[nr++] = sname;
    r.addValueMeta(new ValueMeta(Messages.getString("BaseStep.ColumnName.Copy"), ValueMetaInterface.TYPE_NUMBER)); //$NON-NLS-1$
    data[nr++] = Double.valueOf(copynr); // Double.valueOf avoids the wasteful "new Double(...)" allocation
    r.addValueMeta(new ValueMeta(Messages.getString("BaseStep.ColumnName.LinesReaded"), ValueMetaInterface.TYPE_NUMBER)); //$NON-NLS-1$
    data[nr++] = Double.valueOf(lines_read);
    r.addValueMeta(new ValueMeta(Messages.getString("BaseStep.ColumnName.LinesWritten"), ValueMetaInterface.TYPE_NUMBER)); //$NON-NLS-1$
    data[nr++] = Double.valueOf(lines_written);
    r.addValueMeta(new ValueMeta(Messages.getString("BaseStep.ColumnName.LinesUpdated"), ValueMetaInterface.TYPE_NUMBER)); //$NON-NLS-1$
    data[nr++] = Double.valueOf(lines_updated);
    r.addValueMeta(new ValueMeta(Messages.getString("BaseStep.ColumnName.LinesSkipped"), ValueMetaInterface.TYPE_NUMBER)); //$NON-NLS-1$
    data[nr++] = Double.valueOf(lines_skipped);
    r.addValueMeta(new ValueMeta(Messages.getString("BaseStep.ColumnName.Errors"), ValueMetaInterface.TYPE_NUMBER)); //$NON-NLS-1$
    data[nr++] = Double.valueOf(errors);
    r.addValueMeta(new ValueMeta("start_date", ValueMetaInterface.TYPE_DATE)); //$NON-NLS-1$
    data[nr++] = start_date;
    r.addValueMeta(new ValueMeta("end_date", ValueMetaInterface.TYPE_DATE)); //$NON-NLS-1$
    data[nr++] = end_date;
    return new RowMetaAndData(r, data);
}
/**
 * Describes the layout of a step log record: step name (length 256),
 * copy number, the line counters, error count and start/end dates.
 * Every field's origin is set to the given identifier.
 *
 * @param comm the origin to assign to each field
 * @return the row metadata for step log records
 */
public static final RowMetaInterface getLogFields(String comm)
{
    RowMetaInterface r = new RowMeta();
    ValueMetaInterface sname = new ValueMeta(Messages.getString("BaseStep.ColumnName.Stepname"), ValueMetaInterface.TYPE_STRING); //$NON-NLS-1$ //$NON-NLS-2$
    sname.setLength(256);
    r.addValueMeta(sname);
    r.addValueMeta(new ValueMeta(Messages.getString("BaseStep.ColumnName.Copy"), ValueMetaInterface.TYPE_NUMBER)); //$NON-NLS-1$
    r.addValueMeta(new ValueMeta(Messages.getString("BaseStep.ColumnName.LinesReaded"), ValueMetaInterface.TYPE_NUMBER)); //$NON-NLS-1$
    r.addValueMeta(new ValueMeta(Messages.getString("BaseStep.ColumnName.LinesWritten"), ValueMetaInterface.TYPE_NUMBER)); //$NON-NLS-1$
    r.addValueMeta(new ValueMeta(Messages.getString("BaseStep.ColumnName.LinesUpdated"), ValueMetaInterface.TYPE_NUMBER)); //$NON-NLS-1$
    r.addValueMeta(new ValueMeta(Messages.getString("BaseStep.ColumnName.LinesSkipped"), ValueMetaInterface.TYPE_NUMBER)); //$NON-NLS-1$
    r.addValueMeta(new ValueMeta(Messages.getString("BaseStep.ColumnName.Errors"), ValueMetaInterface.TYPE_NUMBER)); //$NON-NLS-1$
    r.addValueMeta(new ValueMeta(Messages.getString("BaseStep.ColumnName.StartDate"), ValueMetaInterface.TYPE_DATE)); //$NON-NLS-1$
    r.addValueMeta(new ValueMeta(Messages.getString("BaseStep.ColumnName.EndDate"), ValueMetaInterface.TYPE_DATE)); //$NON-NLS-1$
    // Stamp each field with the origin identifier.
    for (int i = 0; i < r.size(); i++)
    {
        r.getValueMeta(i).setOrigin(comm);
    }
    return r;
}
/**
 * Identifies this step in logs: "name.partitionID" when partitioned,
 * "name.slaveNr.copy" when clustered, plain "name.copy" otherwise.
 */
public String toString()
{
    if (!Const.isEmpty(partitionID)) {
        return stepname + "." + partitionID; //$NON-NLS-1$
    }
    else if (clusterSize>1) {
        return stepname + "." + slaveNr+"."+getCopy(); //$NON-NLS-1$ //$NON-NLS-2$
    }
    else {
        return stepname + "." + getCopy(); //$NON-NLS-1$
    }
}
/** @return this step's thread (the step itself extends Thread). */
public Thread getThread()
{
    return this;
}
/** @return the total number of rows currently buffered in all output rowsets. */
public int rowsetOutputSize()
{
    int total = 0;
    for (RowSet rowSet : outputRowSets)
    {
        total += rowSet.size();
    }
    return total;
}
/** @return the total number of rows currently buffered in all input rowsets. */
public int rowsetInputSize()
{
    int total = 0;
    for (RowSet rowSet : inputRowSets)
    {
        total += rowSet.size();
    }
    return total;
}
/**
 * Create a new empty StepMeta class from the steploader
 *
 * @param stepplugin The step/plugin to use
 * @param steploader The StepLoader to load from
 * @return The requested class.
 * @throws KettleStepLoaderException when the step class cannot be loaded
 */
public static final StepMetaInterface getStepInfo(StepPlugin stepplugin, StepLoader steploader) throws KettleStepLoaderException
{
    return steploader.getStepClass(stepplugin);
}
/**
 * Looks up the icon file name for the given step type.
 * NOTE(review): no bounds check — assumes steptype is a valid index into the steps array.
 */
public static final String getIconFilename(int steptype)
{
    return steps[steptype].getImageFileName();
}
/**
 * Perform actions to stop a running step. This can be stopping running SQL queries (cancel), etc. Default it
 * doesn't do anything.
 *
 * @param stepMetaInterface The metadata that might be needed by the step to stop running.
 * @param stepDataInterface The interface to the step data containing the connections, resultsets, open files, etc.
 * @throws KettleException in case something goes wrong
 *
 */
public void stopRunning(StepMetaInterface stepMetaInterface, StepDataInterface stepDataInterface) throws KettleException
{
    // Intentionally empty: concrete steps override this to cancel queries, close files, etc.
}
/**
 * Stops running operations This method is deprecated, please use the method specifying the metadata and data
 * interfaces.
 *
 * @deprecated use {@link #stopRunning(StepMetaInterface, StepDataInterface)} instead
 */
public void stopRunning()
{
    // Intentionally empty: kept only for backward compatibility.
}
/**
 * Logs a one-line summary of this step's counters (input, output, read,
 * written, updated, errors+rejected). Logged at BASIC level when any
 * counter is non-zero, at DETAILED level otherwise.
 */
public void logSummary()
{
    synchronized (statusCountersLock) {
        long li = getLinesInput();
        long lo = getLinesOutput();
        long lr = getLinesRead();
        long lw = getLinesWritten();
        long lu = getLinesUpdated();
        long lj = getLinesRejected();
        // FIX: the "updated" slot previously re-used the lines-written counter (lw was
        // passed twice); it now reports the lines-updated counter lu as intended.
        if (li > 0 || lo > 0 || lr > 0 || lw > 0 || lu > 0 || lj > 0 || errors > 0)
            logBasic(Messages.getString("BaseStep.Log.SummaryInfo", String.valueOf(li), String.valueOf(lo), String.valueOf(lr), String.valueOf(lw), String.valueOf(lu), String.valueOf(errors+lj)));
        else
            logDetailed(Messages.getString("BaseStep.Log.SummaryInfo", String.valueOf(li), String.valueOf(lo), String.valueOf(lr), String.valueOf(lw), String.valueOf(lu), String.valueOf(errors+lj)));
    }
}
/** @return the step's plugin ID from its metadata, or null when no metadata is set. */
public String getStepID()
{
    if (stepMeta != null) return stepMeta.getStepID();
    return null;
}
/**
 * @return Returns the inputRowSets. Note: this is the live internal list, not a copy.
 */
public List<RowSet> getInputRowSets()
{
    return inputRowSets;
}
/**
 * @param inputRowSets The inputRowSets to set.
 */
public void setInputRowSets(ArrayList<RowSet> inputRowSets)
{
    this.inputRowSets = inputRowSets;
}
/**
 * @return Returns the outputRowSets. Note: this is the live internal list, not a copy.
 */
public List<RowSet> getOutputRowSets()
{
    return outputRowSets;
}
/**
 * @param outputRowSets The outputRowSets to set.
 */
public void setOutputRowSets(ArrayList<RowSet> outputRowSets)
{
    this.outputRowSets = outputRowSets;
}
/**
 * @return Returns the distributed (round-robin row distribution) flag.
 */
public boolean isDistributed()
{
    return distributed;
}
/**
 * @param distributed The distributed to set.
 */
public void setDistributed(boolean distributed)
{
    this.distributed = distributed;
}
/** Registers a listener that is notified for rows passing through this step. */
public void addRowListener(RowListener rowListener)
{
    rowListeners.add(rowListener);
}
/** Removes a previously registered row listener. */
public void removeRowListener(RowListener rowListener)
{
    rowListeners.remove(rowListener);
}
/** @return the live list of registered row listeners. */
public List<RowListener> getRowListeners()
{
    return rowListeners;
}
/** Registers a result file, keyed by its file object's string representation. */
public void addResultFile(ResultFile resultFile)
{
    resultFiles.put(resultFile.getFile().toString(), resultFile);
}
/** @return the map of result files registered by this step, keyed by file name. */
public Map<String,ResultFile> getResultFiles()
{
    return resultFiles;
}
/**
 * @return Returns true is this step is running in safe mode, with extra checking enabled...
 */
public boolean isSafeModeEnabled()
{
    return safeModeEnabled;
}
/**
 * @param safeModeEnabled set to true is this step has to be running in safe mode, with extra checking enabled...
 */
public void setSafeModeEnabled(boolean safeModeEnabled)
{
    this.safeModeEnabled = safeModeEnabled;
}
/**
 * Determines this step's current status. The checks are ordered:
 * stopped and paused take precedence, then a live thread means running;
 * otherwise the status is taken from the step's data interface
 * (a disposed data interface of a dead thread counts as finished).
 *
 * @return one of the StepDataInterface.STATUS_* constants
 */
public int getStatus()
{
    if (isStopped()) return StepDataInterface.STATUS_STOPPED;
    if (isPaused()) return StepDataInterface.STATUS_PAUSED;
    if (isAlive()) return StepDataInterface.STATUS_RUNNING;
    // Get the rest in StepDataInterface object:
    StepDataInterface sdi = trans.getStepDataInterface(stepname, stepcopy);
    if (sdi != null)
    {
        if (sdi.getStatus() == StepDataInterface.STATUS_DISPOSED && !isAlive()) return StepDataInterface.STATUS_FINISHED;
        return sdi.getStatus();
    }
    return StepDataInterface.STATUS_EMPTY;
}
/**
 * @return the partitionID this step copy handles, or null when not partitioned
 */
public String getPartitionID()
{
    return partitionID;
}
/**
 * @param partitionID the partitionID to set
 */
public void setPartitionID(String partitionID)
{
    this.partitionID = partitionID;
}
/**
 * @return the partitionTargets: rowsets mapped by target partition ID
 */
public Map<String,RowSet> getPartitionTargets()
{
    return partitionTargets;
}
/**
 * @param partitionTargets the partitionTargets to set
 */
public void setPartitionTargets(Map<String,RowSet> partitionTargets)
{
    this.partitionTargets = partitionTargets;
}
/**
 * @return the repartitioning type
 */
public int getRepartitioning()
{
    return repartitioning;
}
/**
 * @param repartitioning the repartitioning type to set
 */
public void setRepartitioning(int repartitioning)
{
    this.repartitioning = repartitioning;
}
/**
 * @return true when this step runs partitioned
 */
public boolean isPartitioned()
{
    return partitioned;
}
/**
 * @param partitioned the partitioned to set
 */
public void setPartitioned(boolean partitioned)
{
    this.partitioned = partitioned;
}
/**
 * @param lines the number of lines processed so far
 * @return true when a feedback line should be logged now: feedback is enabled,
 *         at least one line was processed, and the count is an exact multiple
 *         of the configured feedback size.
 */
protected boolean checkFeedback(long lines)
{
    return getTransMeta().isFeedbackShown() && (lines > 0) && (getTransMeta().getFeedbackSize() > 0)
    && (lines % getTransMeta().getFeedbackSize()) == 0;
}
/**
 * @return the metadata describing the rows arriving at this step
 */
public RowMetaInterface getInputRowMeta()
{
    return inputRowMeta;
}
/**
 * @param rowMeta the input row metadata to set
 */
public void setInputRowMeta(RowMetaInterface rowMeta)
{
    this.inputRowMeta = rowMeta;
}
/**
 * @return the metadata describing rows sent to error handling
 */
public RowMetaInterface getErrorRowMeta()
{
    return errorRowMeta;
}
/**
 * @param errorRowMeta the errorRowMeta to set
 */
public void setErrorRowMeta(RowMetaInterface errorRowMeta)
{
    this.errorRowMeta = errorRowMeta;
}
/**
 * @return the metadata describing rows shown in preview mode
 */
public RowMetaInterface getPreviewRowMeta()
{
    return previewRowMeta;
}
/**
 * @param previewRowMeta the previewRowMeta to set
 */
public void setPreviewRowMeta(RowMetaInterface previewRowMeta)
{
    this.previewRowMeta = previewRowMeta;
}
// The following methods implement the VariableSpace interface by delegating
// to the contained 'variables' object.
/** Copies all variables from the given space into this step's variable space. */
public void copyVariablesFrom(VariableSpace space)
{
    variables.copyVariablesFrom(space);
}
/** Resolves ${variable} references in the given string. */
public String environmentSubstitute(String aString)
{
    return variables.environmentSubstitute(aString);
}
/** Resolves ${variable} references in every element of the given array. */
public String[] environmentSubstitute(String aString[])
{
    return variables.environmentSubstitute(aString);
}
/** @return the parent variable space, if any. */
public VariableSpace getParentVariableSpace()
{
    return variables.getParentVariableSpace();
}
/** Sets the parent variable space. */
public void setParentVariableSpace(VariableSpace parent)
{
    variables.setParentVariableSpace(parent);
}
/** @return the variable's value, or the given default when it is not set. */
public String getVariable(String variableName, String defaultValue)
{
    return variables.getVariable(variableName, defaultValue);
}
/** @return the variable's value, or null when it is not set. */
public String getVariable(String variableName)
{
    return variables.getVariable(variableName);
}
/**
 * Resolves the given variable expression and interprets the result as a
 * boolean. Falls back to the supplied default when the expression or its
 * resolved value is empty.
 *
 * @param variableName the variable expression to resolve
 * @param defaultValue the value to return when nothing usable is found
 * @return the boolean interpretation of the resolved value, or the default
 */
public boolean getBooleanValueOfVariable(String variableName, boolean defaultValue) {
    if (Const.isEmpty(variableName))
    {
        return defaultValue;
    }
    String resolved = environmentSubstitute(variableName);
    if (Const.isEmpty(resolved))
    {
        return defaultValue;
    }
    return ValueMeta.convertStringToBoolean(resolved);
}
/** Initializes this step's variables from the given parent space. */
public void initializeVariablesFrom(VariableSpace parent)
{
    variables.initializeVariablesFrom(parent);
}
/** @return the names of all variables currently defined in this step's space. */
public String[] listVariables()
{
    return variables.listVariables();
}
/** Sets a variable in this step's variable space. */
public void setVariable(String variableName, String variableValue)
{
    variables.setVariable(variableName, variableValue);
}
/** Replaces this step's variable space with the given one so both share state. */
public void shareVariablesWith(VariableSpace space)
{
    variables = space;
}
/** Injects the given name/value pairs into this step's variable space. */
public void injectVariables(Map<String,String> prop)
{
    variables.injectVariables(prop);
}
/**
 * Support for CheckResultSourceInterface: the type ID is the step's plugin ID.
 */
public String getTypeId() {
    return this.getStepID();
}
/**
 * @return the unique slave number in the cluster
 */
public int getSlaveNr() {
    return slaveNr;
}
/**
 * @return the cluster size
 */
public int getClusterSize() {
    return clusterSize;
}
/**
 * @return a unique step number across all slave servers: slaveNr * nrCopies + copyNr
 */
public int getUniqueStepNrAcrossSlaves() {
    return uniqueStepNrAcrossSlaves;
}
/**
 * @return the number of unique steps across all slave servers
 */
public int getUniqueStepCountAcrossSlaves() {
    return uniqueStepCountAcrossSlaves;
}
/**
 * @return the serverSockets opened by this step
 */
public List<ServerSocket> getServerSockets() {
    return serverSockets;
}
/**
 * @param serverSockets the serverSockets to set
 */
public void setServerSockets(List<ServerSocket> serverSockets) {
    this.serverSockets = serverSockets;
}
/**
 * @param usingThreadPriorityManagment set to true to actively manage priorities of step threads
 */
public void setUsingThreadPriorityManagment(boolean usingThreadPriorityManagment) {
    this.usingThreadPriorityManagment = usingThreadPriorityManagment;
}
/**
 * @return true if we are actively managing priorities of step threads
 */
public boolean isUsingThreadPriorityManagment() {
    return usingThreadPriorityManagment;
}
/**
 * This method is executed by Trans right before the threads start and right after initialization.
 *
 * More to the point: here we open remote output step sockets.
 *
 * @throws KettleStepException In case there is an error
 */
public void initBeforeStart() throws KettleStepException {
    openRemoteOutputStepSocketsOnce();
}
/**
 * Drives a step's main loop: repeatedly calls processRow() until it returns
 * false or the step is stopped. Any throwable is logged (with special,
 * low-overhead handling for OutOfMemoryError), errors are flagged and the
 * transformation is stopped. The step is always disposed, a counter summary
 * is logged, and the step is marked stopped.
 *
 * @param stepInterface the step to run
 * @param meta          the step's metadata
 * @param data          the step's data
 */
public static void runStepThread(StepInterface stepInterface, StepMetaInterface meta, StepDataInterface data) {
    LogWriter log = LogWriter.getInstance();
    try
    {
        if (log.isDetailed()) log.logDetailed(stepInterface.toString(), Messages.getString("System.Log.StartingToRun")); //$NON-NLS-1$
        while (stepInterface.processRow(meta, data) && !stepInterface.isStopped());
    }
    catch(Throwable t)
    {
        try
        {
            //check for OOME
            if(t instanceof OutOfMemoryError) {
                // Handle this different with as less overhead as possible to get an error message in the log.
                // Otherwise it crashes likely with another OOME in Me$$ages.getString() and does not log
                // nor call the setErrors() and stopAll() below.
                log.logError(stepInterface.toString(), "UnexpectedError: " + t.toString()); //$NON-NLS-1$
            } else {
                log.logError(stepInterface.toString(), Messages.getString("System.Log.UnexpectedError")+" : "); //$NON-NLS-1$ //$NON-NLS-2$
            }
            log.logError(stepInterface.toString(), Const.getStackTracker(t));
        }
        catch(OutOfMemoryError e)
        {
            e.printStackTrace();
        }
        finally
        {
            stepInterface.setErrors(1);
            stepInterface.stopAll();
        }
    }
    finally
    {
        stepInterface.dispose(meta, data);
        try {
            long li = stepInterface.getLinesInput();
            long lo = stepInterface.getLinesOutput();
            long lr = stepInterface.getLinesRead();
            long lw = stepInterface.getLinesWritten();
            long lu = stepInterface.getLinesUpdated();
            long lj = stepInterface.getLinesRejected();
            long e = stepInterface.getErrors();
            // FIX: the "updated" slot previously re-used the lines-written counter (lw was
            // passed twice); it now reports the lines-updated counter lu as intended.
            if (li > 0 || lo > 0 || lr > 0 || lw > 0 || lu > 0 || lj > 0 || e > 0)
                log.logBasic(stepInterface.toString(), Messages.getString("BaseStep.Log.SummaryInfo", String.valueOf(li), String.valueOf(lo), String.valueOf(lr), String.valueOf(lw), String.valueOf(lu), String.valueOf(e+lj)));
            else
                log.logDetailed(stepInterface.toString(), Messages.getString("BaseStep.Log.SummaryInfo", String.valueOf(li), String.valueOf(lo), String.valueOf(lr), String.valueOf(lw), String.valueOf(lu), String.valueOf(e+lj)));
        } catch(Throwable t) {
            // it's likely an OOME, thus no overhead by Me$$ages.getString(), see above
            log.logError(stepInterface.toString(), "UnexpectedError: " + t.toString()); //$NON-NLS-1$
        } finally {
            stepInterface.markStop();
        }
    }
}
/**
 * @return the stepListeners attached to this step
 */
public List<StepListener> getStepListeners() {
    return stepListeners;
}
/**
 * @param stepListeners the stepListeners to set
 */
public void setStepListeners(List<StepListener> stepListeners) {
    this.stepListeners = stepListeners;
}
/**
 * Default row-processing implementation: does nothing and signals "done".
 * Concrete steps override this with their actual row logic.
 */
public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException {
    return false;
}
/** Attaches a listener that is notified of step lifecycle events. */
public void addStepListener(StepListener stepListener) {
    stepListeners.add(stepListener);
}
/** @return true when this step's metadata marks it as a mapping step. */
public boolean isMapping() {
    return stepMeta.isMapping();
}
}
|
package dyvil.tools.compiler.parser.expression;
import dyvil.tools.compiler.ast.access.*;
import dyvil.tools.compiler.ast.annotation.Annotation;
import dyvil.tools.compiler.ast.annotation.AnnotationValue;
import dyvil.tools.compiler.ast.bytecode.Bytecode;
import dyvil.tools.compiler.ast.classes.IClass;
import dyvil.tools.compiler.ast.classes.IClassBody;
import dyvil.tools.compiler.ast.constant.*;
import dyvil.tools.compiler.ast.consumer.ITypeConsumer;
import dyvil.tools.compiler.ast.consumer.IValueConsumer;
import dyvil.tools.compiler.ast.expression.*;
import dyvil.tools.compiler.ast.generic.GenericData;
import dyvil.tools.compiler.ast.member.Name;
import dyvil.tools.compiler.ast.operator.*;
import dyvil.tools.compiler.ast.parameter.*;
import dyvil.tools.compiler.ast.pattern.ICase;
import dyvil.tools.compiler.ast.statement.*;
import dyvil.tools.compiler.ast.type.IType;
import dyvil.tools.compiler.ast.type.ITyped;
import dyvil.tools.compiler.lexer.position.ICodePosition;
import dyvil.tools.compiler.lexer.token.IToken;
import dyvil.tools.compiler.parser.IParserManager;
import dyvil.tools.compiler.parser.Parser;
import dyvil.tools.compiler.parser.annotation.AnnotationParser;
import dyvil.tools.compiler.parser.bytecode.BytecodeParser;
import dyvil.tools.compiler.parser.classes.ClassBodyParser;
import dyvil.tools.compiler.parser.statement.*;
import dyvil.tools.compiler.parser.type.TypeListParser;
import dyvil.tools.compiler.parser.type.TypeParser;
import dyvil.tools.compiler.transform.Keywords;
import dyvil.tools.compiler.transform.Symbols;
import dyvil.tools.compiler.transform.Tokens;
import dyvil.tools.compiler.util.ParserUtil;
import dyvil.tools.compiler.util.Util;
public final class ExpressionParser extends Parser implements ITypeConsumer, IValueConsumer
{
// Parser mode constants: each names one state of the expression state
// machine driven by parse(). They use distinct bit values but are compared
// one at a time by equality.
public static final int VALUE = 0x1;
public static final int ACCESS = 0x2;
public static final int DOT_ACCESS = 0x4;
public static final int STATEMENT = 0x8;
public static final int TYPE = 0x10;
public static final int CONSTRUCTOR = 0x20;
public static final int CONSTRUCTOR_END = 0x40;
public static final int ANONYMOUS_CLASS_END = 0x80;
public static final int CONSTRUCTOR_PARAMETERS = 0x100;
public static final int PARAMETERS_END = 0x2000;
public static final int SUBSCRIPT_END = 0x4000;
public static final int TYPE_ARGUMENTS_END = 0x8000;
public static final int BYTECODE_END = 0x10000;
public static final int PATTERN_IF = 0x20000;
public static final int PATTERN_END = 0x40000;
public static final int PARAMETERIZED_THIS_END = 0x80000;
public static final int PARAMETERIZED_SUPER_END = 0x100000;
// Receiver for the finished expression when this parser pops itself.
protected IValueConsumer valueConsumer;
// The (partial) expression built so far.
private IValue value;
// True when the current access was introduced by an explicit '.'.
private boolean explicitDot;
// Operator preceding this sub-expression; consulted for precedence handling.
private Operator operator;
/** Creates an expression parser that delivers its result to the given consumer. */
public ExpressionParser(IValueConsumer field)
{
    this.mode = VALUE;
    this.valueConsumer = field;
}
/**
 * Re-initializes this parser for reuse, clearing all accumulated state and
 * installing a new result consumer.
 *
 * @param field the consumer that receives the parsed expression
 */
public void reset(IValueConsumer field)
{
    this.valueConsumer = field;
    this.value = null;
    this.operator = null;
    this.explicitDot = false;
    this.mode = VALUE;
}
/**
 * Main state-machine entry point: consumes one token and advances this
 * expression parser's mode accordingly. Terminator tokens (';', ':', ',',
 * string-interpolation parts, close brackets) end the expression and pop
 * this parser, handing the accumulated value to the consumer.
 */
@Override
public void parse(IParserManager pm, IToken token)
{
    if (this.mode == END)
    {
        // Expression complete: deliver the value and give the token back.
        if (this.value != null)
        {
            this.valueConsumer.setValue(this.value);
        }
        pm.popParser(true);
        return;
    }
    int type = token.type();
    // Unconditional expression terminators, regardless of mode.
    switch (type)
    {
    case Symbols.SEMICOLON:
    case Symbols.COLON:
    case Symbols.COMMA:
    case Tokens.STRING_PART:
    case Tokens.STRING_END:
        if (this.value != null)
        {
            this.valueConsumer.setValue(this.value);
        }
        pm.popParser(true);
        return;
    }
    switch (this.mode)
    {
    case VALUE:
        // Expecting the first token of a value: literals, brackets, lambdas, ...
        switch (type)
        {
        case Tokens.STRING:
            this.value = new StringValue(token.raw(), token.stringValue());
            this.mode = ACCESS;
            return;
        case Tokens.STRING_START:
        {
            FormatStringExpression ssv = new FormatStringExpression(token);
            this.value = ssv;
            this.mode = ACCESS;
            pm.pushParser(new FormatStringParser(ssv), true);
            return;
        }
        case Tokens.CHAR:
            this.value = new CharValue(token.raw(), token.charValue());
            this.mode = ACCESS;
            return;
        case Tokens.INT:
            this.value = new IntValue(token.raw(), token.intValue());
            this.mode = ACCESS;
            return;
        case Tokens.LONG:
            this.value = new LongValue(token.raw(), token.longValue());
            this.mode = ACCESS;
            return;
        case Tokens.FLOAT:
            this.value = new FloatValue(token.raw(), token.floatValue());
            this.mode = ACCESS;
            return;
        case Tokens.DOUBLE:
            this.value = new DoubleValue(token.raw(), token.doubleValue());
            this.mode = ACCESS;
            return;
        case Symbols.ELLIPSIS:
            this.value = new WildcardValue(token.raw());
            this.mode = ACCESS;
            return;
        case Symbols.WILDCARD:
            return;
        case Symbols.OPEN_PARENTHESIS:
            // Could be '() => ...' (lambda), '()' (void value) or a tuple/lambda.
            IToken next = token.next();
            if (next.type() == Symbols.CLOSE_PARENTHESIS)
            {
                if (next.next().type() == Symbols.ARROW_OPERATOR)
                {
                    LambdaExpression le = new LambdaExpression(next.next().raw());
                    this.value = le;
                    pm.skip(2);
                    pm.pushParser(pm.newExpressionParser(le));
                    this.mode = ACCESS;
                    return;
                }
                this.value = new VoidValue(token.to(token.next()));
                pm.skip();
                this.mode = END;
                return;
            }
            pm.pushParser(new LambdaOrTupleParser(this), true);
            this.mode = ACCESS;
            return;
        case Symbols.OPEN_SQUARE_BRACKET:
            this.mode = ACCESS;
            pm.pushParser(new ArrayLiteralParser(this), true);
            return;
        case Symbols.OPEN_CURLY_BRACKET:
            this.mode = END;
            pm.pushParser(new StatementListParser(this), true);
            return;
        case Symbols.AT:
            // '@{' starts a bytecode block, otherwise an annotation value.
            if (token.next().type() == Symbols.OPEN_CURLY_BRACKET)
            {
                Bytecode bc = new Bytecode(token);
                pm.skip();
                pm.pushParser(new BytecodeParser(bc));
                this.mode = BYTECODE_END;
                this.value = bc;
                return;
            }
            Annotation a = new Annotation();
            pm.pushParser(new AnnotationParser(a));
            this.value = new AnnotationValue(a);
            this.mode = 0;
            return;
        case Symbols.ARROW_OPERATOR:
            LambdaExpression le = new LambdaExpression(token.raw());
            this.value = le;
            this.mode = ACCESS;
            pm.pushParser(pm.newExpressionParser(le));
            return;
        }
        if ((type & Tokens.IDENTIFIER) != 0)
        {
            // Identifier at the beginning of an expression
            Name name = token.nameValue();
            this.parseAccess(pm, token, type, name, pm.getOperator(name));
            return;
        }
        if (this.parseKeyword(pm, token, type))
        {
            return;
        }
        this.mode = ACCESS;
        // Leave the big switch and jump right over to the ACCESS
        // section
        break;
    case PATTERN_IF:
        // Optional 'if' guard after a case pattern.
        this.mode = PATTERN_END;
        if (type == Keywords.IF)
        {
            pm.pushParser(pm.newExpressionParser(v -> ((ICase) this.value).setCondition(v)));
            return;
        }
        //$FALL-THROUGH$
    case PATTERN_END:
        // Expect ':' or '=>' followed by the case action.
        if (type == Symbols.COLON || type == Symbols.ARROW_OPERATOR)
        {
            this.mode = END;
            if (token.next().type() != Keywords.CASE)
            {
                pm.pushParser(pm.newExpressionParser(v -> ((ICase) this.value).setAction(v)));
            }
            return;
        }
        pm.report(token, "Invalid Pattern - ':' expected");
        return;
    case ANONYMOUS_CLASS_END:
        this.value.expandPosition(token);
        this.mode = ACCESS;
        if (type != Symbols.CLOSE_CURLY_BRACKET)
        {
            pm.reparse();
            pm.report(token, "Invalid Anonymous Class List - '}' expected");
        }
        return;
    case PARAMETERS_END:
        this.mode = ACCESS;
        this.value.expandPosition(token);
        if (type != Symbols.CLOSE_PARENTHESIS)
        {
            pm.reparse();
            pm.report(token, "Invalid Argument List - ')' expected");
        }
        return;
    case SUBSCRIPT_END:
        this.mode = ACCESS;
        this.value.expandPosition(token);
        if (type != Symbols.CLOSE_SQUARE_BRACKET)
        {
            pm.reparse();
            pm.report(token, "Invalid Subscript Arguments - ']' expected");
        }
        return;
    case CONSTRUCTOR:
    {
        // 'new T' seen; either an anonymous class body or constructor arguments follow.
        ConstructorCall cc = (ConstructorCall) this.value;
        if (type == Symbols.OPEN_CURLY_BRACKET)
        {
            this.parseBody(pm, cc.toClassConstructor());
            return;
        }
        this.mode = CONSTRUCTOR_PARAMETERS;
        pm.reparse();
        return;
    }
    case CONSTRUCTOR_PARAMETERS:
    {
        // Parenthesized argument list, single applied argument, or no arguments.
        ICall icall = (ICall) this.value;
        if (type == Symbols.OPEN_PARENTHESIS)
        {
            IArguments arguments = this.parseArguments(pm, token.next());
            icall.setArguments(arguments);
            this.mode = CONSTRUCTOR_END;
            return;
        }
        if (ParserUtil.isExpressionTerminator(type))
        {
            this.mode = ACCESS;
            pm.reparse();
            return;
        }
        SingleArgument sa = new SingleArgument();
        icall.setArguments(sa);
        ExpressionParser ep = (ExpressionParser) pm.newExpressionParser(sa);
        ep.operator = Operators.DEFAULT;
        pm.pushParser(ep, true);
        this.mode = END;
        return;
    }
    case CONSTRUCTOR_END:
        if (type != Symbols.CLOSE_PARENTHESIS)
        {
            pm.reparse();
            pm.report(token, "Invalid Constructor Argument List - ')' expected");
        }
        this.value.expandPosition(token);
        this.mode = ACCESS;
        // A '{' right after the argument list makes this an anonymous class.
        if (token.next().type() == Symbols.OPEN_CURLY_BRACKET)
        {
            pm.skip();
            this.parseBody(pm, ((ConstructorCall) this.value).toClassConstructor());
            return;
        }
        return;
    case BYTECODE_END:
        this.valueConsumer.setValue(this.value);
        pm.popParser();
        this.value.expandPosition(token);
        if (type != Symbols.CLOSE_CURLY_BRACKET)
        {
            pm.reparse();
            pm.report(token, "Invalid Bytecode Expression - '}' expected");
        }
        return;
    case TYPE_ARGUMENTS_END:
        // End of 'method[T...]'; optional '(...)' argument list may follow.
        MethodCall mc = (MethodCall) this.value;
        IToken next = token.next();
        if (next.type() == Symbols.OPEN_PARENTHESIS)
        {
            pm.skip();
            mc.setArguments(this.parseArguments(pm, next.next()));
        }
        this.mode = ACCESS;
        if (type != Symbols.CLOSE_SQUARE_BRACKET)
        {
            pm.report(token, "Invalid Method Type Parameter List - ']' expected");
        }
        return;
    case PARAMETERIZED_THIS_END:
        this.mode = ACCESS;
        if (type != Symbols.CLOSE_SQUARE_BRACKET)
        {
            pm.report(token, "Invalid this Expression - ']' expected");
        }
        return;
    case PARAMETERIZED_SUPER_END:
        this.mode = ACCESS;
        if (type != Symbols.CLOSE_SQUARE_BRACKET)
        {
            pm.report(token, "Invalid super Expression - ']' expected");
        }
        return;
    }
    if (ParserUtil.isCloseBracket(type))
    {
        // Close bracket, end expression
        if (this.value != null)
        {
            this.valueConsumer.setValue(this.value);
        }
        pm.popParser(true);
        return;
    }
    if (this.mode == ACCESS)
    {
        // A value has been parsed; look for accesses, operators, casts, calls, ...
        if (type == Symbols.DOT)
        {
            this.mode = DOT_ACCESS;
            this.explicitDot = true;
            return;
        }
        this.explicitDot = false;
        switch (type)
        {
        case Keywords.ELSE:
            this.valueConsumer.setValue(this.value);
            pm.popParser(true);
            return;
        case Symbols.EQUALS:
            this.parseAssignment(pm, token);
            return;
        case Keywords.AS:
            CastOperator co = new CastOperator(token.raw(), this.value);
            pm.pushParser(pm.newTypeParser(co));
            this.value = co;
            return;
        case Keywords.IS:
            InstanceOfOperator io = new InstanceOfOperator(token.raw(), this.value);
            pm.pushParser(pm.newTypeParser(io));
            this.value = io;
            return;
        case Keywords.MATCH:
            // Parse a match expression
            // e.g. int1 match { ... }, this match { ... }
            MatchExpression me = new MatchExpression(token.raw(), this.value);
            pm.pushParser(new MatchExpressionParser(me));
            this.value = me;
            return;
        case Symbols.OPEN_SQUARE_BRACKET:
            // Parse a subscript getter
            // e.g. this[1], array[0]
            SubscriptGetter getter = new SubscriptGetter(token, this.value);
            this.value = getter;
            this.mode = SUBSCRIPT_END;
            pm.pushParser(new ExpressionListParser(getter.getArguments()));
            return;
        case Symbols.OPEN_PARENTHESIS:
            // Parse an apply call
            // e.g. 1("a"), this("stuff"), "myString"(2)
            ApplyMethodCall amc = new ApplyMethodCall(this.value.getPosition(), this.value, this.parseArguments(pm, token.next()));
            this.value = amc;
            this.mode = PARAMETERS_END;
            return;
        }
        if (ParserUtil.isIdentifier(type))
        {
            this.parseIdentifierAccess(pm, token, type);
            return;
        }
        if (this.operator != null)
        {
            // An operator context is active: this token cannot extend the
            // expression, so finish here.
            this.valueConsumer.setValue(this.value);
            pm.popParser(true);
            return;
        }
        // Anything else becomes the single argument of an apply call.
        SingleArgument sa = new SingleArgument();
        ApplyMethodCall amc = new ApplyMethodCall(this.value.getPosition(), this.value, sa);
        this.parseApply(pm, token, sa, Operators.DEFAULT);
        pm.reparse();
        this.value = amc;
        return;
    }
    if (this.mode == DOT_ACCESS)
    {
        // After an explicit '.', only an identifier may follow.
        if (ParserUtil.isIdentifier(type))
        {
            this.parseIdentifierAccess(pm, token, type);
            return;
        }
        pm.report(token, "Invalid Dot Access - Invalid " + token);
        return;
    }
    pm.report(token, "Invalid Expression - Invalid " + token);
    return;
}
/**
 * Creates the body and initializes parsing for anonymous classes.
 *
 * @param pm
 *            the current parsing context manager.
 * @param cc
 *            the anonymous class AST node.
 */
private void parseBody(IParserManager pm, ClassConstructor cc)
{
    IClass iclass = cc.getNestedClass();
    IClassBody body = iclass.getBody();
    pm.pushParser(new ClassBodyParser(iclass, body));
    this.mode = ANONYMOUS_CLASS_END;
    this.value = cc;
    // (removed a redundant trailing 'return;' in this void method)
}
/**
 * Starts parsing a parenthesized argument list, choosing the representation
 * from one-token lookahead: an immediate ')' yields empty arguments, an
 * identifier followed by ':' yields named arguments (a map), anything else a
 * positional argument list. Pushes the appropriate sub-parser as needed.
 *
 * @param pm   the current parsing context manager
 * @param next the first token after the opening parenthesis
 * @return the (still-to-be-filled) argument container
 */
private IArguments parseArguments(IParserManager pm, IToken next)
{
    int type = next.type();
    if (type == Symbols.CLOSE_PARENTHESIS)
    {
        return EmptyArguments.VISIBLE;
    }
    if (ParserUtil.isIdentifier(type) && next.next().type() == Symbols.COLON)
    {
        ArgumentMap map = new ArgumentMap();
        pm.pushParser(new ExpressionMapParser(map));
        return map;
    }
    ArgumentList list = new ArgumentList();
    pm.pushParser(new ExpressionListParser(list));
    return list;
}
/**
 * Parses an ACCESS sequence.
 *
 * @param pm
 *            the current parsing context manager.
 * @param token
 *            the current token, has to be any {@code IDENTIFIER} token.
 * @param type
 *            the {@code type} of the current {@code token}.
 * @param name
 *            the {@code nameValue} of the {@code token}.
 * @param op
 *            the registered operator for {@code name}, or null when the name
 *            is not an operator.
 */
private void parseAccess(IParserManager pm, IToken token, int type, Name name, Operator op)
{
    IToken next = token.next();
    int nextType = next.type();
    if (op != null && !this.explicitDot)
    {
        // Operator identifier without an explicit dot: build an operator call.
        if (this.value == null)
        {
            // Prefix position: the operand still has to be parsed.
            SingleArgument sa = new SingleArgument();
            MethodCall call = new MethodCall(token, null, name, sa);
            call.setDotless(!this.explicitDot);
            this.value = call;
            this.mode = ACCESS;
            this.parseApply(pm, token.next(), sa, op);
            return;
        }
        MethodCall call = new MethodCall(token, this.value, name);
        call.setDotless(!this.explicitDot);
        this.value = call;
        this.mode = ACCESS;
        // Non-postfix operators with more tokens following take a right operand.
        if (op.type != Operator.POSTFIX && !ParserUtil.isExpressionTerminator(nextType))
        {
            SingleArgument sa = new SingleArgument();
            call.setArguments(sa);
            this.parseApply(pm, token, sa, op);
        }
        return;
    }
    // Name is not a compound operator (does not end with '=')
    if (name.qualified.endsWith("$eq"))
    {
        // e.g. this += that
        op = pm.getOperator(Util.stripEq(name));
    }
    else
    {
        switch (nextType)
        {
        case Symbols.OPEN_PARENTHESIS:
            // e.g. this.call(...)
            MethodCall call = new MethodCall(token.raw(), this.value, name);
            call.setDotless(!this.explicitDot);
            this.value = call;
            this.mode = PARAMETERS_END;
            pm.skip();
            call.setArguments(this.parseArguments(pm, next.next()));
            return;
        case Symbols.OPEN_SQUARE_BRACKET:
            // e.g. this.field[...]
            SubscriptGetter getter = new SubscriptGetter(token, new FieldAccess(token.raw(), this.value, name));
            this.value = getter;
            this.mode = SUBSCRIPT_END;
            pm.skip();
            pm.pushParser(new ExpressionListParser(getter.getArguments()));
            return;
        case Symbols.ARROW_OPERATOR:
            // e.g. x => ... : single-parameter lambda
            LambdaExpression lv = new LambdaExpression(next.raw(), new MethodParameter(token.raw(), token.nameValue()));
            this.mode = END;
            this.value = lv;
            pm.pushParser(pm.newExpressionParser(lv));
            pm.skip();
            return;
        case Symbols.GENERIC_CALL:
            // e.g. this.call[T, ...]
            MethodCall mc = new MethodCall(token.raw(), this.value, token.nameValue());
            GenericData gd = new GenericData();
            mc.setGenericData(gd);
            mc.setDotless(!this.explicitDot);
            this.value = mc;
            this.mode = TYPE_ARGUMENTS_END;
            pm.skip();
            pm.pushParser(new TypeListParser(gd));
            return;
        }
        // ... EXPRESSION-TERMINATOR
        // e.g. this.someField ;
        if (ParserUtil.isExpressionTerminator(nextType))
        {
            FieldAccess access = new FieldAccess(token, this.value, name);
            access.setDotless(!this.explicitDot);
            this.value = access;
            this.mode = ACCESS;
            return;
        }
        // ... IDENTIFIER ...
        // e.g. this call ...
        if (ParserUtil.isIdentifier(nextType))
        {
            // ... OPERATOR ...
            // e.g. this + that
            // ... IDENTIFIER NON-EXPRESSION-TERMINATOR
            // e.g. this.plus that
            if (ParserUtil.isOperator(pm, next, nextType) || !ParserUtil.isExpressionTerminator(next.next().type()))
            {
                FieldAccess access = new FieldAccess(token, this.value, name);
                access.setDotless(!this.explicitDot);
                this.value = access;
                this.mode = ACCESS;
                return;
            }
        }
        // else ->
        // e.g. this.call 10;
    }
    // Fallback: a dotless method call whose single argument follows.
    SingleArgument sa = new SingleArgument();
    MethodCall call = new MethodCall(token, this.value, name, sa);
    call.setDotless(!this.explicitDot);
    this.value = call;
    this.mode = ACCESS;
    this.parseApply(pm, token.next(), sa, op == null ? Operators.DEFAULT : op);
    return;
}
/**
 * Handles an identifier token encountered in ACCESS position, resolving
 * operator precedence and associativity against the operator this parser
 * was started with before delegating to {@code parseAccess}.
 *
 * @param pm
 *            the current parsing context manager
 * @param token
 *            the identifier token
 * @param type
 *            the token type
 */
private void parseIdentifierAccess(IParserManager pm, IToken token, int type)
{
    final Name name = token.nameValue();
    final Operator nextOperator = pm.getOperator(name);
    // Precedence only matters for dotless chains started by a pending operator.
    if (!this.explicitDot && this.operator != null)
    {
        final int pending = this.operator.precedence;
        if (nextOperator == null || pending > nextOperator.precedence)
        {
            // The pending operator binds tighter - hand the value back and pop.
            this.valueConsumer.setValue(this.value);
            pm.popParser(true);
            return;
        }
        if (pending == nextOperator.precedence)
        {
            // Equal precedence: associativity decides who wins.
            if (nextOperator.type == Operator.INFIX_LEFT)
            {
                this.valueConsumer.setValue(this.value);
                pm.popParser(true);
                return;
            }
            if (nextOperator.type == Operator.INFIX_NONE)
            {
                pm.report(token, "Invalid Operator " + name + " - Operator without associativity is not allowed at this location");
                return;
            }
            // INFIX_RIGHT (and any other type) continues parsing below.
        }
    }
    this.parseAccess(pm, token, type, name, nextOperator);
}
/**
 * Parses an APPLY call without parentheses, e.g. {@code this 3},
 * {@code print "abc"} or {@code button { ... }}. Depending on the token
 * that is passed, the caller may have to invoke {@code pm.reparse()}
 * afterwards.
 *
 * @param pm
 *            the current parsing context manager
 * @param token
 *            the first token of the expression that forms the argument of
 *            the APPLY method
 * @param sa
 *            the argument container
 * @param op
 *            the operator that precedes this call; may be null
 */
private void parseApply(IParserManager pm, IToken token, SingleArgument sa, Operator op)
{
    // Anything that is not a curly brace is parsed as a plain expression,
    // carrying the surrounding operator context for precedence handling.
    if (token.type() != Symbols.OPEN_CURLY_BRACKET)
    {
        ExpressionParser parser = (ExpressionParser) pm.newExpressionParser(sa);
        parser.operator = op;
        pm.pushParser(parser);
        return;
    }
    // A curly brace starts a statement-list argument, e.g. button { ... }
    StatementListParser statementParser = new StatementListParser(sa);
    statementParser.setApplied(true);
    pm.pushParser(statementParser);
}
/**
 * Parses an assignment based on the current {@code value}, converting the
 * already-parsed left-hand side into the matching assignment node.
 *
 * @param pm
 *            the current parsing context manager
 * @param token
 *            the current token, i.e. the '=' sign
 */
private void parseAssignment(IParserManager pm, IToken token)
{
    // Nothing on the left-hand side - a '=' here is a stray token.
    if (this.value == null)
    {
        this.mode = VALUE;
        pm.report(token, "Invalid Assignment - Delete this token");
        return;
    }
    final ICodePosition position = this.value.getPosition();
    final int tag = this.value.valueTag();
    // a.b = c  ->  field assignment
    if (tag == IValue.FIELD_ACCESS)
    {
        FieldAccess access = (FieldAccess) this.value;
        FieldAssign assign = new FieldAssign(position, access.getInstance(), access.getName());
        this.value = assign;
        pm.pushParser(pm.newExpressionParser(assign));
        return;
    }
    // a(x) = c  ->  update call on the apply receiver
    if (tag == IValue.APPLY_CALL)
    {
        ApplyMethodCall apply = (ApplyMethodCall) this.value;
        UpdateMethodCall update = new UpdateMethodCall(position, apply.getValue(), apply.getArguments());
        this.value = update;
        pm.pushParser(pm.newExpressionParser(update));
        return;
    }
    // a.m(x) = c  ->  update call on the field access a.m
    if (tag == IValue.METHOD_CALL)
    {
        MethodCall method = (MethodCall) this.value;
        FieldAccess receiver = new FieldAccess(position, method.getValue(), method.getName());
        UpdateMethodCall update = new UpdateMethodCall(position, receiver, method.getArguments());
        this.value = update;
        pm.pushParser(pm.newExpressionParser(update));
        return;
    }
    // a[i] = c  ->  subscript setter
    if (tag == IValue.SUBSCRIPT_GET)
    {
        SubscriptGetter getter = (SubscriptGetter) this.value;
        SubscriptSetter setter = new SubscriptSetter(position, getter.getValue(), getter.getArguments());
        this.value = setter;
        pm.pushParser(pm.newExpressionParser(setter));
        return;
    }
    pm.report(token, "Invalid Assignment");
}
/**
 * Attempts to parse the given token as a keyword expression: literals,
 * this/super forms, type operators, and statement keywords.
 *
 * @param pm
 *            the current parsing context manager
 * @param token
 *            the current token
 * @param type
 *            the token type
 * @return true if the token was consumed as a keyword expression, false
 *         otherwise (the caller should try other interpretations)
 */
private boolean parseKeyword(IParserManager pm, IToken token, int type)
{
    switch (type)
    {
    // --- simple literals ---
    case Keywords.NULL:
        this.value = new NullValue(token.raw());
        this.mode = ACCESS;
        return true;
    case Keywords.NIL:
        this.value = new NilValue(token.raw());
        this.mode = ACCESS;
        return true;
    case Keywords.TRUE:
        this.value = new BooleanValue(token.raw(), true);
        this.mode = ACCESS;
        return true;
    case Keywords.FALSE:
        this.value = new BooleanValue(token.raw(), false);
        this.mode = ACCESS;
        return true;
    // --- this / super, with optional [type] parameterization or .new ---
    case Keywords.THIS:
    {
        IToken next = token.next();
        switch (next.type())
        {
        // this[type]
        case Symbols.OPEN_SQUARE_BRACKET:
            ThisValue tv = new ThisValue(token.raw());
            this.mode = PARAMETERIZED_THIS_END;
            this.value = tv;
            pm.skip();
            pm.pushParser(new TypeParser(tv));
            return true;
        case Symbols.DOT:
            // this.new - constructor delegation to this class
            IToken next2 = next.next();
            if (next2.type() == Keywords.NEW)
            {
                this.value = new InitializerCall(next2.raw(), false);
                pm.skip(2);
                this.mode = CONSTRUCTOR_PARAMETERS;
                return true;
            }
        }
        // plain 'this'
        this.value = new ThisValue(token.raw());
        this.mode = ACCESS;
        return true;
    }
    case Keywords.SUPER:
    {
        IToken next = token.next();
        switch (next.type())
        {
        // super[type]
        case Symbols.OPEN_SQUARE_BRACKET:
            SuperValue sv = new SuperValue(token.raw());
            this.mode = PARAMETERIZED_SUPER_END;
            this.value = sv;
            pm.skip();
            pm.pushParser(new TypeParser(sv));
            return true;
        case Symbols.DOT:
            // super.new - constructor delegation to the super class
            IToken next2 = next.next();
            if (next2.type() == Keywords.NEW)
            {
                this.value = new InitializerCall(next2.raw(), true);
                pm.skip(2);
                this.mode = CONSTRUCTOR_PARAMETERS;
                return true;
            }
        }
        // plain 'super'
        this.value = new SuperValue(token.raw());
        this.mode = ACCESS;
        return true;
    }
    // --- type operators ---
    case Keywords.CLASS:
    {
        ClassOperator co = new ClassOperator(token);
        this.value = co;
        pm.pushParser(pm.newTypeParser(co));
        this.mode = ACCESS;
        return true;
    }
    case Keywords.TYPE:
    {
        TypeOperator to = new TypeOperator(token);
        this.value = to;
        pm.pushParser(pm.newTypeParser(to));
        this.mode = ACCESS;
        return true;
    }
    // new Type(...) - the type parser reports back via setType
    case Keywords.NEW:
    {
        ConstructorCall call = new ConstructorCall(token);
        this.mode = CONSTRUCTOR;
        this.value = call;
        pm.pushParser(pm.newTypeParser(this));
        return true;
    }
    // --- statement keywords ---
    case Keywords.RETURN:
    {
        // NOTE(review): unlike most branches, 'mode' is not updated here -
        // confirm this is intentional.
        ReturnStatement rs = new ReturnStatement(token.raw());
        this.value = rs;
        pm.pushParser(pm.newExpressionParser(rs));
        return true;
    }
    case Keywords.IF:
    {
        IfStatement is = new IfStatement(token.raw());
        this.value = is;
        pm.pushParser(new IfStatementParser(is));
        this.mode = END;
        return true;
    }
    case Keywords.ELSE:
    {
        // 'else' is only valid when this parser was started by an if-parser;
        // pop back so the parent can consume it.
        if (!(this.parent instanceof IfStatementParser))
        {
            pm.report(token, "Invalid Expression - 'else' not allowed at this location");
            return true;
        }
        this.valueConsumer.setValue(this.value);
        pm.popParser(true);
        return true;
    }
    case Keywords.WHILE:
    {
        WhileStatement statement = new WhileStatement(token);
        this.value = statement;
        pm.pushParser(new WhileStatementParser(statement));
        this.mode = END;
        return true;
    }
    case Keywords.DO:
    {
        DoStatement statement = new DoStatement(token);
        this.value = statement;
        pm.pushParser(new DoStatementParser(statement));
        this.mode = END;
        return true;
    }
    case Keywords.FOR:
    {
        // The for-parser delivers its result directly to our consumer.
        pm.pushParser(new ForStatementParser(this.valueConsumer, token.raw()));
        this.mode = END;
        return true;
    }
    // break / continue / goto each take an optional label identifier
    case Keywords.BREAK:
    {
        BreakStatement statement = new BreakStatement(token);
        this.value = statement;
        IToken next = token.next();
        if (ParserUtil.isIdentifier(next.type()))
        {
            statement.setName(next.nameValue());
            pm.skip();
        }
        this.mode = END;
        return true;
    }
    case Keywords.CONTINUE:
    {
        ContinueStatement statement = new ContinueStatement(token);
        this.value = statement;
        IToken next = token.next();
        if (ParserUtil.isIdentifier(next.type()))
        {
            statement.setName(next.nameValue());
            pm.skip();
        }
        this.mode = END;
        return true;
    }
    case Keywords.GOTO:
    {
        GoToStatement statement = new GoToStatement(token);
        this.value = statement;
        IToken next = token.next();
        if (ParserUtil.isIdentifier(next.type()))
        {
            statement.setName(next.nameValue());
            pm.skip();
        }
        this.mode = END;
        return true;
    }
    /*
     * case Keywords.CASE: { CaseExpression pattern = new
     * CaseExpression(token.raw()); pm.pushParser(new
     * PatternParser(pattern)); this.mode = PATTERN_IF; this.value =
     * pattern; return true; }
     */
    case Keywords.TRY:
    {
        TryStatement statement = new TryStatement(token.raw());
        pm.pushParser(new TryStatementParser(statement));
        this.mode = END;
        this.value = statement;
        return true;
    }
    case Keywords.CATCH:
    {
        // Like 'else', only valid when a try-parser is the parent.
        if (!(this.parent instanceof TryStatementParser))
        {
            pm.report(token, "Invalid Expression - 'catch' not allowed at this location");
            return true;
        }
        this.valueConsumer.setValue(this.value);
        pm.popParser(true);
        return true;
    }
    case Keywords.FINALLY:
    {
        if (!(this.parent instanceof TryStatementParser))
        {
            pm.report(token, "Invalid Expression - 'finally' not allowed at this location");
            return true;
        }
        this.valueConsumer.setValue(this.value);
        pm.popParser(true);
        return true;
    }
    case Keywords.THROW:
    {
        ThrowStatement statement = new ThrowStatement(token.raw());
        pm.pushParser(pm.newExpressionParser(statement));
        this.mode = END;
        this.value = statement;
        return true;
    }
    case Keywords.SYNCHRONIZED:
    {
        SyncStatement statement = new SyncStatement(token.raw());
        pm.pushParser(new SyncStatementParser(statement));
        this.mode = END;
        this.value = statement;
        return true;
    }
    }
    // Not a keyword expression.
    return false;
}
/**
 * Receives a type from a nested type parser. Without an existing value the
 * type becomes a class access; otherwise it is forwarded to the current
 * (typed) value, e.g. a constructor call pushed by the NEW branch.
 */
@Override
public void setType(IType type)
{
    if (this.value != null)
    {
        ((ITyped) this.value).setType(type);
        return;
    }
    this.value = new ClassAccess(type.getPosition(), type);
}
/**
 * Stores the given value as the current result of this expression parser.
 */
@Override
public void setValue(IValue value)
{
    this.value = value;
}
}
|
package edu.psu.compbio.seqcode.projects.akshay.chexmix.analysis;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import cern.colt.Arrays;
import edu.psu.compbio.seqcode.gse.datasets.motifs.WeightMatrix;
import edu.psu.compbio.seqcode.gse.utils.Pair;
import edu.psu.compbio.seqcode.projects.akshay.chexmix.datasets.BindingLocation;
import edu.psu.compbio.seqcode.projects.akshay.chexmix.datasets.Config;
import edu.psu.compbio.seqcode.projects.akshay.chexmix.datasets.CustomReturn;
import edu.psu.compbio.seqcode.projects.akshay.chexmix.datasets.Membership;
import edu.psu.compbio.seqcode.projects.akshay.chexmix.utils.ChexmixSandbox;
import edu.psu.compbio.seqcode.projects.akshay.chexmix.utils.QueryGenome;
import edu.psu.compbio.seqcode.projects.akshay.chexmix.utils.QueryMm10;
/**
 * Driver for the ChExMix clustering pipeline: loads tags and peak
 * locations, iteratively builds seed tag profiles, scans all locations
 * against each seed, assigns cluster memberships, refines the profiles,
 * assesses the clusters and finally runs MEME on each cluster's sequences.
 */
public class Chexmix {
    // Run configuration parsed from the command line.
    public Config c;
    // One concatenated tag profile per discovered cluster/seed.
    public List<int[]> profiles_in_the_dataset = new ArrayList<int[]>();
    // Cluster membership of every binding location that passed a seed scan.
    public List<Membership> cluster_assignment = new ArrayList<Membership>();
    // Cluster-quality matrix filled near the end of the run.
    public double[][] cluster_assessment;

    public Chexmix(Config conf) {
        this.c = conf;
    }

    public static void main(String[] args) throws IOException{
        Config c = new Config(args);
        Chexmix driver = new Chexmix(c);
        if(driver.c.helpWanted()){
            System.err.println("Chexmix:");
            System.err.println(driver.c.getArgsList());
        }
        else{
            // NOTE(review): the result of System.currentTimeMillis() is discarded
            // here (and at the other call sites below) - looks like leftover
            // timing code; confirm before removing.
            System.currentTimeMillis();
            System.out.println("\n============================ Loading Tags/Reads ============================");
            LoadTags tagsloader = new LoadTags();
            tagsloader.loadHits(driver.c, driver.c.useNonUnique());
            System.out.println("\n============================ Filling Binding Locations ============================");
            // Read the peaks file; each line is tab-separated. Columns used here:
            // 0 = chromosome, 3/4 = interval start/end (midpoint is derived),
            // 5 = coverage, 6 = motif orientation.
            BufferedReader brpeaks = new BufferedReader(new FileReader(driver.c.getPeaksFilePath()));
            List<BindingLocation> allbls = new ArrayList<BindingLocation>();
            String currentline = brpeaks.readLine();
            Map<BindingLocation, String> motif_orientation = new HashMap<BindingLocation, String>();
            Map<BindingLocation, Double> location_coverage = new HashMap<BindingLocation, Double>();
            while(currentline != null){
                String[] pieces = currentline.split("\t");
                int tempMidpoint = Integer.parseInt(pieces[3])+((Integer.parseInt(pieces[4])-Integer.parseInt(pieces[3]))/2);
                String tempChr = pieces[0];
                BindingLocation temploc = new BindingLocation(tempMidpoint, tempChr, driver.c);
                temploc.filltags(tagsloader);
                motif_orientation.put(temploc, pieces[6]);
                location_coverage.put(temploc, Double.parseDouble(pieces[5]));
                allbls.add(temploc);
                currentline = brpeaks.readLine();
            }
            brpeaks.close();
            System.currentTimeMillis();
            // Attach genomic sequence to each binding location.
            // NOTE(review): seqloader is only initialized when the genome is
            // "mm10"; any other genome name leads to a NullPointerException in
            // the loop below - confirm whether other genomes should be supported.
            QueryGenome seqloader = null;
            if(driver.c.getGenomeName().equals("mm10")){
                seqloader = new QueryMm10();
                seqloader.fillGenomePath();
            }
            for(int h=0; h< allbls.size(); h++){
                String postemp = seqloader.execute(allbls.get(h));
                //debug line
                System.out.println(QueryGenome.cache.keySet().size());
                //ends
                allbls.get(h).fillSeqs(postemp.toUpperCase());
            }
            // Keep only the configured top percentage of the peak list
            // (the input is presumably ranked; confirm against the peak caller).
            int size_to_consider = (int) (driver.c.getListPercentageToCosider()*0.01*allbls.size());
            List<BindingLocation> totalbls= new ArrayList<BindingLocation>();
            for(int k=0; k< size_to_consider; k++){
                totalbls.add(allbls.get(k));
            }
            int i=1;
            System.out.println("Total no of Binding Locations in the input peak file are: "+allbls.size() );
            System.out.println("Total no of Binding Locations that are being considered: "+totalbls.size() );
            // Write the composite profile over all input locations.
            // NOTE(review): the writers in this method are only closed on the
            // success path (no try/finally) - an exception leaks the handles.
            File file_entire_composite = new File(driver.c.getOutName()+"_entire_locations_composite.tab");
            if(!file_entire_composite.exists()){
                file_entire_composite.createNewFile();
            }
            FileWriter fw_entire_composite = new FileWriter(file_entire_composite.getAbsoluteFile());
            BufferedWriter br_entire_composite = new BufferedWriter(fw_entire_composite);
            int[] allblscomposite = ChexmixSandbox.getCompositeFromBlLisr(allbls,motif_orientation);
            for(int k=0; k<allblscomposite.length; k++){
                br_entire_composite.write(k+"\t"+allblscomposite[k]+"\n");
            }
            br_entire_composite.close();
            // Main discovery loop: one iteration per seed profile, until the
            // cycle budget is used up or too few locations remain.
            while(i<=driver.c.getNoOfCycles() && totalbls.size()>driver.c.getHowDeepToSearch()*driver.c.getNoTopBls()){
                int counter = 0;
                System.out.println("\n============================ Building Seed Profile - "+i+" ============================");
                // Seed from the current top-ranked locations.
                List<BindingLocation> selectedbls = new ArrayList<BindingLocation>();
                for(counter=0; counter<driver.c.getNoTopBls(); counter++){
                    selectedbls.add(totalbls.get(counter));
                }
                BuildSeed seedbuilder = new BuildSeed(selectedbls, driver.c);
                int[] profile=null;
                int Final_number_in_profile=0;
                CustomReturn profile_cr = null;
                // NOTE(review): profile_cr stays null unless the scheme is
                // "scheme4"; the loop below would then NPE - confirm intended.
                if(driver.c.getSchemename().equals("scheme4")){
                    profile_cr = seedbuilder.executeScheme4(driver.c);
                }
                // Accept, extend or rebuild the seed depending on how many
                // locations it captured relative to getNoTopBls().
                while(counter < driver.c.getHowDeepToSearch()*driver.c.getNoTopBls() && counter < totalbls.size()){
                    //debug line
                    System.out.println(counter);
                    System.out.println(profile_cr.no_in_seed);
                    //end
                    if(profile_cr.no_in_seed >= driver.c.getNoTopBls()/4){
                        // Strong seed: accept as-is.
                        profile = profile_cr.profile;
                        Final_number_in_profile = profile_cr.no_in_seed;
                        break;
                    }
                    else if(profile_cr.no_in_seed < driver.c.getNoTopBls()/4 && profile_cr.no_in_seed > driver.c.getNoTopBls()/10){
                        // Moderate seed: greedily add further locations whose
                        // scan correlation passes the seed cutoff.
                        profile = profile_cr.profile;
                        int no_bl_already_in= profile_cr.no_in_seed;
                        while(no_bl_already_in <= driver.c.getNoTopBls()/4 && counter < driver.c.getHowDeepToSearch()*driver.c.getNoTopBls() ){
                            BindingLocation to_scan = totalbls.get(counter);
                            CustomReturn temp_cr = to_scan.scanConcVecWithBl(profile, driver.c.getIntSize());
                            //debug line
                            System.out.println(temp_cr.pcc);
                            //end
                            if(temp_cr.pcc > driver.c.getSeedCutoff()){
                                List<Integer> addtoprofile = to_scan.getConcatenatedTags(temp_cr.maxvec.midpoint, temp_cr.maxvec.range, temp_cr.maxvec.orientation);
                                for(int k=0; k< addtoprofile.size(); k++){
                                    profile[k] = profile[k]+addtoprofile.get(k);
                                }
                                no_bl_already_in++;
                            }
                            counter++;
                        }
                        Final_number_in_profile = no_bl_already_in;
                        //debug
                        System.out.println(Final_number_in_profile);
                        //end
                        break;
                    }
                    else if(profile_cr.no_in_seed <= driver.c.getNoTopBls()/10){
                        // Weak seed: rebuild from the next batch of locations.
                        int temp_count=0;
                        selectedbls=new ArrayList<BindingLocation>();
                        while(temp_count<driver.c.getNoTopBls()){
                            selectedbls.add(totalbls.get(counter));
                            counter++;
                            temp_count++;
                        }
                        seedbuilder = new BuildSeed(selectedbls, driver.c);
                        if(driver.c.getSchemename().equals("scheme4")){
                            profile_cr = seedbuilder.executeScheme4(driver.c);
                        }
                    }
                }
                // Stop discovering new profiles once a seed is too sparse.
                if(Final_number_in_profile < driver.c.getNoTopBls()/driver.c.getFactorToAddIteratively()){
                    int no_of_porfiles_in_dataset = i-1;
                    System.out.println("There are "+no_of_porfiles_in_dataset+" profiles in this dataset");
                    break;
                }
                System.out.println("Composite of seed "+i+":");
                System.out.println(Arrays.toString(profile));
                // Persist the seed profile composite.
                File file = new File(driver.c.getOutName()+"_seed_profile_composite_"+i+".tab");
                if(!file.exists()){
                    file.createNewFile();
                }
                FileWriter fw = new FileWriter(file.getAbsoluteFile());
                BufferedWriter br_profile = new BufferedWriter(fw);
                for(int j=0; j<profile.length; j++){
                    br_profile.write(Integer.toString(j+1)+"\t"+Integer.toString(profile[j])+"\n");
                }
                br_profile.close();
                driver.profiles_in_the_dataset.add(profile);
                // Scan the remaining locations against this seed and record
                // memberships for those above the cutoff.
                System.out.println("\n============================ Scanning the entire list of binding locations - "+i+" ============================");
                LocationsScanner scanner = new LocationsScanner(totalbls, driver.c, profile);
                System.out.println("No of locations that match the seed profile "+i+" are:"+scanner.getListOfBlsThatPassCuttoff().size());
                List<Membership> temp_add_to_cluster_assignment = scanner.getMembershipsForThoseThatPassCuttoff(i, driver.c);
                for(Membership temp : temp_add_to_cluster_assignment){
                    driver.cluster_assignment.add(temp);
                }
                // Dump coverage-vs-correlation for every scanned location.
                File file_pcc = new File(driver.c.getOutName()+"_complete_list_pcc_"+i+".tab");
                if(!file_pcc.exists()){
                    file_pcc.createNewFile();
                }
                FileWriter fw_pcc = new FileWriter(file_pcc.getAbsoluteFile());
                BufferedWriter br_pcc = new BufferedWriter(fw_pcc);
                Map<BindingLocation, Double> tempmap = scanner.getmapofAllblscan();
                for(BindingLocation tempbl :tempmap.keySet()){
                    br_pcc.write(location_coverage.get(tempbl)+"\t"+tempmap.get(tempbl)+"\n");
                }
                br_pcc.close();
                // Composite of the tags that passed the cutoff.
                File file_tagsPass = new File(driver.c.getOutName()+"_scan_pass_profile_composite_"+i+".tab");
                if(!file_tagsPass.exists()){
                    file_tagsPass.createNewFile();
                }
                FileWriter fw_tagsPass = new FileWriter(file_tagsPass.getAbsoluteFile());
                BufferedWriter br_tagsPass = new BufferedWriter(fw_tagsPass);
                // 'c' is the same Config instance as driver.c (assigned above).
                int[][] temp = scanner.getTagsThatPassCuttoff(c);
                int[] composite_passed = new int[temp[0].length];
                for(int j=0; j<temp.length; j++){
                    for(int k=0; k<temp[j].length; k++){
                        if(j==0){
                            composite_passed[k] = temp[j][k];
                        }
                        else{
                            composite_passed[k] = composite_passed[k] + temp[j][k];
                        }
                    }
                }
                for(int j=0; j<composite_passed.length; j++){
                    br_tagsPass.write(j+"\t"+composite_passed[j]+"\n");
                }
                br_tagsPass.close();
                System.currentTimeMillis();
                // Continue the next cycle with the locations that did NOT match.
                totalbls = scanner.getListOfBlsThatDoNotPassCuttOff();
                int[] tempcomposite = ChexmixSandbox.getCompositeFromBlLisr(totalbls, motif_orientation);
                File file_remaining_all_composite = new File(driver.c.getOutName()+"_remaining_all_composite_"+i+".tab");
                if(!file_remaining_all_composite.exists()){
                    file_remaining_all_composite.createNewFile();
                }
                FileWriter fw_remaining_all_composite = new FileWriter(file_remaining_all_composite.getAbsoluteFile());
                BufferedWriter br_remaining_all_composite = new BufferedWriter(fw_remaining_all_composite);
                for(int j=0; j< tempcomposite.length; j++){
                    br_remaining_all_composite.write(j+"\t"+tempcomposite[j]+"\n");
                }
                br_remaining_all_composite.close();
                i++;
            } //end of while
            // extending the seed profiles
            System.out.println("Expanding the seed profiles for each cluster group");
            for(int l=0; l< driver.profiles_in_the_dataset.size(); l++){
                int add_till = 20;
                int k=0;
                // NOTE(review): k is incremented without a bound check against
                // cluster_assignment.size(); if fewer matching members exist than
                // the target count, this throws IndexOutOfBoundsException - verify.
                while(add_till < driver.c.getFactorToRefineSeedProfiles()*driver.c.getNoTopBls()){
                    if(driver.cluster_assignment.get(k).membership == l+1){
                        List<Integer> add_to_profile = driver.cluster_assignment.get(k).bl.getConcatenatedTags(driver.cluster_assignment.get(k).cr.maxvec.midpoint,
                            driver.cluster_assignment.get(k).cr.maxvec.range, driver.cluster_assignment.get(k).cr.maxvec.orientation);
                        for(int m =0; m< add_to_profile.size(); m++){
                            driver.profiles_in_the_dataset.get(l)[m] = driver.profiles_in_the_dataset.get(l)[m] + add_to_profile.get(m);
                        }
                        add_till++;
                        //debug line
                        System.out.println(add_till);
                    }
                    k++;
                }
            }
            // re assigning cluster membership
            System.out.println("Reassigning cluster membership");
            List<Membership> temp = ChexmixSandbox.perfromReAssignmentOfMembership(driver.cluster_assignment, driver.profiles_in_the_dataset);
            driver.cluster_assignment = temp;
            temp = null;
            // do cluster assessment
            System.out.println("Cluster assessment");
            driver.cluster_assessment = ChexmixSandbox.getClusterAssesmsent(driver.cluster_assignment, driver.profiles_in_the_dataset);
            for(int m = 0; m< driver.cluster_assessment.length; m++){
                String out ="";
                for(int n=0; n< driver.cluster_assessment[m].length; n++){
                    out = out + "\t" + Double.toString(driver.cluster_assessment[m][n]);
                }
                System.out.println(out);
            }
            // Run meme to find motifs
            for(int p=0; p< driver.profiles_in_the_dataset.size(); p++){
                int cluster_name = p+1;
                System.out.println(cluster_name);
                // Collect the best-PCC sequence of every member of this cluster.
                List<String> seqs = new ArrayList<String>();
                for(int k=0; k < driver.cluster_assignment.size(); k++){
                    if(driver.cluster_assignment.get(k).membership == cluster_name){
                        String tempseq = driver.cluster_assignment.get(k).getSeqAtMaxPCC();
                        seqs.add(tempseq);
                    }
                }
                MemeRun meme = new MemeRun(driver.c);
                Pair<List<WeightMatrix>,List<WeightMatrix>> memeout = meme.execute(seqs, "cluster_"+Integer.toString(cluster_name), false);
                for(int k=0; k < memeout.car().size(); k++){
                    System.out.println(WeightMatrix.printTransfacMatrix(memeout.car().get(k), "cluster_"+cluster_name));
                }
            }
            //printing cluster files
            // NOTE(review): file names use k (0-based) while membership uses k+1
            // (1-based) - confirm the off-by-one naming is intentional.
            System.out.println("Printing Cluster Files");
            for(int k=0; k < driver.profiles_in_the_dataset.size(); k++){
                File file_cluster = new File(driver.c.getOutName()+"_cluster_"+k+".peaks");
                if(!file_cluster.exists()){
                    file_cluster.createNewFile();
                }
                FileWriter fw_cluster = new FileWriter(file_cluster.getAbsoluteFile());
                BufferedWriter br_cluster = new BufferedWriter(fw_cluster);
                for(int m=0; m < driver.cluster_assignment.size(); m++){
                    if(driver.cluster_assignment.get(m).membership == k+1){
                        br_cluster.write(driver.cluster_assignment.get(m).getPointName()+"\t"+driver.c.getIntSize()+"\t"+driver.cluster_assignment.get(m).membership+
                            "\t"+driver.cluster_assignment.get(m).getPCC()+"\t"+driver.cluster_assignment.get(m).bl.getName()+"\n");
                    }
                }
                br_cluster.close();
            }
        } // end of else
    } // end of main
} // end of class
|
package gov.nih.nci.cananolab.service.particle;
import gov.nih.nci.cananolab.domain.common.Source;
import gov.nih.nci.cananolab.domain.particle.NanoparticleSample;
import gov.nih.nci.cananolab.dto.common.PublicationBean;
import gov.nih.nci.cananolab.dto.common.UserBean;
import gov.nih.nci.cananolab.dto.particle.ParticleBean;
import gov.nih.nci.cananolab.exception.CaNanoLabSecurityException;
import gov.nih.nci.cananolab.exception.DuplicateEntriesException;
import gov.nih.nci.cananolab.exception.ParticleException;
import gov.nih.nci.cananolab.service.security.AuthorizationService;
import gov.nih.nci.cananolab.util.SortableName;
import java.util.List;
import java.util.SortedSet;
/**
 * Interface defining service methods involving nanoparticle samples.
 *
 * @author pansu
 *
 */
public interface NanoparticleSampleService {
    /**
     *
     * @return all particle sources
     */
    public SortedSet<Source> findAllParticleSources() throws ParticleException;

    /**
     *
     * @return all particle sources visible to user
     */
    public SortedSet<Source> findAllParticleSources(UserBean user)
            throws ParticleException;

    /**
     * Persist a new nanoparticle sample or update an existing nanoparticle
     * sample
     *
     * @param particleSample
     * @throws ParticleException,
     *             DuplicateEntriesException
     */
    public void saveNanoparticleSample(NanoparticleSample particleSample)
            throws ParticleException, DuplicateEntriesException;

    /**
     * Search for nanoparticle samples matching the given criteria.
     *
     * @param particleSource
     * @param nanoparticleEntityClassNames
     * @param otherNanoparticleTypes
     * @param functionalizingEntityClassNames
     * @param otherFunctionalizingEntityTypes
     * @param functionClassNames
     * @param otherFunctionTypes
     * @param characterizationClassNames
     * @param wordList
     * @return the matching particle beans
     * @throws ParticleException
     */
    public List<ParticleBean> findNanoparticleSamplesBy(String particleSource,
            String[] nanoparticleEntityClassNames,
            String[] otherNanoparticleTypes,
            String[] functionalizingEntityClassNames,
            String[] otherFunctionalizingEntityTypes,
            String[] functionClassNames, String[] otherFunctionTypes,
            String[] characterizationClassNames, String[] wordList)
            throws ParticleException;

    /**
     * Look up a nanoparticle sample by its id.
     *
     * @param particleId
     * @return the matching particle bean
     * @throws ParticleException
     */
    public ParticleBean findNanoparticleSampleById(String particleId)
            throws ParticleException;

    /**
     * Look up a nanoparticle sample by its id, with associated data loaded.
     *
     * @param particleId
     * @throws Exception
     */
    public ParticleBean findFullNanoparticleSampleById(String particleId)
            throws Exception;

    /**
     * Look up a nanoparticle sample by its name.
     *
     * @param particleName
     * @throws ParticleException
     */
    public NanoparticleSample findNanoparticleSampleByName(String particleName)
            throws ParticleException;

    /**
     * Get other particles from the given particle source
     *
     * @param particleSource
     * @param particleName
     * @param user
     * @return
     * @throws ParticleException
     * @throws CaNanoLabSecurityException
     */
    public SortedSet<SortableName> findOtherParticles(String particleSource,
            String particleName, UserBean user) throws ParticleException;

    /**
     * Populate the visibility information of the given particle bean for the
     * given user.
     *
     * @param particleBean
     * @param user
     * @throws ParticleException
     */
    public void retrieveVisibility(ParticleBean particleBean, UserBean user)
            throws ParticleException;

    /**
     * Delete the annotation of the given class with the given data id.
     *
     * @param className
     * @param dataId
     * @throws ParticleException
     */
    public void deleteAnnotationById(String className, Long dataId)
            throws ParticleException;

    /**
     * @param user
     * @return all nanoparticle sample names visible to the user
     * @throws ParticleException
     */
    public SortedSet<String> findAllNanoparticleSampleNames(UserBean user)
            throws ParticleException;

    /**
     * @return the number of publicly visible nanoparticle samples
     * @throws ParticleException
     */
    public int getNumberOfPublicNanoparticleSamples() throws ParticleException;

    /**
     * Assign public visibility to the data associated with the given sample.
     *
     * @param authService
     * @param particleSampleBean
     * @param visibleGroups
     * @throws Exception
     */
    public void assignAssociatedPublicVisibility(
            AuthorizationService authService, ParticleBean particleSampleBean,
            String[] visibleGroups) throws Exception;

    /**
     * @param particleId
     * @return the publications associated with the given particle
     * @throws Exception
     */
    public List<PublicationBean> findPublicationsByParticleId(String particleId)
            throws Exception;

    /**
     * Filter the given particles down to those accessible to the user.
     *
     * @param particles
     * @param user
     * @throws ParticleException
     */
    public List<ParticleBean> getUserAccessibleParticles(
            List<ParticleBean> particles, UserBean user)
            throws ParticleException;
}
|
package cn.kuehne.kinaseblender.gui;
import java.util.ArrayList;
import java.util.List;
import cn.kuehne.kinaseblender.engine2.CompiledCloud;
import cn.kuehne.kinaseblender.engine2.Product;
import cn.kuehne.kinaseblender.engine2.Source;
/**
 * Table model that lists every source of a compiled cloud as a column, with
 * its number, amount, product count and individual products as rows.
 */
public class SourcesModel extends AbstractBasicTableModel {
    private static final long serialVersionUID = 1L;

    /** Table cells; column 0 holds the row labels, columns 1..n the sources. */
    private final Object[][] data;

    /**
     * Builds the table data for all sources of the given compiled cloud.
     *
     * @param compiled the compiled cloud to read sources and products from
     */
    public SourcesModel(final CompiledCloud compiled) {
        super(" Source");
        final int sourceCount = compiled.getSourceCount();

        // Collect each source's product list up front and track the longest
        // one, which determines how many "Product_i" rows the table needs.
        final ArrayList<List<Product>> productsPerSource = new ArrayList<List<Product>>();
        int longest = 0;
        for (int i = 0; i < sourceCount; i++) {
            final List<Product> produce = compiled.getProducts(compiled.getSource(i));
            productsPerSource.add(produce);
            if (produce != null && produce.size() > longest) {
                longest = produce.size();
            }
        }

        data = new Object[4 + longest][sourceCount + 1];
        // Label column.
        data[0][0] = "Number";
        data[1][0] = "Source";
        data[2][0] = "Amount";
        data[3][0] = "Products";
        for (int row = 0; row < longest; row++) {
            data[row + 4][0] = "Product_" + (row + 1);
        }

        // Fill one column per source.
        for (int col = 0; col < sourceCount; col++) {
            data[0][col + 1] = (col + 1);
            data[1][col + 1] = compiled.getSource(col);
            final List<Product> produce = productsPerSource.get(col);
            if (produce == null) {
                // No product list: mark the count as -1, leave the amount empty.
                data[3][col + 1] = -1;
            } else {
                final int count = produce.size();
                data[3][col + 1] = count;
                double amount = 0;
                for (int p = 0; p < count; p++) {
                    data[p + 4][col + 1] = produce.get(p);
                    amount += compiled.getValue(col,
                            compiled.getProduct(produce.get(p)));
                }
                data[2][col + 1] = amount;
            }
        }
    }

    @Override
    protected Object[][] getData() {
        return data;
    }
}
|
package hu.gaborkolozsy.dictionary.model.interfaces.impl;
import hu.gaborkolozsy.dictionary.model.interfaces.Service;
import hu.gaborkolozsy.dictionary.model.Config;
import java.io.IOException;
/**
 * {@code Service} implementation that resolves and persists the currently
 * selected dictionary file name via a {@code Config} object.
 */
public class FileServiceImpl implements Service<String> {

    /** Configuration backing the persisted dictionary selection. */
    private final Config config;

    /**
     * Constructor.
     *
     * @param config the {@code Config} object
     * @throws IOException by failed I/O operations
     */
    public FileServiceImpl(Config config) throws IOException {
        this.config = config;
    }

    /**
     * Return the language name at the specified index of a dictionary file
     * name such as {@code "English-Hungarian"}.
     *
     * @param fileName the actual dictionary's name
     * @param index array index (0 = first language, 1 = second language)
     * @return language name
     * @throws IOException by failed I/O operations
     */
    public String split(String fileName, int index) throws IOException {
        return fileName.split("-")[index];
    }

    /**
     * Return the currently configured dictionary's file name.
     *
     * @return the actual dictionary's file name
     * @throws IOException by failed I/O operations
     */
    @Override
    public String get() throws IOException {
        return choose(config.getPropertie("Dictionary"));
    }

    /**
     * Store the selected dictionary ID.
     *
     * @param file the actual dictionary ID
     */
    @Override
    public void set(String file) throws IOException {
        this.config.storePropertie("Dictionary", file);
    }

    /**
     * Map a dictionary ID ("1".."6") to its file name; any other ID yields
     * an empty string.
     *
     * @throws IOException by failed I/O operations
     */
    @Override
    public String choose(String text) throws IOException {
        switch (text) {
            case "1": return "English-Hungarian";
            case "2": return "Hungarian-English";
            case "3": return "German-Hungarian";
            case "4": return "Hungarian-German";
            case "5": return "English-German";
            case "6": return "German-English";
            default:  return "";
        }
    }
}
|
package com.afollestad.silk.views;
import android.content.Context;
import android.util.AttributeSet;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.LinearLayout;
import android.widget.Spinner;
import com.afollestad.silk.R;
import java.util.Calendar;
/**
* A date picker that takes up less vertical space than the stock DatePicker.
*
* @author Aidan Follestad (afollestad)
*/
public class SilkDatePicker extends LinearLayout {
public SilkDatePicker(Context context) {
super(context);
init();
}
public SilkDatePicker(Context context, AttributeSet attrs) {
super(context, attrs);
init();
}
public SilkDatePicker(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
init();
}
public class SilkSpinnerAdapter extends ArrayAdapter<String> {
public SilkSpinnerAdapter(Context context) {
super(context, R.layout.spinner_item);
super.setDropDownViewResource(R.layout.spinner_item_dropdown);
}
}
private Calendar mCal;
private int mCurrentYear;
private SilkSpinnerAdapter mMonth;
private SilkSpinnerAdapter mDay;
private SilkSpinnerAdapter mYear;
public Calendar getCalendar() {
return mCal;
}
public int getMinYear() {
return mCurrentYear - 100;
}
public int getMaxYear() {
return mCurrentYear + 100;
}
private void init() {
setOrientation(LinearLayout.HORIZONTAL);
setGravity(Gravity.CENTER_VERTICAL);
setWeightSum(3);
LayoutInflater inflater = (LayoutInflater) getContext().getSystemService(Context.LAYOUT_INFLATER_SERVICE);
inflater.inflate(R.layout.silk_date_picker, this, true);
mCal = Calendar.getInstance();
mCurrentYear = mCal.get(Calendar.YEAR);
Spinner monthSpinner = (Spinner) getChildAt(0);
Spinner daySpinner = (Spinner) getChildAt(1);
Spinner yearSpinner = (Spinner) getChildAt(2);
mMonth = new SilkSpinnerAdapter(getContext());
mDay = new SilkSpinnerAdapter(getContext());
mYear = new SilkSpinnerAdapter(getContext());
monthSpinner.setAdapter(mMonth);
daySpinner.setAdapter(mDay);
yearSpinner.setAdapter(mYear);
fillMonths();
fillDays();
fillYears();
monthSpinner.setSelection(mCal.get(Calendar.MONTH));
daySpinner.setSelection(mCal.get(Calendar.DAY_OF_MONTH) - 1);
yearSpinner.setSelection(mCurrentYear - getMinYear());
monthSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
mCal.set(Calendar.MONTH, position);
fillDays();
}
@Override
public void onNothingSelected(AdapterView<?> parent) {
}
});
daySpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
mCal.set(Calendar.DAY_OF_MONTH, position + 1);
}
@Override
public void onNothingSelected(AdapterView<?> parent) {
}
});
yearSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
mCal.set(Calendar.YEAR, getMinYear() + position);
}
@Override
public void onNothingSelected(AdapterView<?> parent) {
}
});
}
private void fillMonths() {
mMonth.clear();
String[] months = getContext().getResources().getStringArray(R.array.months);
mMonth.addAll(months);
mMonth.notifyDataSetChanged();
}
private void fillDays() {
int daysInMonth = mCal.getActualMaximum(Calendar.DAY_OF_MONTH);
mDay.clear();
for (int i = 1; i <= daysInMonth; i++) mDay.add(i + "");
mDay.notifyDataSetChanged();
}
/**
 * Repopulates the year spinner with every year from {@code getMinYear()} to
 * {@code getMaxYear()}, inclusive.
 */
private void fillYears() {
    mYear.clear();
    for (int year = getMinYear(); year <= getMaxYear(); year++) {
        // String.valueOf() instead of the "" + int concatenation idiom
        mYear.add(String.valueOf(year));
    }
    mYear.notifyDataSetChanged();
}
}
|
package com.braintreegateway;
/**
 * An Enum representing all of the validation errors from the gateway.
 *
 * <p>Each constant carries the raw gateway error code string. Note that a few
 * codes are shared by more than one constant (legacy aliases kept for
 * backward compatibility); {@link #findByCode(String)} returns the first
 * constant declared with a matching code.
 */
public enum ValidationErrorCode {
    ADDRESS_CANNOT_BE_BLANK("81801"),
    ADDRESS_COMPANY_IS_INVALID("91821"),
    ADDRESS_COMPANY_IS_TOO_LONG("81802"),
    ADDRESS_COUNTRY_CODE_ALPHA2_IS_NOT_ACCEPTED("91814"),
    ADDRESS_COUNTRY_CODE_ALPHA3_IS_NOT_ACCEPTED("91816"),
    ADDRESS_COUNTRY_CODE_NUMERIC_IS_NOT_ACCEPTED("91817"),
    ADDRESS_COUNTRY_NAME_IS_NOT_ACCEPTED("91803"),
    ADDRESS_EXTENDED_ADDRESS_IS_INVALID("91823"),
    ADDRESS_EXTENDED_ADDRESS_IS_TOO_LONG("81804"),
    ADDRESS_FIRST_NAME_IS_INVALID("91819"),
    ADDRESS_FIRST_NAME_IS_TOO_LONG("81805"),
    ADDRESS_INCONSISTENT_COUNTRY("91815"),
    ADDRESS_LAST_NAME_IS_INVALID("91820"),
    ADDRESS_LAST_NAME_IS_TOO_LONG("81806"),
    ADDRESS_LOCALITY_IS_INVALID("91824"),
    ADDRESS_LOCALITY_IS_TOO_LONG("81807"),
    ADDRESS_POSTAL_CODE_INVALID_CHARACTERS("81813"),
    ADDRESS_POSTAL_CODE_IS_INVALID("91826"),
    ADDRESS_POSTAL_CODE_IS_REQUIRED("81808"),
    ADDRESS_POSTAL_CODE_IS_TOO_LONG("81809"),
    ADDRESS_REGION_IS_INVALID("91825"),
    ADDRESS_REGION_IS_TOO_LONG("81810"),
    ADDRESS_STREET_ADDRESS_IS_INVALID("91822"),
    ADDRESS_STREET_ADDRESS_IS_REQUIRED("81811"),
    ADDRESS_STREET_ADDRESS_IS_TOO_LONG("81812"),
    ADDRESS_TOO_MANY_ADDRESSES_PER_CUSTOMER("91818"),
    CREDIT_CARD_BILLING_ADDRESS_CONFLICT("91701"),
    CREDIT_CARD_BILLING_ADDRESS_ID_IS_INVALID("91702"),
    CREDIT_CARD_CARDHOLDER_NAME_IS_TOO_LONG("81723"),
    CREDIT_CARD_CREDIT_CARD_TYPE_IS_NOT_ACCEPTED("81703"),
    CREDIT_CARD_CREDIT_CARD_TYPE_IS_NOT_ACCEPTED_BY_SUBSCRIPTION_MERCHANT_ACCOUNT("81718"),
    CREDIT_CARD_CUSTOMER_ID_IS_INVALID("91705"),
    CREDIT_CARD_CUSTOMER_ID_IS_REQUIRED("91704"),
    CREDIT_CARD_CVV_IS_INVALID("81707"),
    CREDIT_CARD_CVV_IS_REQUIRED("81706"),
    CREDIT_CARD_DUPLICATE_CARD_EXISTS("81724"),
    CREDIT_CARD_EXPIRATION_DATE_CONFLICT("91708"),
    CREDIT_CARD_EXPIRATION_DATE_IS_INVALID("81710"),
    CREDIT_CARD_EXPIRATION_DATE_IS_REQUIRED("81709"),
    CREDIT_CARD_EXPIRATION_DATE_YEAR_IS_INVALID("81711"),
    CREDIT_CARD_EXPIRATION_MONTH_IS_INVALID("81712"),
    CREDIT_CARD_EXPIRATION_YEAR_IS_INVALID("81713"),
    CREDIT_CARD_INVALID_VENMO_SDK_PAYMENT_METHOD_CODE("91727"),
    CREDIT_CARD_NUMBER_HAS_INVALID_LENGTH("81716"),
    CREDIT_CARD_NUMBER_LENGTH_IS_INVALID("81716"), // alias of CREDIT_CARD_NUMBER_HAS_INVALID_LENGTH
    CREDIT_CARD_NUMBER_IS_INVALID("81715"),
    CREDIT_CARD_NUMBER_IS_REQUIRED("81714"),
    CREDIT_CARD_NUMBER_MUST_BE_TEST_NUMBER("81717"),
    CREDIT_CARD_OPTIONS_UPDATE_EXISTING_TOKEN_IS_INVALID("91723"),
    CREDIT_CARD_PAYMENT_METHOD_CONFLICT("81725"),
    CREDIT_CARD_TOKEN_INVALID("91718"),
    CREDIT_CARD_TOKEN_FORMAT_IS_INVALID("91718"), // alias of CREDIT_CARD_TOKEN_INVALID
    CREDIT_CARD_TOKEN_IS_IN_USE("91719"),
    CREDIT_CARD_TOKEN_IS_NOT_ALLOWED("91721"),
    CREDIT_CARD_TOKEN_IS_REQUIRED("91722"),
    CREDIT_CARD_TOKEN_IS_TOO_LONG("91720"),
    CREDIT_CARD_VENMO_SDK_PAYMENT_METHOD_CODE_CARD_TYPE_IS_NOT_ACCEPTED("91726"),
    CUSTOMER_COMPANY_IS_TOO_LONG("81601"),
    CUSTOMER_CUSTOM_FIELD_IS_INVALID("91602"),
    CUSTOMER_CUSTOM_FIELD_IS_TOO_LONG("81603"),
    CUSTOMER_EMAIL_IS_INVALID("81604"),
    CUSTOMER_EMAIL_FORMAT_IS_INVALID("81604"), // alias of CUSTOMER_EMAIL_IS_INVALID
    CUSTOMER_EMAIL_IS_REQUIRED("81606"),
    CUSTOMER_EMAIL_IS_TOO_LONG("81605"),
    CUSTOMER_FAX_IS_TOO_LONG("81607"),
    CUSTOMER_FIRST_NAME_IS_TOO_LONG("81608"),
    CUSTOMER_ID_IS_INVAILD("91610"), // Deprecated (misspelling, kept for compatibility)
    CUSTOMER_ID_IS_INVALID("91610"), // Deprecated
    CUSTOMER_ID_IS_IN_USE("91609"),
    CUSTOMER_ID_IS_NOT_ALLOWED("91611"),
    CUSTOMER_ID_IS_REQUIRED("91613"),
    CUSTOMER_ID_IS_TOO_LONG("91612"),
    CUSTOMER_LAST_NAME_IS_TOO_LONG("81613"),
    CUSTOMER_PHONE_IS_TOO_LONG("81614"),
    CUSTOMER_WEBSITE_IS_INVALID("81616"),
    CUSTOMER_WEBSITE_FORMAT_IS_INVALID("81616"), // alias of CUSTOMER_WEBSITE_IS_INVALID
    CUSTOMER_WEBSITE_IS_TOO_LONG("81615"),
    DESCRIPTOR_DYNAMIC_DESCRIPTORS_DISABLED("92203"),
    DESCRIPTOR_INTERNATIONAL_NAME_FORMAT_IS_INVALID("92204"),
    DESCRIPTOR_INTERNATIONAL_PHONE_FORMAT_IS_INVALID("92205"),
    DESCRIPTOR_NAME_FORMAT_IS_INVALID("92201"),
    DESCRIPTOR_PHONE_FORMAT_IS_INVALID("92202"),
    SETTLEMENT_BATCH_SUMMARY_SETTLEMENT_DATE_IS_INVALID("82302"),
    SETTLEMENT_BATCH_SUMMARY_SETTLEMENT_DATE_IS_REQUIRED("82301"),
    SETTLEMENT_BATCH_SUMMARY_CUSTOM_FIELD_IS_INVALID("82303"),
    SUBSCRIPTION_BILLING_DAY_OF_MONTH_CANNOT_BE_UPDATED("91918"),
    SUBSCRIPTION_BILLING_DAY_OF_MONTH_IS_INVALID("91914"),
    SUBSCRIPTION_BILLING_DAY_OF_MONTH_MUST_BE_NUMERIC("91913"),
    SUBSCRIPTION_CANNOT_ADD_DUPLICATE_ADDON_OR_DISCOUNT("91911"),
    SUBSCRIPTION_CANNOT_EDIT_CANCELED_SUBSCRIPTION("81901"),
    SUBSCRIPTION_CANNOT_EDIT_EXPIRED_SUBSCRIPTION("81910"),
    SUBSCRIPTION_CANNOT_EDIT_PRICE_CHANGING_FIELDS_ON_PAST_DUE_SUBSCRIPTION("91920"),
    SUBSCRIPTION_FIRST_BILLING_DATE_CANNOT_BE_IN_THE_PAST("91916"),
    SUBSCRIPTION_FIRST_BILLING_DATE_CANNOT_BE_UPDATED("91919"),
    SUBSCRIPTION_FIRST_BILLING_DATE_IS_INVALID("91915"),
    SUBSCRIPTION_ID_IS_IN_USE("81902"),
    SUBSCRIPTION_INCONSISTENT_NUMBER_OF_BILLING_CYCLES("91908"),
    SUBSCRIPTION_INCONSISTENT_START_DATE("91917"),
    SUBSCRIPTION_INVALID_REQUEST_FORMAT("91921"),
    SUBSCRIPTION_MERCHANT_ACCOUNT_ID_IS_INVALID("91901"),
    SUBSCRIPTION_MISMATCH_CURRENCY_ISO_CODE("91923"),
    SUBSCRIPTION_NUMBER_OF_BILLING_CYCLES_CANNOT_BE_BLANK("91912"),
    SUBSCRIPTION_NUMBER_OF_BILLING_CYCLES_IS_TOO_SMALL("91909"),
    SUBSCRIPTION_NUMBER_OF_BILLING_CYCLES_MUST_BE_GREATER_THAN_ZERO("91907"),
    SUBSCRIPTION_NUMBER_OF_BILLING_CYCLES_MUST_BE_NUMERIC("91906"),
    SUBSCRIPTION_PAYMENT_METHOD_TOKEN_CARD_TYPE_IS_NOT_ACCEPTED("91902"),
    SUBSCRIPTION_PAYMENT_METHOD_TOKEN_IS_INVALID("91903"),
    SUBSCRIPTION_PAYMENT_METHOD_TOKEN_NOT_ASSOCIATED_WITH_CUSTOMER("91905"),
    SUBSCRIPTION_PLAN_BILLING_FREQUENCY_CANNOT_BE_UPDATED("91922"),
    SUBSCRIPTION_PLAN_ID_IS_INVALID("91904"),
    SUBSCRIPTION_PRICE_CANNOT_BE_BLANK("81903"),
    SUBSCRIPTION_PRICE_FORMAT_IS_INVALID("81904"),
    SUBSCRIPTION_PRICE_IS_TOO_LARGE("81923"),
    SUBSCRIPTION_STATUS_IS_CANCELED("81905"),
    SUBSCRIPTION_TOKEN_FORMAT_IS_INVALID("81906"),
    SUBSCRIPTION_TRIAL_DURATION_FORMAT_IS_INVALID("81907"),
    SUBSCRIPTION_TRIAL_DURATION_IS_REQUIRED("81908"),
    SUBSCRIPTION_TRIAL_DURATION_UNIT_IS_INVALID("81909"),
    SUBSCRIPTION_MODIFICATION_AMOUNT_CANNOT_BE_BLANK("92003"),
    SUBSCRIPTION_MODIFICATION_AMOUNT_IS_INVALID("92002"),
    SUBSCRIPTION_MODIFICATION_AMOUNT_IS_TOO_LARGE("92023"),
    SUBSCRIPTION_MODIFICATION_CANNOT_EDIT_MODIFICATIONS_ON_PAST_DUE_SUBSCRIPTION("92022"),
    SUBSCRIPTION_MODIFICATION_CANNOT_UPDATE_AND_REMOVE("92015"),
    SUBSCRIPTION_MODIFICATION_EXISTING_ID_IS_INCORRECT_KIND("92020"),
    SUBSCRIPTION_MODIFICATION_EXISTING_ID_IS_INVALID("92011"),
    SUBSCRIPTION_MODIFICATION_EXISTING_ID_IS_REQUIRED("92012"),
    SUBSCRIPTION_MODIFICATION_ID_TO_REMOVE_IS_INCORRECT_KIND("92021"),
    SUBSCRIPTION_MODIFICATION_ID_TO_REMOVE_IS_NOT_PRESENT("92016"),
    SUBSCRIPTION_MODIFICATION_INCONSISTENT_NUMBER_OF_BILLING_CYCLES("92018"),
    SUBSCRIPTION_MODIFICATION_INHERITED_FROM_ID_IS_INVALID("92013"),
    SUBSCRIPTION_MODIFICATION_INHERITED_FROM_ID_IS_REQUIRED("92014"),
    SUBSCRIPTION_MODIFICATION_MISSING("92024"),
    SUBSCRIPTION_MODIFICATION_NUMBER_OF_BILLING_CYCLES_CANNOT_BE_BLANK("92017"),
    SUBSCRIPTION_MODIFICATION_NUMBER_OF_BILLING_CYCLES_IS_INVALID("92005"),
    SUBSCRIPTION_MODIFICATION_NUMBER_OF_BILLING_CYCLES_MUST_BE_GREATER_THAN_ZERO("92019"),
    SUBSCRIPTION_MODIFICATION_QUANTITY_CANNOT_BE_BLANK("92004"),
    SUBSCRIPTION_MODIFICATION_QUANTITY_IS_INVALID("92001"),
    SUBSCRIPTION_MODIFICATION_QUANTITY_MUST_BE_GREATER_THAN_ZERO("92010"),
    TRANSACTION_AMOUNT_CANNOT_BE_NEGATIVE("81501"),
    TRANSACTION_AMOUNT_IS_INVALID("81503"),
    TRANSACTION_AMOUNT_FORMAT_IS_INVALID("81503"), // alias of TRANSACTION_AMOUNT_IS_INVALID
    TRANSACTION_AMOUNT_IS_REQUIRED("81502"),
    TRANSACTION_AMOUNT_IS_TOO_LARGE("81528"),
    TRANSACTION_AMOUNT_MUST_BE_GREATER_THAN_ZERO("81531"),
    TRANSACTION_BILLING_ADDRESS_CONFLICT("91530"),
    TRANSACTION_CANNOT_BE_VOIDED("91504"),
    TRANSACTION_CANNOT_CLONE_CREDIT("91543"),
    TRANSACTION_CANNOT_CLONE_TRANSACTION_WITH_VAULT_CREDIT_CARD("91540"),
    TRANSACTION_CANNOT_CLONE_UNSUCCESSFUL_TRANSACTION("91542"),
    TRANSACTION_CANNOT_CLONE_VOICE_AUTHORIZATIONS("91541"),
    TRANSACTION_CANNOT_REFUND_CREDIT("91505"),
    TRANSACTION_CANNOT_REFUND_UNLESS_SETTLED("91506"),
    TRANSACTION_CANNOT_REFUND_WITH_SUSPENDED_MERCHANT_ACCOUNT("91538"),
    TRANSACTION_CANNOT_SUBMIT_FOR_SETTLEMENT("91507"),
    TRANSACTION_CHANNEL_IS_TOO_LONG("91550"),
    TRANSACTION_CREDIT_CARD_IS_REQUIRED("91508"),
    TRANSACTION_CUSTOMER_DEFAULT_PAYMENT_METHOD_CARD_TYPE_IS_NOT_ACCEPTED("81509"),
    TRANSACTION_CUSTOMER_DOES_NOT_HAVE_CREDIT_CARD("91511"),
    TRANSACTION_CUSTOMER_ID_IS_INVALID("91510"),
    TRANSACTION_CUSTOM_FIELD_IS_INVALID("91526"),
    TRANSACTION_CUSTOM_FIELD_IS_TOO_LONG("81527"),
    TRANSACTION_HAS_ALREADY_BEEN_REFUNDED("91512"),
    TRANSACTION_MERCHANT_ACCOUNT_DOES_NOT_SUPPORT_REFUNDS("91547"),
    TRANSACTION_MERCHANT_ACCOUNT_ID_IS_INVALID("91513"),
    TRANSACTION_MERCHANT_ACCOUNT_IS_SUSPENDED("91514"),
    TRANSACTION_MERCHANT_ACCOUNT_NAME_IS_INVALID("91513"), //Deprecated
    TRANSACTION_OPTIONS_SUBMIT_FOR_SETTLEMENT_IS_REQUIRED_FOR_CLONING("91544"),
    TRANSACTION_OPTIONS_VAULT_IS_DISABLED("91525"),
    TRANSACTION_ORDER_ID_IS_TOO_LONG("91501"),
    TRANSACTION_PAYMENT_METHOD_CONFLICT("91515"),
    TRANSACTION_PAYMENT_METHOD_CONFLICT_WITH_VENMO_SDK("91549"),
    TRANSACTION_PAYMENT_METHOD_DOES_NOT_BELONG_TO_CUSTOMER("91516"),
    TRANSACTION_PAYMENT_METHOD_DOES_NOT_BELONG_TO_SUBSCRIPTION("91527"),
    TRANSACTION_PAYMENT_METHOD_TOKEN_CARD_TYPE_IS_NOT_ACCEPTED("91517"),
    TRANSACTION_PAYMENT_METHOD_TOKEN_IS_INVALID("91518"),
    TRANSACTION_PROCESSOR_AUTHORIZATION_CODE_CANNOT_BE_SET("91519"),
    TRANSACTION_PROCESSOR_AUTHORIZATION_CODE_IS_INVALID("81520"),
    TRANSACTION_PROCESSOR_DOES_NOT_SUPPORT_CREDITS("91546"),
    TRANSACTION_PROCESSOR_DOES_NOT_SUPPORT_VOICE_AUTHORIZATIONS("91545"),
    TRANSACTION_PURCHASE_ORDER_NUMBER_IS_INVALID("91548"),
    TRANSACTION_PURCHASE_ORDER_NUMBER_IS_TOO_LONG("91537"),
    TRANSACTION_REFUND_AMOUNT_IS_TOO_LARGE("91521"),
    TRANSACTION_SETTLEMENT_AMOUNT_IS_TOO_LARGE("91522"),
    TRANSACTION_SUBSCRIPTION_DOES_NOT_BELONG_TO_CUSTOMER("91529"),
    TRANSACTION_SUBSCRIPTION_ID_IS_INVALID("91528"),
    TRANSACTION_SUBSCRIPTION_STATUS_MUST_BE_PAST_DUE("91531"),
    TRANSACTION_TAX_AMOUNT_CANNOT_BE_NEGATIVE("81534"),
    TRANSACTION_TAX_AMOUNT_FORMAT_IS_INVALID("81535"),
    TRANSACTION_TAX_AMOUNT_IS_TOO_LARGE("81536"),
    TRANSACTION_TYPE_IS_INVALID("91523"),
    TRANSACTION_TYPE_IS_REQUIRED("91524"),
    TRANSACTION_UNSUPPORTED_VOICE_AUTHORIZATION("91539"),
    UNKOWN_VALIDATION_ERROR(""); // fallback for unrecognized codes (name misspelled historically; kept for compatibility)

    /**
     * The raw gateway error code. Declared {@code final}: enum constants are
     * shared singletons, so this field must never be reassigned by callers.
     * (Reads via {@code errorCode.code} are unaffected.)
     */
    public final String code;

    private ValidationErrorCode(String code) {
        this.code = code;
    }

    /**
     * Looks up the constant for a gateway error code string.
     *
     * @param code the raw code from the gateway response
     * @return the first constant (in declaration order) whose code matches,
     *         or {@link #UNKOWN_VALIDATION_ERROR} if none matches (including
     *         when {@code code} is {@code null}).
     */
    public static ValidationErrorCode findByCode(String code) {
        for (ValidationErrorCode validationErrorCode : values()) {
            if (validationErrorCode.code.equals(code)) {
                return validationErrorCode;
            }
        }
        return UNKOWN_VALIDATION_ERROR;
    }
}
|
package com.ecyrd.jspwiki.diff;
import java.io.IOException;
import java.util.Properties;
import org.apache.log4j.Logger;
import com.ecyrd.jspwiki.NoRequiredPropertyException;
import com.ecyrd.jspwiki.WikiEngine;
import com.ecyrd.jspwiki.util.ClassUtil;
/**
* Load, initialize and delegate to the DiffProvider that will actually do the work.
*
* @author John Volkar
*/
public class DifferenceManager
{
    private static final Logger log = Logger.getLogger(DifferenceManager.class);

    /** Configuration key naming the DiffProvider implementation class to load. */
    public static final String PROP_DIFF_PROVIDER = "jspwiki.diffProvider";

    private DiffProvider m_provider;

    /**
     * Loads and initializes the configured DiffProvider. Any failure along
     * the way degrades gracefully to a NullDiffProvider.
     */
    public DifferenceManager(WikiEngine engine, Properties props)
    {
        loadProvider(props);
        initializeProvider(engine, props);

        log.info("Using difference provider: " + m_provider.getProviderInfo());
    }

    /** Instantiates the provider named in the properties (default: traditional diff). */
    private void loadProvider(Properties props)
    {
        String className = props.getProperty( PROP_DIFF_PROVIDER,
                                              TraditionalDiffProvider.class.getName() );
        try
        {
            Class providerClass = ClassUtil.findClass( "com.ecyrd.jspwiki.diff", className );
            m_provider = (DiffProvider) providerClass.newInstance();
        }
        catch( ClassNotFoundException e )
        {
            log.warn("Failed loading DiffProvider, will use NullDiffProvider.", e);
        }
        catch( InstantiationException e )
        {
            log.warn("Failed loading DiffProvider, will use NullDiffProvider.", e);
        }
        catch( IllegalAccessException e )
        {
            log.warn("Failed loading DiffProvider, will use NullDiffProvider.", e);
        }

        // Whatever went wrong above, never leave the provider unset.
        if( m_provider == null )
        {
            m_provider = new DiffProvider.NullDiffProvider();
        }
    }

    /** Initializes the loaded provider, replacing it with the null provider on failure. */
    private void initializeProvider(WikiEngine engine, Properties props)
    {
        try
        {
            m_provider.initialize( engine, props );
        }
        catch( NoRequiredPropertyException e1 )
        {
            log.warn("Failed initializing DiffProvider, will use NullDiffProvider.", e1);
            m_provider = new DiffProvider.NullDiffProvider(); //doesn't need init'd
        }
        catch( IOException e1 )
        {
            log.warn("Failed initializing DiffProvider, will use NullDiffProvider.", e1);
            m_provider = new DiffProvider.NullDiffProvider(); //doesn't need init'd
        }
    }

    /**
     * Returns valid XHTML string to be used in any way you please.
     *
     * @return XHTML, or empty string, if no difference detected.
     */
    public String makeDiff(String firstWikiText, String secondWikiText)
    {
        String diff = m_provider.makeDiffHtml( firstWikiText, secondWikiText );
        return diff == null ? "" : diff;
    }
}
|
package com.cloud.storage;
import java.net.URI;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.Enumeration;
import java.util.Formatter;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import javax.ejb.Local;
import javax.naming.ConfigurationException;
import org.apache.log4j.Logger;
import com.cloud.agent.AgentManager;
import com.cloud.agent.api.Answer;
import com.cloud.agent.api.BackupSnapshotCommand;
import com.cloud.agent.api.Command;
import com.cloud.agent.api.CreateVolumeFromSnapshotAnswer;
import com.cloud.agent.api.CreateVolumeFromSnapshotCommand;
import com.cloud.agent.api.DeleteStoragePoolCommand;
import com.cloud.agent.api.ManageSnapshotCommand;
import com.cloud.agent.api.ModifyStoragePoolAnswer;
import com.cloud.agent.api.ModifyStoragePoolCommand;
import com.cloud.agent.api.storage.CopyVolumeAnswer;
import com.cloud.agent.api.storage.CopyVolumeCommand;
import com.cloud.agent.api.storage.CreateAnswer;
import com.cloud.agent.api.storage.CreateCommand;
import com.cloud.agent.api.storage.DeleteTemplateCommand;
import com.cloud.agent.api.storage.DestroyCommand;
import com.cloud.agent.api.to.VolumeTO;
import com.cloud.alert.AlertManager;
import com.cloud.api.BaseCmd;
import com.cloud.async.AsyncInstanceCreateStatus;
import com.cloud.async.AsyncJobExecutor;
import com.cloud.async.AsyncJobManager;
import com.cloud.async.AsyncJobVO;
import com.cloud.async.BaseAsyncJobExecutor;
import com.cloud.capacity.CapacityVO;
import com.cloud.capacity.dao.CapacityDao;
import com.cloud.configuration.Config;
import com.cloud.configuration.ConfigurationManager;
import com.cloud.configuration.ResourceCount.ResourceType;
import com.cloud.configuration.dao.ConfigurationDao;
import com.cloud.consoleproxy.ConsoleProxyManager;
import com.cloud.dc.DataCenterVO;
import com.cloud.dc.HostPodVO;
import com.cloud.dc.dao.DataCenterDao;
import com.cloud.dc.dao.HostPodDao;
import com.cloud.event.EventState;
import com.cloud.event.EventTypes;
import com.cloud.event.EventVO;
import com.cloud.event.dao.EventDao;
import com.cloud.exception.AgentUnavailableException;
import com.cloud.exception.ConcurrentOperationException;
import com.cloud.exception.DiscoveryException;
import com.cloud.exception.InsufficientCapacityException;
import com.cloud.exception.InternalErrorException;
import com.cloud.exception.InvalidParameterValueException;
import com.cloud.exception.OperationTimedoutException;
import com.cloud.exception.ResourceAllocationException;
import com.cloud.exception.ResourceInUseException;
import com.cloud.exception.StorageUnavailableException;
import com.cloud.host.Host;
import com.cloud.host.Host.Type;
import com.cloud.host.HostVO;
import com.cloud.host.Status;
import com.cloud.host.dao.DetailsDao;
import com.cloud.host.dao.HostDao;
import com.cloud.hypervisor.Hypervisor;
import com.cloud.network.NetworkManager;
import com.cloud.offering.ServiceOffering;
import com.cloud.service.ServiceOfferingVO;
import com.cloud.service.dao.ServiceOfferingDao;
import com.cloud.storage.Storage.ImageFormat;
import com.cloud.storage.Storage.StoragePoolType;
import com.cloud.storage.Volume.MirrorState;
import com.cloud.storage.Volume.SourceType;
import com.cloud.storage.Volume.VolumeType;
import com.cloud.storage.allocator.StoragePoolAllocator;
import com.cloud.storage.dao.DiskOfferingDao;
import com.cloud.storage.dao.SnapshotDao;
import com.cloud.storage.dao.StoragePoolDao;
import com.cloud.storage.dao.StoragePoolHostDao;
import com.cloud.storage.dao.VMTemplateDao;
import com.cloud.storage.dao.VMTemplateHostDao;
import com.cloud.storage.dao.VMTemplatePoolDao;
import com.cloud.storage.dao.VolumeDao;
import com.cloud.storage.listener.StoragePoolMonitor;
import com.cloud.storage.secondary.SecondaryStorageVmManager;
import com.cloud.storage.snapshot.SnapshotManager;
import com.cloud.storage.snapshot.SnapshotScheduler;
import com.cloud.template.TemplateManager;
import com.cloud.user.Account;
import com.cloud.user.AccountManager;
import com.cloud.user.AccountVO;
import com.cloud.user.User;
import com.cloud.user.dao.AccountDao;
import com.cloud.user.dao.UserDao;
import com.cloud.uservm.UserVm;
import com.cloud.utils.NumbersUtil;
import com.cloud.utils.Pair;
import com.cloud.utils.component.Adapters;
import com.cloud.utils.component.ComponentLocator;
import com.cloud.utils.component.Inject;
import com.cloud.utils.concurrency.NamedThreadFactory;
import com.cloud.utils.db.DB;
import com.cloud.utils.db.GlobalLock;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.Transaction;
import com.cloud.utils.exception.CloudRuntimeException;
import com.cloud.utils.exception.ExecutionException;
import com.cloud.vm.DiskProfile;
import com.cloud.vm.State;
import com.cloud.vm.UserVmManager;
import com.cloud.vm.UserVmVO;
import com.cloud.vm.VMInstanceVO;
import com.cloud.vm.VirtualMachine;
import com.cloud.vm.dao.ConsoleProxyDao;
import com.cloud.vm.dao.UserVmDao;
import com.cloud.vm.dao.VMInstanceDao;
@Local(value = { StorageManager.class })
public class StorageManagerImpl implements StorageManager {
private static final Logger s_logger = Logger.getLogger(StorageManagerImpl.class);
protected String _name;
// --- Injected managers (wired by the component locator) ---
@Inject protected UserVmManager _userVmMgr;
@Inject protected AgentManager _agentMgr;
@Inject protected TemplateManager _tmpltMgr;
@Inject protected AsyncJobManager _asyncMgr;
@Inject protected SnapshotManager _snapshotMgr;
@Inject protected SnapshotScheduler _snapshotScheduler;
@Inject protected AccountManager _accountMgr;
@Inject protected ConfigurationManager _configMgr;
@Inject protected ConsoleProxyManager _consoleProxyMgr;
@Inject protected SecondaryStorageVmManager _secStorageMgr;
@Inject protected NetworkManager _networkMgr;
// --- Injected DAOs ---
@Inject protected VolumeDao _volsDao;
@Inject protected HostDao _hostDao;
@Inject protected ConsoleProxyDao _consoleProxyDao;
@Inject protected DetailsDao _detailsDao;
@Inject protected SnapshotDao _snapshotDao;
// Adapter chains; populated during configuration, not injected.
protected Adapters<StoragePoolAllocator> _storagePoolAllocators;
protected Adapters<StoragePoolDiscoverer> _discoverers;
@Inject protected StoragePoolHostDao _storagePoolHostDao;
@Inject protected AlertManager _alertMgr;
@Inject protected VMTemplateHostDao _vmTemplateHostDao = null;
@Inject protected VMTemplatePoolDao _vmTemplatePoolDao = null;
@Inject protected VMTemplateDao _vmTemplateDao = null;
@Inject protected StoragePoolHostDao _poolHostDao = null;
@Inject protected UserVmDao _userVmDao;
@Inject protected VMInstanceDao _vmInstanceDao;
@Inject protected StoragePoolDao _storagePoolDao = null;
@Inject protected CapacityDao _capacityDao;
@Inject protected DiskOfferingDao _diskOfferingDao;
@Inject protected AccountDao _accountDao;
@Inject protected EventDao _eventDao = null;
@Inject protected DataCenterDao _dcDao = null;
@Inject protected HostPodDao _podDao = null;
@Inject protected VMTemplateDao _templateDao;
@Inject protected VMTemplateHostDao _templateHostDao;
@Inject protected ServiceOfferingDao _offeringDao;
@Inject protected UserDao _userDao;
// Reusable search templates, built once and instantiated per query.
protected SearchBuilder<VMTemplateHostVO> HostTemplateStatesSearch;
protected SearchBuilder<StoragePoolVO> PoolsUsedByVmSearch;
// Scheduler for the periodic storage-cleanup task (when enabled below).
ScheduledExecutorService _executor = null;
boolean _storageCleanupEnabled;
int _storageCleanupInterval;
int _storagePoolAcquisitionWaitSeconds = 1800; // 30 minutes
protected int _retry = 2;
protected int _pingInterval = 60; // seconds
protected int _hostRetry;
protected int _overProvisioningFactor = 1;
private int _totalRetries;
private int _pauseInterval;
private final boolean _shouldBeSnapshotCapable = true;
private Hypervisor.Type _hypervisorType;
/**
 * Maintenance-mode check for primary storage: sharing is allowed only when
 * every volume in the list is still present. If any volume has been removed
 * we return false so the allocator can try another primary storage.
 */
@Override
public boolean share(VMInstanceVO vm, List<VolumeVO> vols, HostVO host, boolean cancelPreviousShare) {
    for (VolumeVO volume : vols) {
        if (volume.getRemoved() != null) {
            s_logger.warn("Volume id:" + volume.getId() + " is removed, cannot share on this instance");
            return false; // not ok to share
        }
    }
    return true; // ok to share
}
/**
 * Persists one ROOT volume and the given data-disk volumes for {@code vm}
 * inside a single transaction, assigning sequential device ids starting at 0.
 *
 * @param dataDisks data-disk profiles; may be null or empty (ROOT only).
 *                  The previous version dereferenced a null list and threw NPE
 *                  (allocateIsoInstalledVm passes null here).
 * @return the persisted volumes, ROOT first.
 */
@DB
public List<VolumeVO> allocate(DiskProfile rootDisk, List<DiskProfile> dataDisks, VMInstanceVO vm, DataCenterVO dc, AccountVO account) {
    if (dataDisks == null) {
        dataDisks = Collections.emptyList();
    }
    ArrayList<VolumeVO> vols = new ArrayList<VolumeVO>(dataDisks.size() + 1);
    long deviceId = 0;
    Transaction txn = Transaction.currentTxn();
    txn.start();
    VolumeVO rootVol = new VolumeVO(VolumeType.ROOT, rootDisk.getName(), dc.getId(), account.getDomainId(), account.getId(), rootDisk.getDiskOfferingId(), rootDisk.getSize());
    if (rootDisk.getTemplateId() != null) {
        rootVol.setTemplateId(rootDisk.getTemplateId());
    }
    rootVol.setInstanceId(vm.getId());
    rootVol.setDeviceId(deviceId++);
    rootVol = _volsDao.persist(rootVol);
    vols.add(rootVol);
    for (DiskProfile dataDisk : dataDisks) {
        VolumeVO dataVol = new VolumeVO(VolumeType.DATADISK, dataDisk.getName(), dc.getId(), account.getDomainId(), account.getId(), dataDisk.getDiskOfferingId(), dataDisk.getSize());
        dataVol.setDeviceId(deviceId++);
        dataVol.setInstanceId(vm.getId());
        dataVol = _volsDao.persist(dataVol);
        vols.add(dataVol);
    }
    txn.commit();
    return vols;
}
/**
 * Allocates the single ROOT volume for a VM installed from an ISO.
 * The ROOT size comes from {@code size} when given, otherwise from the offering.
 *
 * @return the persisted ROOT volume.
 */
@Override
public VolumeVO allocateIsoInstalledVm(VMInstanceVO vm, VMTemplateVO template, DiskOfferingVO rootOffering, Long size, DataCenterVO dc, AccountVO account) {
    assert (template.getFormat() == ImageFormat.ISO) : "The template has to be ISO";
    long rootId = _volsDao.getNextInSequence(Long.class, "volume_seq");
    DiskProfile rootDisk = new DiskProfile(rootId, VolumeType.ROOT, "ROOT-" + vm.getId() + "-" + rootId, rootOffering.getId(), size != null ? size : rootOffering.getDiskSizeInBytes(), rootOffering.getTagsArray(), rootOffering.getUseLocalStorage(), rootOffering.isRecreatable(), null);
    // Pass an explicit empty list rather than null: allocate() sizes its result
    // from dataDisks and iterates it, so a null here caused an NPE.
    List<VolumeVO> vols = allocate(rootDisk, new ArrayList<DiskProfile>(), vm, dc, account);
    return vols.get(0);
}
/**
 * Prepares the created volumes of {@code vm} for use on {@code host}.
 *
 * <p>If every volume is recreatable, volumes whose pool is not reachable from
 * this host (or, for system VMs, whose pool is entering maintenance) are
 * recreated on a pool the host can see; the old records are then removed.
 * If any volume is NOT recreatable, the existing set is returned unchanged.
 *
 * @return the usable volumes: either the originals or the newly created ones.
 */
@Override
public List<VolumeVO> prepare(VMInstanceVO vm, HostVO host) {
    List<VolumeVO> vols = _volsDao.findCreatedByInstance(vm.getId());
    List<VolumeVO> recreateVols = new ArrayList<VolumeVO>(vols.size());
    for (VolumeVO vol : vols) {
        if (!vol.isRecreatable()) {
            // One non-recreatable volume pins the whole set: use it as-is.
            return vols;
        }
        // System VMs (console proxy / domain router / secondary storage VM):
        // if the volume's pool is being prepared for maintenance, recreate it elsewhere.
        if(vm.getType().equals(VirtualMachine.Type.ConsoleProxy) || vm.getType().equals(VirtualMachine.Type.DomainRouter) || vm.getType().equals(VirtualMachine.Type.SecondaryStorageVm))
        {
            StoragePoolVO sp = _storagePoolDao.findById(vol.getPoolId());
            if(sp!=null && sp.getStatus().equals(Status.PrepareForMaintenance))
            {
                recreateVols.add(vol);
                continue;
            }
        }
        // No pool-host mapping means this host cannot reach the volume's pool.
        StoragePoolHostVO ph = _storagePoolHostDao.findByPoolHost(vol.getPoolId(), host.getId());
        if (ph == null) {
            if (s_logger.isDebugEnabled()) {
                s_logger.debug("Must recreate " + vol + " since " + vol.getPoolId() + " has is not hooked up with host " + host.getId());
            }
            recreateVols.add(vol);
        }
    }
    if (recreateVols.size() == 0) {
        s_logger.debug("No need to recreate the volumes");
        return vols;
    }
    List<VolumeVO> createds = new ArrayList<VolumeVO>(recreateVols.size());
    for (VolumeVO vol : recreateVols) {
        // Clone the volume record, targeting the host's pod.
        VolumeVO create = new VolumeVO(vol.getVolumeType(), vol.getInstanceId(), vol.getTemplateId(), vol.getName(), vol.getDataCenterId(), host.getPodId(), vol.getAccountId(), vol.getDomainId(), vol.isRecreatable());
        create.setDiskOfferingId(vol.getDiskOfferingId());
        create.setDeviceId(vol.getDeviceId());
        create = _volsDao.persist(create);
        VMTemplateVO template = _templateDao.findById(create.getTemplateId());
        DataCenterVO dc = _dcDao.findById(create.getDataCenterId());
        HostPodVO pod = _podDao.findById(host.getPodId());
        DiskOfferingVO diskOffering = null;
        diskOffering = _diskOfferingDao.findById(vol.getDiskOfferingId());
        ServiceOfferingVO offering;
        if (vm instanceof UserVmVO) {
            offering = _offeringDao.findById(((UserVmVO)vm).getServiceOfferingId());
        } else {
            // NOTE(review): for non-user VMs the disk offering id is used to look up a
            // service offering — presumably the ids coincide for system VMs; verify.
            offering = _offeringDao.findById(vol.getDiskOfferingId());
        }
        VolumeVO created = createVolume(create, vm, template, dc, pod, host.getClusterId(), offering, diskOffering, new ArrayList<StoragePoolVO>(),0);
        if (created == null) {
            // NOTE(review): on failure we stop creating, yet all recreateVols are
            // still removed below — confirm this partial-failure behavior is intended.
            break;
        }
        createds.add(created);
    }
    // Remove the superseded volume records.
    for (VolumeVO vol : recreateVols) {
        _volsDao.remove(vol.getId());
    }
    return createds;
}
/**
 * Returns each volume of the VM paired with the storage pool it resides on.
 */
@Override
public List<Pair<VolumeVO, StoragePoolVO>> isStoredOn(VMInstanceVO vm) {
    List<Pair<VolumeVO, StoragePoolVO>> result = new ArrayList<Pair<VolumeVO, StoragePoolVO>>();
    for (VolumeVO volume : _volsDao.findByInstance(vm.getId())) {
        StoragePoolVO pool = _storagePoolDao.findById(volume.getPoolId());
        result.add(new Pair<VolumeVO, StoragePoolVO>(volume, pool));
    }
    return result;
}
/**
 * Returns true if this host has an LVM storage pool that still contains a
 * non-removed volume belonging to a VM in an active state (creating,
 * starting, running, stopping or migrating).
 */
@Override
public boolean isLocalStorageActiveOnHost(HostVO host) {
    List<StoragePoolHostVO> storagePoolHostRefs = _storagePoolHostDao.listByHostId(host.getId());
    for (StoragePoolHostVO storagePoolHostRef : storagePoolHostRefs) {
        StoragePoolVO storagePool = _storagePoolDao.findById(storagePoolHostRef.getPoolId());
        if (storagePool.getPoolType() == StoragePoolType.LVM) {
            // Volumes in this pool that have not been removed...
            SearchBuilder<VolumeVO> volumeSB = _volsDao.createSearchBuilder();
            volumeSB.and("poolId", volumeSB.entity().getPoolId(), SearchCriteria.Op.EQ);
            volumeSB.and("removed", volumeSB.entity().getRemoved(), SearchCriteria.Op.NULL);
            // ...joined on instance id to VM instances whose state is in the active set.
            SearchBuilder<VMInstanceVO> activeVmSB = _vmInstanceDao.createSearchBuilder();
            activeVmSB.and("state", activeVmSB.entity().getState(), SearchCriteria.Op.IN);
            volumeSB.join("activeVmSB", activeVmSB, volumeSB.entity().getInstanceId(), activeVmSB.entity().getId());
            SearchCriteria<VolumeVO> volumeSC = volumeSB.create();
            volumeSC.setParameters("poolId", storagePool.getId());
            volumeSC.setJoinParameters("activeVmSB", "state", new Object[] {State.Creating, State.Starting, State.Running, State.Stopping, State.Migrating});
            List<VolumeVO> volumes = _volsDao.search(volumeSC, null);
            // Any match means local storage is still in active use on this host.
            if (volumes.size() > 0) {
                return true;
            }
        }
    }
    return false;
}
/**
 * Unshares all created volumes of {@code vm} from {@code host}.
 *
 * @return the volumes (possibly empty) on success, or null if unsharing failed.
 */
@Override
public List<VolumeVO> unshare(VMInstanceVO vm, HostVO host) {
    final List<VolumeVO> vols = _volsDao.findCreatedByInstance(vm.getId());
    // isEmpty() is the idiomatic emptiness check (was size() == 0).
    if (vols.isEmpty()) {
        return vols;
    }
    return unshare(vm, vols, host) ? vols : null;
}
/**
 * Asks each configured storage-pool allocator in turn for a pool matching the
 * disk characteristics; the first non-null answer wins.
 *
 * @return a suitable pool, or null if no allocator could place the disk.
 */
protected StoragePoolVO findStoragePool(DiskProfile dskCh, final DataCenterVO dc, HostPodVO pod, Long clusterId, final ServiceOffering offering, final VMInstanceVO vm, final VMTemplateVO template, final Set<StoragePool> avoid) {
    Enumeration<StoragePoolAllocator> allocators = _storagePoolAllocators.enumeration();
    while (allocators.hasMoreElements()) {
        StoragePoolAllocator allocator = allocators.nextElement();
        StoragePool candidate = allocator.allocateToPool(dskCh, offering, dc, pod, clusterId, vm, template, avoid);
        if (candidate != null) {
            return (StoragePoolVO) candidate;
        }
    }
    return null;
}
/**
 * Returns the id of one up host attached to the pool, or null if none is up.
 */
@Override
public Long findHostIdForStoragePool(StoragePoolVO pool) {
    List<StoragePoolHostVO> poolHosts = _poolHostDao.listByHostStatus(pool.getId(), Status.Up);
    // isEmpty() is the idiomatic emptiness check (was size() == 0).
    return poolHosts.isEmpty() ? null : poolHosts.get(0).getHostId();
}
/**
 * Sends the commands to the pool by trying its up hosts in random order until
 * one accepts them.
 *
 * @return the answers from the first reachable host, or null if every host
 *         was unavailable or timed out (or the pool has no up hosts).
 */
@Override
public Answer[] sendToPool(StoragePoolVO pool, Command[] cmds, boolean stopOnError) {
    List<StoragePoolHostVO> poolHosts = _poolHostDao.listByHostStatus(pool.getId(), Status.Up);
    Collections.shuffle(poolHosts); // spread load across the pool's hosts
    for (StoragePoolHostVO poolHost : poolHosts) {
        try {
            return _agentMgr.send(poolHost.getHostId(), cmds, stopOnError);
        } catch (AgentUnavailableException e) {
            s_logger.debug("Moving on because unable to send to " + poolHost.getHostId() + " due to " + e.getMessage());
        } catch (OperationTimedoutException e) {
            s_logger.debug("Moving on because unable to send to " + poolHost.getHostId() + " due to " + e.getMessage());
        }
    }
    if (!poolHosts.isEmpty()) {
        s_logger.warn("Unable to send commands to the pool because we ran out of hosts to send to");
    }
    return null;
}
/**
 * Convenience wrapper: sends a single command to the pool (stop-on-error).
 *
 * @return the single answer, or null if no host could be reached.
 */
@Override
public Answer sendToPool(StoragePoolVO pool, Command cmd) {
    Answer[] answers = sendToPool(pool, new Command[] { cmd }, true);
    return answers == null ? null : answers[0];
}
/**
 * Builds the disk profile for a volume. A ROOT disk created from a (non-ISO)
 * template takes its size from a fully downloaded copy of the template in the
 * zone; any other disk takes its size from the disk offering.
 *
 * @throws CloudRuntimeException if the template is not completely downloaded
 *         anywhere in the zone.
 */
protected DiskProfile createDiskCharacteristics(VolumeVO volume, VMTemplateVO template, DataCenterVO dc, DiskOfferingVO diskOffering) {
    if (volume.getVolumeType() == VolumeType.ROOT && Storage.ImageFormat.ISO != template.getFormat()) {
        SearchCriteria<VMTemplateHostVO> sc = HostTemplateStatesSearch.create();
        sc.setParameters("id", template.getId());
        sc.setParameters("state", com.cloud.storage.VMTemplateStorageResourceAssoc.Status.DOWNLOADED);
        sc.setJoinParameters("host", "dcId", dc.getId());
        List<VMTemplateHostVO> sss = _vmTemplateHostDao.search(sc, null);
        if (sss.size() == 0) {
            throw new CloudRuntimeException("Template " + template.getName() + " has not been completely downloaded to zone " + dc.getId());
        }
        VMTemplateHostVO ss = sss.get(0);
        // The branch condition already guarantees a non-ISO template, so the old
        // "ISO != format ? template.getId() : null" ternary always yielded the id.
        return new DiskProfile(volume.getId(), volume.getVolumeType(), volume.getName(), diskOffering.getId(), ss.getSize(), diskOffering.getTagsArray(), diskOffering.getUseLocalStorage(), diskOffering.isRecreatable(), template.getId());
    } else {
        return new DiskProfile(volume.getId(), volume.getVolumeType(), volume.getName(), diskOffering.getId(), diskOffering.getDiskSizeInBytes(), diskOffering.getTagsArray(), diskOffering.getUseLocalStorage(), diskOffering.isRecreatable(), null);
    }
}
/**
 * A VM can restart on another server only if each of its created volumes is
 * either recreatable or on shared storage.
 */
@Override
public boolean canVmRestartOnAnotherServer(long vmId) {
    for (VolumeVO volume : _volsDao.findCreatedByInstance(vmId)) {
        boolean movable = volume.isRecreatable() || volume.getPoolType().isShared();
        if (!movable) {
            return false;
        }
    }
    return true;
}
/**
 * Creates a new DATADISK volume from a snapshot.
 *
 * <p>Flow: persist a placeholder volume record in Creating state (and attach
 * it to the current async job if any), then walk candidate pods and, within
 * each pod, candidate storage pools until a VDI is successfully created from
 * the snapshot. Finally update the record to Created (with pool/path details)
 * or Corrupted+destroyed in a transaction.
 *
 * @return the (possibly corrupted) volume record paired with the last failure
 *         detail string from the VDI creation attempt, if any.
 */
@DB
protected Pair<VolumeVO, String> createVolumeFromSnapshot(long userId, long accountId, String userSpecifiedName, DataCenterVO dc, DiskOfferingVO diskOffering, SnapshotVO snapshot, String templatePath, Long originalVolumeSize, VMTemplateVO template) {
    VolumeVO createdVolume = null;
    Long volumeId = null;
    String volumeFolder = null;
    // Create the Volume object and save it so that we can return it to the user
    Account account = _accountDao.findById(accountId);
    VolumeVO volume = new VolumeVO(userSpecifiedName, -1, -1, -1, -1, new Long(-1), null, null, 0, Volume.VolumeType.DATADISK);
    volume.setPoolId(null);
    volume.setDataCenterId(dc.getId());
    volume.setPodId(null);
    volume.setAccountId(accountId);
    volume.setDomainId(account.getDomainId());
    volume.setMirrorState(MirrorState.NOT_MIRRORED);
    if (diskOffering != null) {
        volume.setDiskOfferingId(diskOffering.getId());
    }
    volume.setSize(originalVolumeSize);
    volume.setStorageResourceType(Storage.StorageResourceType.STORAGE_POOL);
    volume.setInstanceId(null);
    volume.setUpdated(new Date());
    volume.setStatus(AsyncInstanceCreateStatus.Creating);
    volume.setSourceType(SourceType.Snapshot);
    volume.setSourceId(snapshot.getId());
    volume = _volsDao.persist(volume);
    volumeId = volume.getId();
    // If running under an async job, record the new volume id on the job so
    // the client can track progress.
    AsyncJobExecutor asyncExecutor = BaseAsyncJobExecutor.getCurrentExecutor();
    if(asyncExecutor != null) {
        AsyncJobVO job = asyncExecutor.getJob();
        if(s_logger.isInfoEnabled())
            s_logger.info("CreateVolume created a new instance " + volumeId + ", update async job-" + job.getId() + " progress status");
        _asyncMgr.updateAsyncJobAttachment(job.getId(), "volume", volumeId);
        _asyncMgr.updateAsyncJobStatus(job.getId(), BaseCmd.PROGRESS_INSTANCE_CREATED, volumeId);
    }
    final HashSet<StoragePool> poolsToAvoid = new HashSet<StoragePool>();
    StoragePoolVO pool = null;
    boolean success = false;
    Set<Long> podsToAvoid = new HashSet<Long>();
    Pair<HostPodVO, Long> pod = null;
    String volumeUUID = null;
    String details = null;
    DiskProfile dskCh = createDiskCharacteristics(volume, template, dc, diskOffering);
    // Determine what pod to store the volume in
    while ((pod = _agentMgr.findPod(null, null, dc, account.getId(), podsToAvoid)) != null) {
        // Determine what storage pool to store the volume in
        while ((pool = findStoragePool(dskCh, dc, pod.first(), null, null, null, null, poolsToAvoid)) != null) {
            volumeFolder = pool.getPath();
            if (s_logger.isDebugEnabled()) {
                s_logger.debug("Attempting to create volume from snapshotId: " + snapshot.getId() + " on storage pool " + pool.getName());
            }
            // Get the newly created VDI from the snapshot.
            // This will return a null volumePath if it could not be created
            Pair<String, String> volumeDetails = createVDIFromSnapshot(userId, snapshot, pool, templatePath);
            volumeUUID = volumeDetails.first();
            details = volumeDetails.second();
            if (volumeUUID != null) {
                if (s_logger.isDebugEnabled()) {
                    s_logger.debug("Volume with UUID " + volumeUUID + " was created on storage pool " + pool.getName());
                }
                success = true;
                break; // break out of the "find storage pool" loop
            }
            // NOTE(review): a failed pool is not added to poolsToAvoid here —
            // presumably findStoragePool/allocators handle that; verify, else
            // this inner loop could retry the same pool.
            s_logger.warn("Unable to create volume on pool " + pool.getName() + ", reason: " + details);
        }
        if (success) {
            break; // break out of the "find pod" loop
        } else {
            podsToAvoid.add(pod.first().getId());
        }
    }
    // Update the volume in the database
    Transaction txn = Transaction.currentTxn();
    txn.start();
    createdVolume = _volsDao.findById(volumeId);
    if (success) {
        // Increment the number of volumes
        _accountMgr.incrementResourceCount(accountId, ResourceType.volume);
        createdVolume.setStatus(AsyncInstanceCreateStatus.Created);
        createdVolume.setPodId(pod.first().getId());
        createdVolume.setPoolId(pool.getId());
        createdVolume.setPoolType(pool.getPoolType());
        createdVolume.setFolder(volumeFolder);
        createdVolume.setPath(volumeUUID);
        createdVolume.setDomainId(account.getDomainId());
    } else {
        // No pod/pool could produce the VDI: mark the placeholder corrupted.
        createdVolume.setStatus(AsyncInstanceCreateStatus.Corrupted);
        createdVolume.setDestroyed(true);
    }
    _volsDao.update(volumeId, createdVolume);
    txn.commit();
    return new Pair<VolumeVO, String>(createdVolume, details);
}
/**
 * Public entry point for creating a volume from a snapshot.
 *
 * Validates the snapshot's original volume (template availability for ROOT
 * disks), delegates the actual creation to the internal
 * {@code createVolumeFromSnapshot} overload, and records Started/Completed
 * events around the operation.
 *
 * @return the created volume, or null on failure (an error-level event is
 *         persisted describing why, where possible)
 */
@Override
@DB
public VolumeVO createVolumeFromSnapshot(long userId, long accountId, long snapshotId, String volumeName, long startEventId) {
    EventVO event = new EventVO();
    event.setUserId(userId);
    event.setAccountId(accountId);
    event.setType(EventTypes.EVENT_VOLUME_CREATE);
    event.setState(EventState.Started);
    event.setStartId(startEventId);
    event.setDescription("Creating volume from snapshot with id: "+snapshotId);
    _eventDao.persist(event);
    // By default, assume failure.
    VolumeVO createdVolume = null;
    String details = null;
    Long volumeId = null;
    SnapshotVO snapshot = _snapshotDao.findById(snapshotId); // Precondition: snapshot is not null and not removed.
    Long origVolumeId = snapshot.getVolumeId();
    VolumeVO originalVolume = _volsDao.findById(origVolumeId); // NOTE: Original volume could be destroyed and removed.
    if (originalVolume == null) {
        // FIX: the old code dereferenced originalVolume unconditionally and
        // NPE'd when the original volume had been removed.
        s_logger.error("Unable to find original volume with id: " + origVolumeId + " for snapshot id: " + snapshotId);
        return null;
    }
    String templatePath = null;
    VMTemplateVO template = null;
    if (originalVolume.getVolumeType().equals(Volume.VolumeType.ROOT)) {
        if (originalVolume.getTemplateId() == null) {
            details = "Null Template Id for Root Volume Id: " + origVolumeId + ". Cannot create volume from snapshot of root disk.";
            s_logger.error(details);
        } else {
            Long templateId = originalVolume.getTemplateId();
            template = _templateDao.findById(templateId);
            if (template == null) {
                details = "Unable find template id: " + templateId + " to create volume from root disk";
                s_logger.error(details);
            } else if (template.getFormat() != ImageFormat.ISO) {
                // For ISOs there is no base template VHD file. The root disk itself is the base template.
                // Creating a volume from an ISO Root Disk is the same as creating a volume for a Data Disk.
                // The template path is looked up via the secondary storage host's template association.
                HostVO secondaryStorageHost = getSecondaryStorageHost(originalVolume.getDataCenterId());
                VMTemplateHostVO templateHostVO = _templateHostDao.findByHostTemplate(secondaryStorageHost.getId(), templateId);
                if (templateHostVO == null ||
                    templateHostVO.getDownloadState() != VMTemplateStorageResourceAssoc.Status.DOWNLOADED ||
                    (templatePath = templateHostVO.getInstallPath()) == null)
                {
                    details = "Template id: " + templateId + " is not present on secondaryStorageHost Id: " + secondaryStorageHost.getId() + ". Can't create volume from ROOT DISK";
                }
            }
        }
    }
    if (details == null) {
        // everything went well till now
        DataCenterVO dc = _dcDao.findById(originalVolume.getDataCenterId());
        DiskOfferingVO diskOffering = null;
        if (originalVolume.getVolumeType() == VolumeType.DATADISK || originalVolume.getVolumeType() == VolumeType.ROOT) {
            Long diskOfferingId = originalVolume.getDiskOfferingId();
            if (diskOfferingId != null) {
                diskOffering = _diskOfferingDao.findById(diskOfferingId);
            }
        } else {
            // The code never reaches here.
            s_logger.error("Original volume must have been a ROOT DISK or a DATA DISK");
            return null;
        }
        Pair<VolumeVO, String> volumeDetails = createVolumeFromSnapshot(userId, accountId, volumeName, dc, diskOffering, snapshot, templatePath, originalVolume.getSize(), template);
        createdVolume = volumeDetails.first();
        if (createdVolume != null) {
            volumeId = createdVolume.getId();
        }
        details = volumeDetails.second();
    }
    // Record the terminal (Completed) event in its own transaction.
    Transaction txn = Transaction.currentTxn();
    txn.start();
    long templateId = -1;
    long diskOfferingId = -1;
    if (originalVolume.getTemplateId() != null) {
        templateId = originalVolume.getTemplateId();
    }
    // FIX: getDiskOfferingId() returns a Long that may be null; unconditional
    // unboxing into a long previously NPE'd here.
    if (originalVolume.getDiskOfferingId() != null) {
        diskOfferingId = originalVolume.getDiskOfferingId();
    }
    event = new EventVO();
    event.setAccountId(accountId);
    event.setUserId(userId);
    event.setType(EventTypes.EVENT_VOLUME_CREATE);
    event.setStartId(startEventId);
    event.setState(EventState.Completed);
    if (createdVolume != null && createdVolume.getPath() != null) {
        // FIX: size/pool were read before checking that creation succeeded,
        // which NPE'd whenever the precondition checks above set 'details'
        // and left createdVolume null.
        long sizeMB = createdVolume.getSize() / (1024 * 1024);
        String poolName = _storagePoolDao.findById(createdVolume.getPoolId()).getName();
        String eventParams = "id=" + volumeId + "\ndoId=" + diskOfferingId + "\ntId=" + templateId + "\ndcId=" + originalVolume.getDataCenterId() + "\nsize=" + sizeMB;
        event.setParameters(eventParams);
        event.setDescription("Created volume: " + createdVolume.getName() + " with size: " + sizeMB + " MB in pool: " + poolName + " from snapshot id: " + snapshotId);
        event.setLevel(EventVO.LEVEL_INFO);
    } else {
        details = "CreateVolume From Snapshot for snapshotId: " + snapshotId + " failed at the backend, reason " + details;
        event.setDescription(details);
        event.setLevel(EventVO.LEVEL_ERROR);
    }
    _eventDao.persist(event);
    txn.commit();
    return createdVolume;
}
/**
 * Asks the backend (via the hosts attached to the given pool) to materialize
 * a VDI on primary storage from the snapshot's backup on secondary storage.
 *
 * @param templatePath install path of the base template on secondary storage;
 *                     may be null (data-disk / ISO case)
 * @return pair of (VDI uuid or null on failure, the basic error message —
 *         note the message is returned even when the call succeeded)
 */
protected Pair<String, String> createVDIFromSnapshot(long userId, SnapshotVO snapshot, StoragePoolVO pool, String templatePath) {
    String vdiUUID = null;
    Long volumeId = snapshot.getVolumeId();
    VolumeVO volume = _volsDao.findById(volumeId);
    String primaryStoragePoolNameLabel = pool.getUuid(); // pool's uuid is actually the namelabel.
    String secondaryStoragePoolUrl = getSecondaryStorageURL(volume.getDataCenterId());
    Long dcId = volume.getDataCenterId();
    long accountId = volume.getAccountId();
    String backedUpSnapshotUuid = snapshot.getBackupSnapshotId();
    CreateVolumeFromSnapshotCommand createVolumeFromSnapshotCommand =
        new CreateVolumeFromSnapshotCommand(primaryStoragePoolNameLabel,
                                            secondaryStoragePoolUrl,
                                            dcId,
                                            accountId,
                                            volumeId,
                                            backedUpSnapshotUuid,
                                            snapshot.getName(),
                                            templatePath);
    String basicErrMsg = "Failed to create volume from " + snapshot.getName() + " for volume: " + volume.getId();
    // Retried send to any snapshot-capable host that can see this pool.
    CreateVolumeFromSnapshotAnswer answer = (CreateVolumeFromSnapshotAnswer) sendToHostsOnStoragePool(pool.getId(),
                                                                                                     createVolumeFromSnapshotCommand,
                                                                                                     basicErrMsg,
                                                                                                     _totalRetries,
                                                                                                     _pauseInterval,
                                                                                                     _shouldBeSnapshotCapable, null);
    if (answer != null && answer.getResult()) {
        vdiUUID = answer.getVdi();
    }
    else if (answer != null) {
        // Host answered but reported failure; a null answer means the send
        // itself failed (already logged by the retry helper).
        s_logger.error(basicErrMsg);
    }
    return new Pair<String, String>(vdiUUID, basicErrMsg);
}
/**
 * Allocates and creates one volume on a storage pool, retrying up to
 * {@code _retry} times across pools.
 *
 * Each attempt runs a short transaction: the pod row is locked, a pool is
 * chosen (and added to the avoid set so a failed pool is not retried), and
 * the volume's pool assignment is persisted before the backend command is
 * sent. For ROOT volumes from a non-ISO template the template is first
 * staged onto the pool.
 *
 * @return the updated volume on success, or null after exhausting retries
 *         (the volume row is then marked Failed and destroyed)
 * @throws CloudRuntimeException when the pod lock cannot be acquired
 */
@DB
protected VolumeVO createVolume(VolumeVO volume, VMInstanceVO vm, VMTemplateVO template, DataCenterVO dc, HostPodVO pod, Long clusterId,
                                ServiceOfferingVO offering, DiskOfferingVO diskOffering, List<StoragePoolVO> avoids, long size) {
    StoragePoolVO pool = null;
    final HashSet<StoragePool> avoidPools = new HashSet<StoragePool>(avoids);
    // ROOT disks created from a real template size off the service offering;
    // everything else (data disks, ISO-based roots) uses the disk offering.
    DiskProfile dskCh = null;
    if (volume.getVolumeType() == VolumeType.ROOT && Storage.ImageFormat.ISO != template.getFormat()) {
        dskCh = createDiskCharacteristics(volume, template, dc, offering);
    } else {
        dskCh = createDiskCharacteristics(volume, template, dc, diskOffering);
    }
    Transaction txn = Transaction.currentTxn();
    VolumeType volType = volume.getVolumeType();
    VolumeTO created = null;
    int retry = _retry;
    while (--retry >= 0) {
        created = null;
        txn.start();
        // Lock the pod row for the duration of pool selection.
        long podId = pod.getId();
        pod = _podDao.lock(podId, true);
        if (pod == null) {
            txn.rollback();
            volume.setStatus(AsyncInstanceCreateStatus.Failed);
            volume.setDestroyed(true);
            _volsDao.persist(volume);
            throw new CloudRuntimeException("Unable to acquire lock on the pod " + podId);
        }
        pool = findStoragePool(dskCh, dc, pod, clusterId, offering, vm, template, avoidPools);
        if (pool == null) {
            // No candidate pools left — give up.
            txn.rollback();
            break;
        }
        // Whether this attempt succeeds or not, never pick this pool again.
        avoidPools.add(pool);
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Trying to create " + volume + " on " + pool);
        }
        // Commit the pool assignment before talking to the backend.
        volume.setPoolId(pool.getId());
        _volsDao.persist(volume);
        txn.commit();
        CreateCommand cmd = null;
        VMTemplateStoragePoolVO tmpltStoredOn = null;
        if (volume.getVolumeType() == VolumeType.ROOT && Storage.ImageFormat.ISO != template.getFormat()) {
            // Stage the template on this pool first; if that fails try the next pool.
            tmpltStoredOn = _tmpltMgr.prepareTemplateForCreate(template, pool);
            if (tmpltStoredOn == null) {
                continue;
            }
            cmd = new CreateCommand(volume, vm, dskCh, tmpltStoredOn.getLocalDownloadPath(), pool);
        } else {
            cmd = new CreateCommand(volume, vm, dskCh, pool, size);
        }
        Answer answer = sendToPool(pool, cmd);
        if (answer != null && answer.getResult()) {
            created = ((CreateAnswer)answer).getVolume();
            break;
        }
        // Backend create failed: clear the tentative pool assignment and retry.
        volume.setPoolId(null);
        _volsDao.persist(volume);
        s_logger.debug("Retrying the create because it failed on pool " + pool);
    }
    if (created == null) {
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Unable to create a volume for " + volume);
        }
        volume.setStatus(AsyncInstanceCreateStatus.Failed);
        volume.setDestroyed(true);
        _volsDao.persist(volume);
        return null;
    }
    // Record the backend-reported location/size on the volume row.
    volume.setStatus(AsyncInstanceCreateStatus.Created);
    volume.setFolder(pool.getPath());
    volume.setPath(created.getPath());
    volume.setSize(created.getSize());
    volume.setPoolType(pool.getPoolType());
    volume.setPodId(pod.getId());
    _volsDao.persist(volume);
    return volume;
}
/**
 * Creates the volumes for a VM with no pools excluded up-front.
 * Thin convenience overload: delegates to the avoid-list variant.
 */
@Override
public List<VolumeVO> create(Account account, VMInstanceVO vm, VMTemplateVO template, DataCenterVO dc, HostPodVO pod, ServiceOfferingVO offering, DiskOfferingVO diskOffering, long size) throws StorageUnavailableException, ExecutionException {
    // Start with an empty avoid set; failed pools get added as we go.
    List<StoragePoolVO> nothingToAvoid = new ArrayList<StoragePoolVO>();
    return create(account, vm, template, dc, pod, offering, diskOffering, nothingToAvoid, size);
}
/**
 * Creates the ROOT volume (and, when the disk offering calls for one, a
 * DATADISK volume) for a VM.
 *
 * The volume rows are persisted first in a single transaction; the actual
 * backend creation then happens outside it via {@code createVolume}.  The
 * data disk is intentionally placed on a pool in the same cluster as the
 * root disk. If either create fails, any already-created root volume is
 * destroyed before rethrowing.
 *
 * @param size explicit size in bytes; when 0 or negative the disk offering's
 *             size is used
 * @return list containing the created root volume and, optionally, the data volume
 * @throws CloudRuntimeException when any volume could not be created
 */
@DB
protected List<VolumeVO> create(Account account, VMInstanceVO vm, VMTemplateVO template, DataCenterVO dc, HostPodVO pod,
                                ServiceOfferingVO offering, DiskOfferingVO diskOffering, List<StoragePoolVO> avoids, long size) {
    ArrayList<VolumeVO> vols = new ArrayList<VolumeVO>(2);
    VolumeVO dataVol = null;
    VolumeVO rootVol = null;
    Transaction txn = Transaction.currentTxn();
    txn.start();
    if (Storage.ImageFormat.ISO == template.getFormat()) {
        // ISO boot: the "root" disk is really a blank data-style disk sized
        // by the disk offering (or the explicit size).
        rootVol = new VolumeVO(VolumeType.ROOT, vm.getId(), vm.getInstanceName() + "-ROOT", dc.getId(), pod.getId(), account.getId(), account.getDomainId(),(size>0)? size : diskOffering.getDiskSizeInBytes());
        rootVol.setDiskOfferingId(diskOffering.getId());
        rootVol.setSourceType(SourceType.Template);
        rootVol.setSourceId(template.getId());
        rootVol.setDeviceId(0l);
        rootVol = _volsDao.persist(rootVol);
    } else {
        // Template boot: root disk is cloned from the template.
        rootVol = new VolumeVO(VolumeType.ROOT, vm.getId(), template.getId(), vm.getInstanceName() + "-ROOT", dc.getId(), pod.getId(), account.getId(), account.getDomainId(), offering.isRecreatable());
        rootVol.setDiskOfferingId(offering.getId());
        rootVol.setTemplateId(template.getId());
        rootVol.setSourceId(template.getId());
        rootVol.setSourceType(SourceType.Template);
        rootVol.setDeviceId(0l);
        rootVol = _volsDao.persist(rootVol);
        // Optional extra data disk when the offering specifies a size.
        if (diskOffering != null && diskOffering.getDiskSizeInBytes() > 0) {
            dataVol = new VolumeVO(VolumeType.DATADISK, vm.getId(), vm.getInstanceName() + "-DATA", dc.getId(), pod.getId(), account.getId(), account.getDomainId(), (size>0)? size : diskOffering.getDiskSizeInBytes());
            dataVol.setDiskOfferingId(diskOffering.getId());
            dataVol.setSourceType(SourceType.DiskOffering);
            dataVol.setSourceId(diskOffering.getId());
            dataVol.setDeviceId(1l);
            dataVol = _volsDao.persist(dataVol);
        }
    }
    txn.commit();
    VolumeVO dataCreated = null;
    VolumeVO rootCreated = null;
    try {
        rootCreated = createVolume(rootVol, vm, template, dc, pod, null, offering, diskOffering, avoids,size);
        if (rootCreated == null) {
            throw new CloudRuntimeException("Unable to create " + rootVol);
        }
        vols.add(rootCreated);
        if (dataVol != null) {
            // Keep the data disk in the same cluster as the root disk's pool.
            StoragePoolVO pool = _storagePoolDao.findById(rootCreated.getPoolId());
            dataCreated = createVolume(dataVol, vm, null, dc, pod, pool.getClusterId(), offering, diskOffering, avoids,size);
            if (dataCreated == null) {
                throw new CloudRuntimeException("Unable to create " + dataVol);
            }
            vols.add(dataCreated);
        }
        return vols;
    } catch (Exception e) {
        if (s_logger.isDebugEnabled()) {
            s_logger.debug(e.getMessage());
        }
        // Roll back the root volume so we don't leak storage on failure.
        if (rootCreated != null) {
            destroyVolume(rootCreated);
        }
        throw new CloudRuntimeException("Unable to create volumes for " + vm, e);
    }
}
/**
 * Creates the volumes for a new user VM, marks them attached, and records a
 * creation event per volume.
 *
 * @return the pool id of the first (root) volume
 * @throws CloudRuntimeException when no volumes could be created
 */
@Override
public long createUserVM(Account account, VMInstanceVO vm, VMTemplateVO template, DataCenterVO dc, HostPodVO pod, ServiceOfferingVO offering, DiskOfferingVO diskOffering,
                         List<StoragePoolVO> avoids, long size) {
    List<VolumeVO> volumes = create(account, vm, template, dc, pod, offering, diskOffering, avoids, size);
    if( volumes == null || volumes.size() == 0) {
        throw new CloudRuntimeException("Unable to create volume for " + vm.getName());
    }
    for (VolumeVO v : volumes) {
        //when the user vm is created, the volume is attached upon creation
        //set the attached datetime
        try{
            v.setAttached(new Date());
            _volsDao.update(v.getId(), v);
        }catch(Exception e)
        {
            // Best-effort: a failed attach-timestamp update should not fail VM creation.
            s_logger.warn("Error updating the attached value for volume "+v.getId()+":"+e);
        }
        // Only real (non-ISO) templates are reported on the root volume's event.
        long templateId = -1;
        if(v.getVolumeType() == VolumeType.ROOT && Storage.ImageFormat.ISO != template.getFormat()){
            templateId = template.getId();
        }
        long volumeId = v.getId();
        // Create an event
        long sizeMB = v.getSize() / (1024 * 1024);
        String eventParams = "id=" + volumeId + "\ndoId=" + v.getDiskOfferingId() + "\ntId=" + templateId + "\ndcId=" + dc.getId() + "\nsize=" + sizeMB;
        EventVO event = new EventVO();
        event.setAccountId(account.getId());
        event.setUserId(1L);
        event.setType(EventTypes.EVENT_VOLUME_CREATE);
        event.setParameters(eventParams);
        event.setDescription("Created volume: " + v.getName() + " with size: " + sizeMB + " MB");
        _eventDao.persist(event);
    }
    // create() guarantees at least the root volume with a pool assignment.
    return volumes.get(0).getPoolId();
}
/**
 * Picks a host to which a storage-pool command can be sent.
 *
 * When requested (and possible), prefers the host the VM currently resides
 * on; otherwise returns a random up host attached to the pool that is not
 * in the avoid list.
 *
 * @param avoidHosts host ids that must not be chosen
 * @return a usable host id, or null when none qualifies
 */
public Long chooseHostForStoragePool(StoragePoolVO poolVO, List<Long> avoidHosts, boolean sendToVmResidesOn, Long vmId) {
    if (sendToVmResidesOn) {
        if (vmId != null) {
            VMInstanceVO vmInstance = _vmInstanceDao.findById(vmId);
            if (vmInstance != null) {
                Long hostId = vmInstance.getHostId();
                if (hostId != null && !avoidHosts.contains(hostId)) {
                    return hostId;
                }
            }
        }
        /*Can't find the vm where host resides on(vm is destroyed? or volume is detached from vm), randomly choose a host to send the cmd */
    }
    List<StoragePoolHostVO> poolHosts = _poolHostDao.listByHostStatus(poolVO.getId(), Status.Up);
    // FIX: the null/empty check used to come AFTER Collections.shuffle, so a
    // null result would have thrown NPE before the guard ever ran.
    if (poolHosts != null && poolHosts.size() > 0) {
        Collections.shuffle(poolHosts);
        for (StoragePoolHostVO sphvo : poolHosts) {
            if (!avoidHosts.contains(sphvo.getHostId())) {
                return sphvo.getHostId();
            }
        }
    }
    return null;
}
/**
 * Asks each registered storage-pool allocator in turn for a storage IP; the
 * first non-null answer wins.
 *
 * @return the chosen storage IP, or null when no allocator could provide one
 *         (which trips the assertion under -ea)
 */
@Override
public String chooseStorageIp(VMInstanceVO vm, Host host, Host storage) {
    for (Enumeration<StoragePoolAllocator> en = _storagePoolAllocators.enumeration(); en.hasMoreElements(); ) {
        String candidate = en.nextElement().chooseStorageIp(vm, host, storage);
        if (candidate != null) {
            return candidate;
        }
    }
    assert false : "Hmm....fell thru the loop";
    return null;
}
/**
 * Unshares the VM's volumes from the given host (or from all hosts when
 * host is null). Currently a logging no-op that always reports success.
 */
@Override
public boolean unshare(VMInstanceVO vm, List<VolumeVO> vols, HostVO host) {
    if (s_logger.isDebugEnabled()) {
        String target = (host != null ? host.toString() : "all");
        s_logger.debug("Asking for volumes of " + vm.toString() + " to be unshared to " + target);
    }
    return true;
}
/**
 * Destroys the given volumes: detaches and marks each one destroyed, drops
 * its snapshot policies, then asks the backend (trying each host on the
 * pool) to delete the physical disk. The DB row is only removed once the
 * backend delete succeeds; otherwise an alert is raised so an operator can
 * clean up manually.
 *
 * @param vm  owning VM, used for logging only; may be null
 */
@Override
public void destroy(VMInstanceVO vm, List<VolumeVO> vols) {
    if (s_logger.isDebugEnabled() && vm != null) {
        s_logger.debug("Destroying volumes of " + vm.toString());
    }
    for (VolumeVO vol : vols) {
        _volsDao.detachVolume(vol.getId());
        _volsDao.destroyVolume(vol.getId());
        // First delete the entries in the snapshot_policy and
        // snapshot_schedule table for the volume.
        // They should not get executed after the volume is destroyed.
        _snapshotMgr.deletePoliciesForVolume(vol.getId());
        String volumePath = vol.getPath();
        Long poolId = vol.getPoolId();
        if (poolId != null && volumePath != null && !volumePath.trim().isEmpty()) {
            // The volume has a physical presence: try every host attached to
            // the pool until one accepts the destroy command.
            Answer answer = null;
            StoragePoolVO pool = _storagePoolDao.findById(poolId);
            final DestroyCommand cmd = new DestroyCommand(pool, vol);
            boolean removed = false;
            List<StoragePoolHostVO> poolhosts = _storagePoolHostDao.listByPoolId(poolId);
            for (StoragePoolHostVO poolhost : poolhosts) {
                answer = _agentMgr.easySend(poolhost.getHostId(), cmd);
                if (answer != null && answer.getResult()) {
                    removed = true;
                    break;
                }
            }
            if (removed) {
                _volsDao.remove(vol.getId());
            } else {
                // Physical delete failed everywhere — keep the row and alert.
                _alertMgr.sendAlert(AlertManager.ALERT_TYPE_STORAGE_MISC, vol.getDataCenterId(), vol.getPodId(),
                                    "Storage cleanup required for storage pool: " + pool.getName(), "Volume folder: " + vol.getFolder() + ", Volume Path: " + vol.getPath() + ", Volume id: " +vol.getId()+ ", Volume Name: " +vol.getName()+ ", Storage PoolId: " +vol.getPoolId());
                s_logger.warn("destroy volume " + vol.getFolder() + " : " + vol.getPath() + " failed for Volume id : " +vol.getId()+ " Volume Name: " +vol.getName()+ " Storage PoolId : " +vol.getPoolId());
            }
        } else {
            // Never materialized on storage: just remove the DB row.
            _volsDao.remove(vol.getId());
        }
    }
}
/**
 * One-time component configuration: wires up allocators and discoverers,
 * reads tunables from the configuration table, registers host-event
 * listeners, creates the cleanup scheduler and prepares the search builders
 * used by the rest of this manager.
 *
 * @return false only when the configuration DAO cannot be located
 * @throws ConfigurationException when no storage pool allocators are available
 */
@Override
public boolean configure(String name, Map<String, Object> params) throws ConfigurationException {
    _name = name;
    ComponentLocator locator = ComponentLocator.getCurrentLocator();
    ConfigurationDao configDao = locator.getDao(ConfigurationDao.class);
    if (configDao == null) {
        s_logger.error("Unable to get the configuration dao.");
        return false;
    }
    _storagePoolAllocators = locator.getAdapters(StoragePoolAllocator.class);
    if (!_storagePoolAllocators.isSet()) {
        throw new ConfigurationException("Unable to get any storage pool allocators.");
    }
    _discoverers = locator.getAdapters(StoragePoolDiscoverer.class);
    String overProvisioningFactorStr = (String) params.get("storage.overprovisioning.factor");
    if (overProvisioningFactorStr != null) {
        _overProvisioningFactor = Integer.parseInt(overProvisioningFactorStr);
    }
    // Tunables; second argument of parseInt is the default used when the key
    // is missing or unparsable.
    Map<String, String> configs = configDao.getConfiguration("management-server", params);
    _retry = NumbersUtil.parseInt(configs.get(Config.StartRetry.key()), 2);
    _pingInterval = NumbersUtil.parseInt(configs.get("ping.interval"), 60);
    _hostRetry = NumbersUtil.parseInt(configs.get("host.retry"), 2);
    _storagePoolAcquisitionWaitSeconds = NumbersUtil.parseInt(configs.get("pool.acquisition.wait.seconds"), 1800);
    s_logger.info("pool.acquisition.wait.seconds is configured as " + _storagePoolAcquisitionWaitSeconds + " seconds");
    _totalRetries = NumbersUtil.parseInt(configDao.getValue("total.retries"), 4);
    _pauseInterval = 2 * NumbersUtil.parseInt(configDao.getValue("ping.interval"), 60);
    // FIX: the config value may be absent; comparing constant-first avoids
    // the NPE the old hypervisoType.equalsIgnoreCase(...) call could throw.
    String hypervisorType = configDao.getValue("hypervisor.type");
    if ("KVM".equalsIgnoreCase(hypervisorType)) {
        _hypervisorType = Hypervisor.Type.KVM;
    } else if ("vmware".equalsIgnoreCase(hypervisorType)) {
        _hypervisorType = Hypervisor.Type.VmWare;
    }
    _agentMgr.registerForHostEvents(new StoragePoolMonitor(this, _hostDao, _storagePoolDao), true, false, true);
    // Storage cleanup defaults to enabled when the key is absent.
    String storageCleanupEnabled = configs.get("storage.cleanup.enabled");
    _storageCleanupEnabled = (storageCleanupEnabled == null) ? true : Boolean.parseBoolean(storageCleanupEnabled);
    String time = configs.get("storage.cleanup.interval");
    _storageCleanupInterval = NumbersUtil.parseInt(time, 86400);
    String workers = configs.get("expunge.workers");
    int wrks = NumbersUtil.parseInt(workers, 10);
    _executor = Executors.newScheduledThreadPool(wrks, new NamedThreadFactory("StorageManager-Scavenger"));
    boolean localStorage = Boolean.parseBoolean(configs.get(Config.UseLocalStorage.key()));
    if (localStorage) {
        _agentMgr.registerForHostEvents(ComponentLocator.inject(LocalStoragePoolListener.class), true, false, false);
    }
    // Search: pools referenced by a given VM's volumes.
    PoolsUsedByVmSearch = _storagePoolDao.createSearchBuilder();
    SearchBuilder<VolumeVO> volSearch = _volsDao.createSearchBuilder();
    PoolsUsedByVmSearch.join("volumes", volSearch, volSearch.entity().getPoolId(), PoolsUsedByVmSearch.entity().getId());
    volSearch.and("vm", volSearch.entity().getInstanceId(), SearchCriteria.Op.EQ);
    volSearch.done();
    PoolsUsedByVmSearch.done();
    // Search: template/host association states per data center.
    HostTemplateStatesSearch = _vmTemplateHostDao.createSearchBuilder();
    HostTemplateStatesSearch.and("id", HostTemplateStatesSearch.entity().getTemplateId(), SearchCriteria.Op.EQ);
    HostTemplateStatesSearch.and("state", HostTemplateStatesSearch.entity().getDownloadState(), SearchCriteria.Op.EQ);
    SearchBuilder<HostVO> HostSearch = _hostDao.createSearchBuilder();
    HostSearch.and("dcId", HostSearch.entity().getDataCenterId(), SearchCriteria.Op.EQ);
    HostTemplateStatesSearch.join("host", HostSearch, HostSearch.entity().getId(), HostTemplateStatesSearch.entity().getHostId());
    HostSearch.done();
    HostTemplateStatesSearch.done();
    return true;
}
/**
 * Builds the on-storage folder path for a volume:
 * {@code <parentDir>/u<accountId zero-padded to 6 digits>/<diskFolderName>}.
 */
public String getVolumeFolder(String parentDir, long accountId, String diskFolderName) {
    // FIX: the old Formatter instance was never closed; String.format
    // produces the identical result without the resource.
    return String.format("%s/u%06d/%s", parentDir, accountId, diskFolderName);
}
/**
 * Returns a fresh random (type 4) UUID string to use as a volume name.
 */
public String getRandomVolumeName() {
    UUID randomName = UUID.randomUUID();
    return randomName.toString();
}
/**
 * Reports whether the volume lives on a shared storage pool.
 * A volume without a pool assignment (or with an unknown pool) counts as
 * not shared.
 */
@Override
public boolean volumeOnSharedStoragePool(VolumeVO volume) {
    Long poolId = volume.getPoolId();
    if (poolId == null) {
        return false;
    }
    StoragePoolVO pool = _storagePoolDao.findById(poolId);
    return pool != null && pool.isShared();
}
/**
 * A volume is inactive when it is detached, or when its owning user VM is
 * known and Stopped. An unknown VM makes the volume count as active.
 */
@Override
public boolean volumeInactive(VolumeVO volume) {
    Long vmId = volume.getInstanceId();
    if (vmId == null) {
        return true;
    }
    UserVm vm = _userVmDao.findById(vmId);
    if (vm == null) {
        return false;
    }
    return vm.getState().equals(State.Stopped);
}
/**
 * Looks up the instance name of the VM the volume is attached to.
 *
 * @return the instance name, or null when the volume is detached or the VM
 *         cannot be found
 */
@Override
public String getVmNameOnVolume(VolumeVO volume) {
    Long vmId = volume.getInstanceId();
    if (vmId == null) {
        return null;
    }
    VMInstanceVO vm = _vmInstanceDao.findById(vmId);
    return (vm == null) ? null : vm.getInstanceName();
}
/**
 * Resolves the full URL of an ISO template by scanning the zone's secondary
 * storage hosts for one that has the template installed.
 *
 * @return "&lt;storage url&gt;/&lt;install path&gt;", or null when no host has it
 */
@Override
public String getAbsoluteIsoPath(long templateId, long dataCenterId) {
    List<HostVO> storageHosts = _hostDao.listBy(Host.Type.SecondaryStorage, dataCenterId);
    if (storageHosts == null) {
        return null;
    }
    for (HostVO storageHost : storageHosts) {
        VMTemplateHostVO templateHostVO = _vmTemplateHostDao.findByHostTemplate(storageHost.getId(), templateId);
        if (templateHostVO != null) {
            return storageHost.getStorageUrl() + "/" + templateHostVO.getInstallPath();
        }
    }
    return null;
}
/**
 * Returns the storage URL of the zone's secondary storage host, or null when
 * the zone has none.
 */
@Override
public String getSecondaryStorageURL(long zoneId) {
    HostVO ssHost = _hostDao.findSecondaryStorageHost(zoneId);
    return (ssHost == null) ? null : ssHost.getStorageUrl();
}
/** Returns the zone's secondary storage host row (may be null). */
@Override
public HostVO getSecondaryStorageHost(long zoneId) {
    return _hostDao.findSecondaryStorageHost(zoneId);
}
/** Returns the pool's detail keys whose value is "true", rendered as a CSV tag list. */
@Override
public String getStoragePoolTags(long poolId) {
    return _configMgr.listToCsvTags(_storagePoolDao.searchForStoragePoolDetails(poolId, "true"));
}
/** Returns the component name assigned in {@code configure}. */
@Override
public String getName() {
    return _name;
}
/**
 * Starts the manager: schedules the periodic storage garbage collector when
 * cleanup is enabled; otherwise just logs that it is skipped.
 *
 * @return always true
 */
@Override
public boolean start() {
    if (!_storageCleanupEnabled) {
        s_logger.debug("Storage cleanup is not enabled, so the storage cleanup thread is not being scheduled.");
        return true;
    }
    _executor.scheduleWithFixedDelay(new StorageGarbageCollector(), _storageCleanupInterval, _storageCleanupInterval, TimeUnit.SECONDS);
    return true;
}
/**
 * Stops the manager, shutting down the scavenger executor.
 *
 * @return always true
 */
@Override
public boolean stop() {
    // FIX: configure() creates _executor even when cleanup is disabled, so
    // shutting down only when _storageCleanupEnabled leaked its worker
    // threads. Shut it down whenever it exists.
    if (_executor != null) {
        _executor.shutdown();
    }
    return true;
}
/** Protected no-arg constructor: instances are created/injected by the ComponentLocator. */
protected StorageManagerImpl() {
}
/**
 * Creates a primary storage pool from a URI (nfs/file/iscsi/iso), persists
 * it, and mounts it on all routing hosts in scope.
 *
 * Tags are folded into the details map as "tag" -&gt; "true". The pool UUID is
 * derived deterministically from host + path so duplicates can be detected.
 * If no host manages to mount the pool it is expunged again and null is
 * returned.
 *
 * @throws ResourceInUseException       when the host/path or uuid is already in use
 * @throws IllegalArgumentException     when the URI scheme is missing/unsupported
 * @throws ResourceAllocationException  when no host exists to attach the pool to
 */
@Override
public StoragePoolVO createPool(long zoneId, Long podId, Long clusterId, String poolName, URI uri, String tags, Map<String, String> details) throws ResourceInUseException, IllegalArgumentException, UnknownHostException, ResourceAllocationException {
    // Fold CSV tags into the details map.
    if (tags != null) {
        if (details == null) {
            details = new HashMap<String, String>();
        }
        String[] tokens = tags.split(",");
        for (String tag : tokens) {
            tag = tag.trim();
            if (tag.length() == 0) {
                continue;
            }
            details.put(tag, "true");
        }
    }
    // Infer hypervisor type from any routing host in scope, falling back to
    // the globally configured type.
    Hypervisor.Type hypervisorType = null;
    List<HostVO> hosts = null;
    if (podId != null) {
        hosts = _hostDao.listByHostPod(podId);
    } else {
        hosts = _hostDao.listByDataCenter(zoneId);
    }
    for (HostVO h : hosts) {
        if (h.getType() == Type.Routing) {
            hypervisorType = h.getHypervisorType();
            break;
        }
    }
    if (hypervisorType == null) {
        if (_hypervisorType == Hypervisor.Type.KVM) {
            hypervisorType = Hypervisor.Type.KVM;
        } else if(_hypervisorType == Hypervisor.Type.VmWare) {
            hypervisorType = Hypervisor.Type.VmWare;
        } else {
            s_logger.debug("Couldn't find a host to serve in the server pool");
            return null;
        }
    }
    String scheme = uri.getScheme();
    // FIX: a relative URI has a null scheme; fail with the documented
    // IllegalArgumentException instead of an NPE below.
    if (scheme == null) {
        s_logger.warn("Unable to figure out the scheme for URI: " + uri);
        throw new IllegalArgumentException("Unable to figure out the scheme for URI: " + uri);
    }
    String storageHost = uri.getHost();
    String hostPath = uri.getPath();
    int port = uri.getPort();
    StoragePoolVO pool = null;
    s_logger.debug("createPool Params @ scheme - " +scheme+ " storageHost - " +storageHost+ " hostPath - " +hostPath+ " port - " +port);
    if (scheme.equalsIgnoreCase("nfs")) {
        if (port == -1) {
            port = 2049; // default NFS port
        }
        pool = new StoragePoolVO(StoragePoolType.NetworkFilesystem, storageHost, port, hostPath);
        if (hypervisorType == Hypervisor.Type.XenServer && clusterId == null) {
            throw new IllegalArgumentException("NFS need to have clusters specified for XenServers");
        }
    } else if (scheme.equalsIgnoreCase("file")) {
        if (port == -1) {
            port = 0;
        }
        pool = new StoragePoolVO(StoragePoolType.Filesystem, "localhost", 0, hostPath);
    } else if (scheme.equalsIgnoreCase("iscsi")) {
        String[] tokens = hostPath.split("/");
        int lun = NumbersUtil.parseInt(tokens[tokens.length - 1], -1);
        if (port == -1) {
            port = 3260; // default iSCSI port
        }
        if (lun != -1) {
            if (hypervisorType == Hypervisor.Type.XenServer && clusterId == null) {
                throw new IllegalArgumentException("IscsiLUN need to have clusters specified");
            }
            // FIX: the result of replaceFirst was previously discarded
            // (Strings are immutable), so the leading '/' was never actually
            // stripped. NOTE(review): existing pools were stored with the
            // unstripped path — confirm no duplicate-detection regression.
            hostPath = hostPath.replaceFirst("/", "");
            pool = new StoragePoolVO(StoragePoolType.IscsiLUN, storageHost, port, hostPath);
        } else {
            // No LUN in the path: let the discoverers probe the target.
            Enumeration<StoragePoolDiscoverer> en = _discoverers.enumeration();
            while (en.hasMoreElements()) {
                Map<StoragePoolVO, Map<String, String>> pools;
                try {
                    pools = en.nextElement().find(zoneId, podId, uri, details);
                } catch (DiscoveryException e) {
                    throw new IllegalArgumentException("Not enough information for discovery " + uri, e);
                }
                if (pools != null) {
                    Map.Entry<StoragePoolVO, Map<String, String>> entry = pools.entrySet().iterator().next();
                    pool = entry.getKey();
                    details = entry.getValue();
                    break;
                }
            }
        }
    } else if (scheme.equalsIgnoreCase("iso")) {
        if (port == -1) {
            port = 2049;
        }
        pool = new StoragePoolVO(StoragePoolType.ISO, storageHost, port, hostPath);
    } else {
        s_logger.warn("Unable to figure out the scheme for URI: " + uri);
        throw new IllegalArgumentException("Unable to figure out the scheme for URI: " + uri);
    }
    if (pool == null) {
        s_logger.warn("Unable to figure out the scheme for URI: " + uri);
        throw new IllegalArgumentException("Unable to figure out the scheme for URI: " + uri);
    }
    // Reject host/path combinations already claimed by another pod.
    List<StoragePoolVO> pools = _storagePoolDao.listPoolByHostPath(storageHost, hostPath);
    if (!pools.isEmpty()) {
        Long oldPodId = pools.get(0).getPodId();
        throw new ResourceInUseException("Storage pool " + uri + " already in use by another pod (id=" + oldPodId + ")", "StoragePool", uri.toASCIIString());
    }
    // iterate through all the hosts and ask them to mount the filesystem.
    // FIXME Not a very scalable implementation. Need an async listener, or
    // perhaps do this on demand, or perhaps mount on a couple of hosts per
    // pod
    List<HostVO> allHosts = _hostDao.listBy(Host.Type.Routing, clusterId, podId, zoneId);
    if (allHosts.isEmpty() && _hypervisorType != Hypervisor.Type.KVM) {
        throw new ResourceAllocationException("No host exists to associate a storage pool with");
    }
    long poolId = _storagePoolDao.getNextInSequence(Long.class, "id");
    // Deterministic uuid from host+path (the redundant new String(...) wrapper was dropped).
    String uuid = UUID.nameUUIDFromBytes((storageHost + hostPath).getBytes()).toString();
    List<StoragePoolVO> spHandles = _storagePoolDao.findIfDuplicatePoolsExistByUUID(uuid);
    if (spHandles != null && spHandles.size() > 0) {
        s_logger.debug("Another active pool with the same uuid already exists");
        throw new ResourceInUseException("Another active pool with the same uuid already exists");
    }
    s_logger.debug("In createPool Setting poolId - " +poolId+ " uuid - " +uuid+ " zoneId - " +zoneId+ " podId - " +podId+ " poolName - " +poolName);
    pool.setId(poolId);
    pool.setUuid(uuid);
    pool.setDataCenterId(zoneId);
    pool.setPodId(podId);
    pool.setName(poolName);
    pool.setClusterId(clusterId);
    pool.setStatus(Status.Up);
    pool = _storagePoolDao.persist(pool, details);
    // KVM with no hosts yet: the pool is registered but mounted lazily.
    if (_hypervisorType == Hypervisor.Type.KVM && allHosts.isEmpty()) {
        return pool;
    }
    s_logger.debug("In createPool Adding the pool to each of the hosts");
    List<HostVO> poolHosts = new ArrayList<HostVO>();
    for (HostVO h : allHosts) {
        boolean success = addPoolToHost(h.getId(), pool);
        if (success) {
            poolHosts.add(h);
        }
    }
    if (poolHosts.isEmpty()) {
        // Nobody could mount it: roll the pool back out of the DB.
        _storagePoolDao.expunge(pool.getId());
        pool = null;
    } else {
        createCapacityEntry(pool);
    }
    return pool;
}
/**
 * Adds the given CSV tags (as "tag" -&gt; "true" detail entries) to an existing
 * storage pool.
 *
 * @return the pool row
 * @throws IllegalArgumentException when no pool exists with the given id
 */
@Override
public StoragePoolVO updateStoragePool(long poolId, String tags) throws IllegalArgumentException {
    StoragePoolVO pool = _storagePoolDao.findById(poolId);
    if (pool == null) {
        throw new IllegalArgumentException("Unable to find storage pool with ID: " + poolId);
    }
    if (tags == null) {
        return pool;
    }
    Map<String, String> details = _storagePoolDao.getDetails(poolId);
    for (String rawTag : tags.split(",")) {
        String tag = rawTag.trim();
        // Skip blanks and tags already present.
        if (tag.length() > 0 && !details.containsKey(tag)) {
            details.put(tag, "true");
        }
    }
    _storagePoolDao.updateDetails(poolId, details);
    return pool;
}
/**
 * Deletes a storage pool.
 *
 * A pool with no host associations is removed directly. Otherwise the pool
 * must have no volumes; the pool row is then locked while each attached host
 * is asked to tear down the backend storage (one success suffices), after
 * which the host associations and the pool row are removed.
 *
 * @return true when the pool row was removed, false otherwise (unknown pool,
 *         volumes still present, lock failure, or no host accepted the delete)
 */
@Override
@DB
public boolean deletePool(long id) {
    boolean deleteFlag = false;
    // get the pool to delete
    StoragePoolVO sPool = _storagePoolDao.findById(id);
    if (sPool == null)
        return false;
    // for the given pool id, find all records in the storage_pool_host_ref
    List<StoragePoolHostVO> hostPoolRecords = _storagePoolHostDao.listByPoolId(id);
    // if not records exist, delete the given pool (base case)
    if (hostPoolRecords.size() == 0) {
        _storagePoolDao.remove(id);
        return true;
    } else {
        // 1. Check if the pool has associated volumes in the volumes table
        // 2. If it does, then you cannot delete the pool
        Pair<Long, Long> volumeRecords = _volsDao.getCountAndTotalByPool(id);
        if (volumeRecords.first() > 0) {
            return false; // cannot delete as there are associated vols
        }
        // 3. Else part, remove the SR associated with the Xenserver
        else {
            // First get the host_id from storage_pool_host_ref for given
            // pool id
            StoragePoolVO lock = _storagePoolDao.acquire(sPool.getId());
            try {
                if (lock == null) {
                    s_logger.debug("Failed to acquire lock when deleting StoragePool with ID: " + sPool.getId());
                    return false;
                }
                // One host succeeding at the backend delete is enough.
                for (StoragePoolHostVO host : hostPoolRecords) {
                    DeleteStoragePoolCommand cmd = new DeleteStoragePoolCommand(sPool);
                    final Answer answer = _agentMgr.easySend(host.getHostId(), cmd);
                    if (answer != null) {
                        if (answer.getResult() == true) {
                            deleteFlag = true;
                            break;
                        }
                    }
                }
            } finally {
                // Always release the lock, even on early return above.
                if (lock != null) {
                    _storagePoolDao.release(lock.getId());
                }
            }
            if (deleteFlag) {
                // now delete the storage_pool_host_ref and storage_pool
                // records
                for (StoragePoolHostVO host : hostPoolRecords) {
                    _storagePoolHostDao.deleteStoragePoolHostDetails(host.getHostId(),host.getPoolId());
                }
                _storagePoolDao.remove(id);
                return true;
            }
        }
    }
    return false;
}
@Override
public boolean addPoolToHost(long hostId, StoragePoolVO pool) {
s_logger.debug("Adding pool " + pool.getName() + " to host " + hostId);
if (pool.getPoolType() != StoragePoolType.NetworkFilesystem && pool.getPoolType() != StoragePoolType.Filesystem && pool.getPoolType() != StoragePoolType.IscsiLUN && pool.getPoolType() != StoragePoolType.Iscsi) {
return true;
}
ModifyStoragePoolCommand cmd = new ModifyStoragePoolCommand(true, pool);
final Answer answer = _agentMgr.easySend(hostId, cmd);
if (answer != null) {
if (answer.getResult() == false) {
String msg = "Add host failed due to ModifyStoragePoolCommand failed" + answer.getDetails();
_alertMgr.sendAlert(AlertManager.ALERT_TYPE_HOST, pool.getDataCenterId(), pool.getPodId(), msg, msg);
s_logger.warn(msg);
return false;
}
if (answer instanceof ModifyStoragePoolAnswer) {
ModifyStoragePoolAnswer mspAnswer = (ModifyStoragePoolAnswer) answer;
StoragePoolHostVO poolHost = _poolHostDao.findByPoolHost(pool.getId(), hostId);
if (poolHost == null) {
poolHost = new StoragePoolHostVO(pool.getId(), hostId, mspAnswer.getPoolInfo().getLocalPath().replaceAll("
_poolHostDao.persist(poolHost);
} else {
poolHost.setLocalPath(mspAnswer.getPoolInfo().getLocalPath().replaceAll("
}
pool.setAvailableBytes(mspAnswer.getPoolInfo().getAvailableBytes());
pool.setCapacityBytes(mspAnswer.getPoolInfo().getCapacityBytes());
_storagePoolDao.update(pool.getId(), pool);
return true;
}
} else {
return false;
}
return false;
}
/**
 * Moves a volume to a storage pool matching the given zone/pod/cluster by
 * copying it through secondary storage (source pool -> secondary storage ->
 * destination pool) and then destroying the source copy.
 *
 * @return the refreshed volume record pointing at its new pool
 * @throws InternalErrorException if no destination pool or visible host can
 *         be found, or if any copy/destroy step fails
 */
@Override
public VolumeVO moveVolume(VolumeVO volume, long destPoolDcId, Long destPoolPodId, Long destPoolClusterId) throws InternalErrorException {
    // Find a destination storage pool with the specified criteria
    DiskOfferingVO diskOffering = _diskOfferingDao.findById(volume.getDiskOfferingId());
    DiskProfile dskCh = new DiskProfile(volume.getId(), volume.getVolumeType(), volume.getName(), diskOffering.getId(), diskOffering.getDiskSizeInBytes(), diskOffering.getTagsArray(), diskOffering.getUseLocalStorage(), diskOffering.isRecreatable(), null);
    DataCenterVO destPoolDataCenter = _dcDao.findById(destPoolDcId);
    HostPodVO destPoolPod = _podDao.findById(destPoolPodId);
    StoragePoolVO destPool = findStoragePool(dskCh, destPoolDataCenter, destPoolPod, destPoolClusterId, null, null, null, new HashSet<StoragePool>());
    if (destPool == null) {
        throw new InternalErrorException("Failed to find a storage pool with enough capacity to move the volume to.");
    }
    StoragePoolVO srcPool = _storagePoolDao.findById(volume.getPoolId());
    String secondaryStorageURL = getSecondaryStorageURL(volume.getDataCenterId());
    String secondaryStorageVolumePath = null;
    // Find hosts where the source and destination storage pools are visible
    Long sourceHostId = findHostIdForStoragePool(srcPool);
    Long destHostId = findHostIdForStoragePool(destPool);
    if (sourceHostId == null) {
        throw new InternalErrorException("Failed to find a host where the source storage pool is visible.");
    } else if (destHostId == null) {
        throw new InternalErrorException("Failed to find a host where the dest storage pool is visible.");
    }
    // Copy the volume from the source storage pool to secondary storage
    // (the trailing boolean appears to select copy direction: true = to
    // secondary storage, false = from it — TODO confirm against CopyVolumeCommand).
    CopyVolumeCommand cvCmd = new CopyVolumeCommand(volume.getId(), volume.getPath(), srcPool, secondaryStorageURL, true);
    CopyVolumeAnswer cvAnswer = (CopyVolumeAnswer) _agentMgr.easySend(sourceHostId, cvCmd);
    if (cvAnswer == null || !cvAnswer.getResult()) {
        throw new InternalErrorException("Failed to copy the volume from the source primary storage pool to secondary storage.");
    }
    secondaryStorageVolumePath = cvAnswer.getVolumePath();
    // Copy the volume from secondary storage to the destination storage
    // pool
    cvCmd = new CopyVolumeCommand(volume.getId(), secondaryStorageVolumePath, destPool, secondaryStorageURL, false);
    cvAnswer = (CopyVolumeAnswer) _agentMgr.easySend(destHostId, cvCmd);
    if (cvAnswer == null || !cvAnswer.getResult()) {
        throw new InternalErrorException("Failed to copy the volume from secondary storage to the destination primary storage pool.");
    }
    String destPrimaryStorageVolumePath = cvAnswer.getVolumePath();
    String destPrimaryStorageVolumeFolder = cvAnswer.getVolumeFolder();
    // Delete the volume on the source storage pool
    final DestroyCommand cmd = new DestroyCommand(srcPool, volume);
    Answer destroyAnswer = _agentMgr.easySend(sourceHostId, cmd);
    if (destroyAnswer == null || !destroyAnswer.getResult()) {
        throw new InternalErrorException("Failed to delete the volume from the source primary storage pool.");
    }
    // Point the volume record at its new location and return the fresh row.
    volume.setPath(destPrimaryStorageVolumePath);
    volume.setFolder(destPrimaryStorageVolumeFolder);
    volume.setPodId(destPool.getPodId());
    volume.setPoolId(destPool.getId());
    _volsDao.update(volume.getId(), volume);
    return _volsDao.findById(volume.getId());
}
/**
 * Creates a standalone DATADISK volume for an account: persists a placeholder
 * volume row, tries each pod in the zone until physical allocation succeeds,
 * then records an EVENT_VOLUME_CREATE event. On failure the placeholder row
 * is marked Corrupted/destroyed and null is returned.
 *
 * NOTE(review): any exception is logged and swallowed, so callers only see a
 * null return — confirm this best-effort contract is intended.
 */
@Override
@DB
public VolumeVO createVolume(long accountId, long userId, String userSpecifiedName, DataCenterVO dc, DiskOfferingVO diskOffering, long startEventId, long size)
{
    String volumeName = "";
    VolumeVO createdVolume = null;
    try
    {
        // Determine the volume's name
        // NOTE(review): volumeName is only used in the error log below; the
        // persisted volume keeps userSpecifiedName — confirm this is intended.
        volumeName = getRandomVolumeName();
        // Create the Volume object and save it so that we can return it to the user
        Account account = _accountDao.findById(accountId);
        VolumeVO volume = new VolumeVO(userSpecifiedName, -1, -1, -1, -1, new Long(-1), null, null, 0, Volume.VolumeType.DATADISK);
        volume.setPoolId(null);
        volume.setDataCenterId(dc.getId());
        volume.setPodId(null);
        volume.setAccountId(accountId);
        volume.setDomainId(account.getDomainId());
        volume.setMirrorState(MirrorState.NOT_MIRRORED);
        volume.setDiskOfferingId(diskOffering.getId());
        volume.setStorageResourceType(Storage.StorageResourceType.STORAGE_POOL);
        volume.setInstanceId(null);
        volume.setUpdated(new Date());
        volume.setStatus(AsyncInstanceCreateStatus.Creating);
        // NOTE(review): setDomainId is called a second time with the same
        // value here — redundant but harmless.
        volume.setDomainId(account.getDomainId());
        volume.setSourceId(diskOffering.getId());
        volume.setSourceType(SourceType.DiskOffering);
        volume = _volsDao.persist(volume);
        // If running inside an async job, attach the new volume id to the job
        // so the client can poll progress.
        AsyncJobExecutor asyncExecutor = BaseAsyncJobExecutor.getCurrentExecutor();
        if (asyncExecutor != null) {
            AsyncJobVO job = asyncExecutor.getJob();
            if (s_logger.isInfoEnabled())
                s_logger.info("CreateVolume created a new instance " + volume.getId() + ", update async job-" + job.getId() + " progress status");
            _asyncMgr.updateAsyncJobAttachment(job.getId(), "volume", volume.getId());
            _asyncMgr.updateAsyncJobStatus(job.getId(), BaseCmd.PROGRESS_INSTANCE_CREATED, volume.getId());
        }
        // Walk the zone's pods until one of them can physically create the
        // volume; pods that fail are excluded from subsequent attempts.
        List<StoragePoolVO> poolsToAvoid = new ArrayList<StoragePoolVO>();
        Set<Long> podsToAvoid = new HashSet<Long>();
        Pair<HostPodVO, Long> pod = null;
        while ((pod = _agentMgr.findPod(null, null, dc, account.getId(), podsToAvoid)) != null) {
            if ((createdVolume = createVolume(volume, null, null, dc, pod.first(), null, null, diskOffering, poolsToAvoid, size)) != null) {
                break;
            } else {
                podsToAvoid.add(pod.first().getId());
            }
        }
        // Create an event
        EventVO event = new EventVO();
        event.setAccountId(accountId);
        event.setUserId(userId);
        event.setType(EventTypes.EVENT_VOLUME_CREATE);
        event.setStartId(startEventId);
        // Resource accounting and the audit event are committed atomically.
        Transaction txn = Transaction.currentTxn();
        txn.start();
        if (createdVolume != null) {
            // Increment the number of volumes
            _accountMgr.incrementResourceCount(accountId, ResourceType.volume);
            // Set event parameters
            long sizeMB = createdVolume.getSize() / (1024 * 1024);
            StoragePoolVO pool = _storagePoolDao.findById(createdVolume.getPoolId());
            String eventParams = "id=" + createdVolume.getId() + "\ndoId=" + diskOffering.getId() + "\ntId=" + -1 + "\ndcId=" + dc.getId() + "\nsize=" + sizeMB;
            event.setLevel(EventVO.LEVEL_INFO);
            event.setDescription("Created volume: " + createdVolume.getName() + " with size: " + sizeMB + " MB in pool: " + pool.getName());
            event.setParameters(eventParams);
            _eventDao.persist(event);
        } else {
            // Mark the existing volume record as corrupted
            volume.setStatus(AsyncInstanceCreateStatus.Corrupted);
            volume.setDestroyed(true);
            _volsDao.update(volume.getId(), volume);
        }
        txn.commit();
    } catch (Exception e) {
        s_logger.error("Unhandled exception while saving volume " + volumeName, e);
    }
    return createdVolume;
}
/**
 * Marks a volume destroyed and performs the associated bookkeeping — audit
 * event, snapshot-policy cleanup, and account quota release — inside one
 * transaction.
 *
 * @param volume the volume to destroy
 */
@Override
@DB
public void destroyVolume(VolumeVO volume) {
    Transaction txn = Transaction.currentTxn();
    txn.start();
    Long volumeId = volume.getId();
    _volsDao.destroyVolume(volumeId);
    // Audit-trail event for the deletion.
    EventVO deleteEvent = new EventVO();
    deleteEvent.setAccountId(volume.getAccountId());
    deleteEvent.setUserId(1L);
    deleteEvent.setType(EventTypes.EVENT_VOLUME_DELETE);
    deleteEvent.setLevel(EventVO.LEVEL_INFO);
    deleteEvent.setDescription("Volume " + volume.getName() + " deleted");
    deleteEvent.setParameters("id=" + volumeId);
    _eventDao.persist(deleteEvent);
    // Recurring snapshot policies are meaningless once the volume is gone.
    _snapshotMgr.deletePoliciesForVolume(volumeId);
    // Give the owning account back one unit of its volume quota.
    _accountMgr.decrementResourceCount(volume.getAccountId(), ResourceType.volume);
    txn.commit();
}
/**
 * Creates or refreshes the two capacity rows for a storage pool:
 * CAPACITY_TYPE_STORAGE (raw bytes) and CAPACITY_TYPE_STORAGE_ALLOCATED
 * (bytes scaled by the over-provisioning factor).
 *
 * Fix: the over-provisioning factor was applied only to NFS pools when the
 * allocated-capacity row was first created, but to every pool type when an
 * existing row was updated (and in the final log line). Both branches now use
 * the same factor.
 *
 * @param storagePool pool whose capacity rows are being maintained
 */
@Override
public void createCapacityEntry(StoragePoolVO storagePool) {
    // --- CAPACITY_TYPE_STORAGE: raw pool capacity ---
    SearchCriteria<CapacityVO> capacitySC = _capacityDao.createSearchCriteria();
    capacitySC.addAnd("hostOrPoolId", SearchCriteria.Op.EQ, storagePool.getId());
    capacitySC.addAnd("dataCenterId", SearchCriteria.Op.EQ, storagePool.getDataCenterId());
    capacitySC.addAnd("capacityType", SearchCriteria.Op.EQ, CapacityVO.CAPACITY_TYPE_STORAGE);
    List<CapacityVO> capacities = _capacityDao.search(capacitySC, null);
    if (capacities.isEmpty()) {
        CapacityVO capacity = new CapacityVO(storagePool.getId(), storagePool.getDataCenterId(), storagePool.getPodId(), 0L, storagePool.getCapacityBytes(),
                CapacityVO.CAPACITY_TYPE_STORAGE);
        _capacityDao.persist(capacity);
    } else {
        // Only write when the recorded total actually changed.
        CapacityVO capacity = capacities.get(0);
        if (capacity.getTotalCapacity() != storagePool.getCapacityBytes()) {
            capacity.setTotalCapacity(storagePool.getCapacityBytes());
            _capacityDao.update(capacity.getId(), capacity);
        }
    }
    s_logger.debug("Successfully set Capacity - " + storagePool.getCapacityBytes() + " for CAPACITY_TYPE_STORAGE, DataCenterId - " + storagePool.getDataCenterId() + ", HostOrPoolId - " + storagePool.getId() + ", PodId " + storagePool.getPodId());
    // --- CAPACITY_TYPE_STORAGE_ALLOCATED: capacity scaled by over-provisioning ---
    // Over-provisioning applies only to NFS pools; use the same factor for the
    // create branch, the update branch, and the log line.
    int provFactor = 1;
    if (storagePool.getPoolType() == StoragePoolType.NetworkFilesystem) {
        provFactor = _overProvisioningFactor;
    }
    capacitySC = _capacityDao.createSearchCriteria();
    capacitySC.addAnd("hostOrPoolId", SearchCriteria.Op.EQ, storagePool.getId());
    capacitySC.addAnd("dataCenterId", SearchCriteria.Op.EQ, storagePool.getDataCenterId());
    capacitySC.addAnd("capacityType", SearchCriteria.Op.EQ, CapacityVO.CAPACITY_TYPE_STORAGE_ALLOCATED);
    capacities = _capacityDao.search(capacitySC, null);
    if (capacities.isEmpty()) {
        CapacityVO capacity = new CapacityVO(storagePool.getId(), storagePool.getDataCenterId(), storagePool.getPodId(), 0L, storagePool.getCapacityBytes()
                * provFactor, CapacityVO.CAPACITY_TYPE_STORAGE_ALLOCATED);
        _capacityDao.persist(capacity);
    } else {
        CapacityVO capacity = capacities.get(0);
        long currCapacity = provFactor * storagePool.getCapacityBytes();
        if (capacity.getTotalCapacity() != currCapacity) {
            capacity.setTotalCapacity(currCapacity);
            _capacityDao.update(capacity.getId(), capacity);
        }
    }
    s_logger.debug("Successfully set Capacity - " + storagePool.getCapacityBytes() * provFactor + " for CAPACITY_TYPE_STORAGE_ALLOCATED, DataCenterId - " + storagePool.getDataCenterId() + ", HostOrPoolId - " + storagePool.getId() + ", PodId " + storagePool.getPodId());
}
/**
 * Convenience overload: sends the command once (single try, no pause, no
 * snapshot-capability requirement, no VM affinity).
 *
 * @see #sendToHostsOnStoragePool(Long, Command, String, int, int, boolean, Long)
 */
@Override
public Answer sendToHostsOnStoragePool(Long poolId, Command cmd, String basicErrMsg) {
    return sendToHostsOnStoragePool(poolId, cmd, basicErrMsg, 1, 0, false, null);
}
/**
 * Sends a command to some host that can see the given storage pool, retrying
 * on other hosts up to totalRetries times, with a 120-minute per-attempt
 * agent timeout. If no host is initially available, waits 3 * ping.interval
 * first to let a reconnecting host come back.
 *
 * Fix: InterruptedException was caught and swallowed in two places without
 * restoring the thread's interrupt status; both catch sites now re-interrupt
 * so callers can observe the interruption.
 *
 * @return the successful Answer, or the last (possibly null) answer if every
 *         attempt failed
 */
@Override
public Answer sendToHostsOnStoragePool(Long poolId, Command cmd, String basicErrMsg, int totalRetries, int pauseBeforeRetry, boolean shouldBeSnapshotCapable,
Long vmId) {
    Answer answer = null;
    Long hostId = null;
    StoragePoolVO storagePool = _storagePoolDao.findById(poolId);
    List<Long> hostsToAvoid = new ArrayList<Long>();
    int tryCount = 0;
    boolean sendToVmHost = sendToVmResidesOn(cmd);
    if (chooseHostForStoragePool(storagePool, hostsToAvoid, sendToVmHost, vmId) == null) {
        // Don't just fail. The host could be reconnecting.
        // wait for some time for it to get connected
        // Wait for 3*ping.interval, since the code attempts a manual
        // reconnect after that timeout.
        try {
            Thread.sleep(3 * _pingInterval * 1000);
        } catch (InterruptedException e) {
            s_logger.error("Interrupted while waiting for any host on poolId: " + poolId + " to get connected. " + e.getMessage());
            // Restore the interrupt flag so callers can observe the interruption.
            Thread.currentThread().interrupt();
            // continue.
        }
    }
    while ((hostId = chooseHostForStoragePool(storagePool, hostsToAvoid, sendToVmHost, vmId)) != null && tryCount++ < totalRetries) {
        String errMsg = basicErrMsg + " on host: " + hostId + " try: " + tryCount + ", reason: ";
        try {
            HostVO hostVO = _hostDao.findById(hostId);
            if (shouldBeSnapshotCapable) {
                // Skip hosts that no longer exist when snapshot capability matters.
                if (hostVO == null ) {
                    hostsToAvoid.add(hostId);
                    continue;
                }
            }
            s_logger.debug("Trying to execute Command: " + cmd + " on host: " + hostId + " try: " + tryCount);
            // set 120 min timeout for storage related command
            answer = _agentMgr.send(hostId, cmd, 120*60*1000);
            if (answer != null && answer.getResult()) {
                return answer;
            } else {
                s_logger.warn(errMsg + ((answer != null) ? answer.getDetails() : "null"));
                Thread.sleep(pauseBeforeRetry * 1000);
            }
        } catch (AgentUnavailableException e1) {
            s_logger.warn(errMsg + e1.getMessage(), e1);
        } catch (OperationTimedoutException e1) {
            s_logger.warn(errMsg + e1.getMessage(), e1);
        } catch (InterruptedException e) {
            s_logger.warn(errMsg + e.getMessage(), e);
            // Restore the interrupt flag; the loop condition will be re-evaluated.
            Thread.currentThread().interrupt();
        }
    }
    s_logger.error(basicErrMsg + ", no hosts available to execute command: " + cmd);
    return answer;
}
/**
 * Periodic task that runs a storage garbage-collection pass, serialized
 * across management servers via a named global lock.
 */
protected class StorageGarbageCollector implements Runnable {

    public StorageGarbageCollector() {
    }

    @Override
    public void run() {
        try {
            s_logger.info("Storage Garbage Collection Thread is running.");
            collectUnderLock();
        } catch (Exception e) {
            s_logger.error("Caught the following Exception", e);
        }
    }

    // Acquires the class-named global lock (3s wait) and, if obtained, runs
    // one recurring cleanup pass. Always releases the lock reference.
    private void collectUnderLock() {
        GlobalLock gcLock = GlobalLock.getInternLock(this.getClass().getName());
        try {
            if (gcLock.lock(3)) {
                try {
                    cleanupStorage(true);
                } finally {
                    gcLock.unlock();
                }
            }
        } finally {
            gcLock.releaseRef();
        }
    }
}
/**
 * Garbage-collects storage artifacts in three passes:
 *  1. unused templates on primary pools (two-phase: mark for GC on one pass,
 *     evict on a later pass),
 *  2. destroyed template copies on secondary storage hosts,
 *  3. volumes removed from the DB but not yet destroyed on disk.
 * Failures are logged per pool/host and do not abort the other passes.
 *
 * @param recurring true when invoked from the periodic GC thread; local
 *        pools are skipped in that case
 */
@Override
public void cleanupStorage(boolean recurring) {
    // Cleanup primary storage pools
    List<StoragePoolVO> storagePools = _storagePoolDao.listAll();
    for (StoragePoolVO pool : storagePools) {
        try {
            if (recurring && pool.isLocal()) {
                continue;
            }
            List<VMTemplateStoragePoolVO> unusedTemplatesInPool = _tmpltMgr.getUnusedTemplatesInPool(pool);
            s_logger.debug("Storage pool garbage collector found " + unusedTemplatesInPool.size() + " templates to clean up in storage pool: " + pool.getName());
            for (VMTemplateStoragePoolVO templatePoolVO : unusedTemplatesInPool) {
                if (templatePoolVO.getDownloadState() != VMTemplateStorageResourceAssoc.Status.DOWNLOADED) {
                    s_logger.debug("Storage pool garbage collector is skipping templatePoolVO with ID: " + templatePoolVO.getId() + " because it is not completely downloaded.");
                    continue;
                }
                // Two-phase eviction: first mark, evict only on a later pass.
                // This gives an in-flight user of the template one GC cycle of grace.
                if (!templatePoolVO.getMarkedForGC()) {
                    templatePoolVO.setMarkedForGC(true);
                    _vmTemplatePoolDao.update(templatePoolVO.getId(), templatePoolVO);
                    s_logger.debug("Storage pool garbage collector has marked templatePoolVO with ID: " + templatePoolVO.getId() + " for garbage collection.");
                    continue;
                }
                _tmpltMgr.evictTemplateFromStoragePool(templatePoolVO);
            }
        } catch (Exception e) {
            s_logger.warn("Problem cleaning up primary storage pool " + pool, e);
        }
    }
    // Cleanup secondary storage hosts
    List<HostVO> secondaryStorageHosts = _hostDao.listSecondaryStorageHosts();
    for (HostVO secondaryStorageHost : secondaryStorageHosts) {
        try {
            long hostId = secondaryStorageHost.getId();
            List<VMTemplateHostVO> destroyedTemplateHostVOs = _vmTemplateHostDao.listDestroyed(hostId);
            s_logger.debug("Secondary storage garbage collector found " + destroyedTemplateHostVOs.size() + " templates to cleanup on secondary storage host: "
                    + secondaryStorageHost.getName());
            for (VMTemplateHostVO destroyedTemplateHostVO : destroyedTemplateHostVOs) {
                if (!_tmpltMgr.templateIsDeleteable(destroyedTemplateHostVO)) {
                    s_logger.debug("Not deleting template at: " + destroyedTemplateHostVO.getInstallPath());
                    continue;
                }
                String installPath = destroyedTemplateHostVO.getInstallPath();
                if (installPath != null) {
                    // Physically delete first; only drop the DB row on success.
                    Answer answer = _agentMgr.easySend(hostId, new DeleteTemplateCommand(destroyedTemplateHostVO.getInstallPath()));
                    if (answer == null || !answer.getResult()) {
                        s_logger.debug("Failed to delete template at: " + destroyedTemplateHostVO.getInstallPath());
                    } else {
                        _vmTemplateHostDao.remove(destroyedTemplateHostVO.getId());
                        s_logger.debug("Deleted template at: " + destroyedTemplateHostVO.getInstallPath());
                    }
                } else {
                    // Nothing on disk to delete; just drop the DB row.
                    _vmTemplateHostDao.remove(destroyedTemplateHostVO.getId());
                }
            }
        } catch (Exception e) {
            s_logger.warn("problem cleaning up secondary storage " + secondaryStorageHost, e);
        }
    }
    // Destroy volumes that were removed in the DB but never destroyed on disk.
    List<VolumeVO> vols = _volsDao.listRemovedButNotDestroyed();
    for (VolumeVO vol : vols) {
        try {
            Long poolId = vol.getPoolId();
            Answer answer = null;
            StoragePoolVO pool = _storagePoolDao.findById(poolId);
            final DestroyCommand cmd = new DestroyCommand(pool, vol);
            answer = sendToPool(pool, cmd);
            if (answer != null && answer.getResult()) {
                s_logger.debug("Destroyed " + vol);
                vol.setDestroyed(true);
                _volsDao.update(vol.getId(), vol);
            }
        } catch (Exception e) {
            s_logger.warn("Unable to destroy " + vol.getId(), e);
        }
    }
}
/**
 * Returns every storage pool that hosts at least one volume belonging to the
 * given VM.
 *
 * @param vmId id of the VM whose pools are wanted
 */
@Override
public List<StoragePoolVO> getStoragePoolsForVm(long vmId) {
    SearchCriteria<StoragePoolVO> criteria = PoolsUsedByVmSearch.create();
    criteria.setJoinParameters("volumes", "vm", vmId);
    return _storagePoolDao.search(criteria, null);
}
/**
 * Returns the UUID of the primary storage pool backing the given volume,
 * which serves as the pool's name label.
 *
 * @param volume a non-destroyed volume (callers have already verified this,
 *        so its pool id is expected to be non-null)
 */
@Override
public String getPrimaryStorageNameLabel(VolumeVO volume) {
    Long poolId = volume.getPoolId();
    // Callers guarantee the volume is not destroyed, hence a pool must exist.
    assert poolId != null;
    StoragePoolVO pool = _storagePoolDao.findById(poolId);
    assert pool != null;
    return pool.getUuid();
}
/**
 * Puts a primary storage pool into maintenance mode: stops every VM that has
 * a volume on the pool (console proxies, user VMs, SSVMs, domain routers),
 * restarting system VMs elsewhere when another Up pool exists, then marks the
 * pool Maintenance.
 *
 * NOTE(review): any exception is caught, logged, and the method still returns
 * true — confirm whether failure should propagate to the caller instead.
 *
 * @param primaryStorageId pool to put into maintenance
 * @param userId           user performing the operation (for config updates)
 * @return false on a detected stop/start failure (pool set to
 *         ErrorInMaintenance); true otherwise
 */
@Override
@DB
public boolean preparePrimaryStorageForMaintenance(long primaryStorageId, long userId)
{
    // NOTE(review): 'count' is never used — candidate for removal.
    long count = 1;
    boolean restart = true;
    try
    {
        //1. Get the primary storage record
        StoragePoolVO primaryStorage = _storagePoolDao.findById(primaryStorageId);
        if(primaryStorage == null)
        {
            s_logger.warn("The primary storage does not exist");
            return false;
        }
        //check to see if other ps exist
        //if they do, then we can migrate over the system vms to them
        //if they dont, then just stop all vms on this one
        List<StoragePoolVO> upPools = _storagePoolDao.listPoolsByStatus(Status.Up);
        if(upPools==null || upPools.size()==0)
            restart = false;
        //2. Get a list of all the volumes within this storage pool
        List<VolumeVO> allVolumes = _volsDao.findByPoolId(primaryStorageId);
        //3. Each volume has an instance associated with it, stop the instance if running
        for(VolumeVO volume : allVolumes)
        {
            VMInstanceVO vmInstance = _vmInstanceDao.findById(volume.getInstanceId());
            if(vmInstance == null)
                continue;
            //shut down the running vms
            // NOTE(review): Stopped/Stopping/Starting states are included here
            // as well as Running — confirm stopping an already-Stopped VM is
            // intentional (it may be needed to drive the restart path below).
            if(vmInstance.getState().equals(State.Running) || vmInstance.getState().equals(State.Stopped) || vmInstance.getState().equals(State.Stopping) || vmInstance.getState().equals(State.Starting))
            {
                //if the instance is of type consoleproxy, call the console proxy
                if(vmInstance.getType().equals(VirtualMachine.Type.ConsoleProxy))
                {
                    //make sure it is not restarted again, update config to set flag to false
                    _configMgr.updateConfiguration(userId, "consoleproxy.restart", "false");
                    //create a dummy event
                    long eventId = saveScheduledEvent(User.UID_SYSTEM, Account.ACCOUNT_ID_SYSTEM, EventTypes.EVENT_PROXY_STOP, "stopping console proxy with Id: "+vmInstance.getId());
                    //call the consoleproxymanager
                    if(!_consoleProxyMgr.stopProxy(vmInstance.getId(), eventId))
                    {
                        s_logger.warn("There was an error stopping the console proxy id: "+vmInstance.getId()+" ,cannot enable storage maintenance");
                        primaryStorage.setStatus(Status.ErrorInMaintenance);
                        _storagePoolDao.persist(primaryStorage);
                        return false;
                    }
                    else if(restart)
                    {
                        // Another Up pool exists: restart the proxy so it lands there.
                        //create a dummy event
                        long eventId1 = saveScheduledEvent(User.UID_SYSTEM, Account.ACCOUNT_ID_SYSTEM, EventTypes.EVENT_PROXY_START, "starting console proxy with Id: "+vmInstance.getId());
                        //Restore config val for consoleproxy.restart to true
                        _configMgr.updateConfiguration(userId, "consoleproxy.restart", "true");
                        if(_consoleProxyMgr.startProxy(vmInstance.getId(), eventId1)==null)
                        {
                            s_logger.warn("There was an error starting the console proxy id: "+vmInstance.getId()+" on another storage pool, cannot enable primary storage maintenance");
                            primaryStorage.setStatus(Status.ErrorInMaintenance);
                            _storagePoolDao.persist(primaryStorage);
                            return false;
                        }
                    }
                }
                //if the instance is of type uservm, call the user vm manager
                // User VMs are stopped only — users restart them after maintenance.
                if(vmInstance.getType().equals(VirtualMachine.Type.User))
                {
                    if(!_userVmMgr.stopVirtualMachine(userId, vmInstance.getId()))
                    {
                        s_logger.warn("There was an error stopping the user vm id: "+vmInstance.getId()+" ,cannot enable storage maintenance");
                        primaryStorage.setStatus(Status.ErrorInMaintenance);
                        _storagePoolDao.persist(primaryStorage);
                        return false;
                    }
                }
                //if the instance is of type secondary storage vm, call the secondary storage vm manager
                if(vmInstance.getType().equals(VirtualMachine.Type.SecondaryStorageVm))
                {
                    //create a dummy event
                    long eventId1 = saveScheduledEvent(User.UID_SYSTEM, Account.ACCOUNT_ID_SYSTEM, EventTypes.EVENT_SSVM_STOP, "stopping ssvm with Id: "+vmInstance.getId());
                    if(!_secStorageMgr.stopSecStorageVm(vmInstance.getId(), eventId1))
                    {
                        s_logger.warn("There was an error stopping the ssvm id: "+vmInstance.getId()+" ,cannot enable storage maintenance");
                        primaryStorage.setStatus(Status.ErrorInMaintenance);
                        _storagePoolDao.persist(primaryStorage);
                        return false;
                    }
                    else if(restart)
                    {
                        //create a dummy event and restart the ssvm immediately
                        long eventId = saveScheduledEvent(User.UID_SYSTEM, Account.ACCOUNT_ID_SYSTEM, EventTypes.EVENT_SSVM_START, "starting ssvm with Id: "+vmInstance.getId());
                        if(_secStorageMgr.startSecStorageVm(vmInstance.getId(), eventId)==null)
                        {
                            s_logger.warn("There was an error starting the ssvm id: "+vmInstance.getId()+" on another storage pool, cannot enable primary storage maintenance");
                            primaryStorage.setStatus(Status.ErrorInMaintenance);
                            _storagePoolDao.persist(primaryStorage);
                            return false;
                        }
                    }
                }
                //if the instance is of type domain router vm, call the network manager
                if(vmInstance.getType().equals(VirtualMachine.Type.DomainRouter))
                {
                    //create a dummy event
                    long eventId2 = saveScheduledEvent(User.UID_SYSTEM, Account.ACCOUNT_ID_SYSTEM, EventTypes.EVENT_ROUTER_STOP, "stopping domain router with Id: "+vmInstance.getId());
                    if(!_networkMgr.stopRouter(vmInstance.getId(), eventId2))
                    {
                        s_logger.warn("There was an error stopping the domain router id: "+vmInstance.getId()+" ,cannot enable primary storage maintenance");
                        primaryStorage.setStatus(Status.ErrorInMaintenance);
                        _storagePoolDao.persist(primaryStorage);
                        return false;
                    }
                    else if(restart)
                    {
                        //create a dummy event and restart the domr immediately
                        // NOTE(review): the event type here is EVENT_PROXY_START
                        // for a domain router start — looks like a copy/paste slip.
                        long eventId = saveScheduledEvent(User.UID_SYSTEM, Account.ACCOUNT_ID_SYSTEM, EventTypes.EVENT_PROXY_START, "starting domr with Id: "+vmInstance.getId());
                        if(_networkMgr.startRouter(vmInstance.getId(), eventId)==null)
                        {
                            s_logger.warn("There was an error starting the omr id: "+vmInstance.getId()+" on another storage pool, cannot enable primary storage maintenance");
                            primaryStorage.setStatus(Status.ErrorInMaintenance);
                            _storagePoolDao.persist(primaryStorage);
                            return false;
                        }
                    }
                }
            }
        }
        //5. Update the status
        primaryStorage.setStatus(Status.Maintenance);
        _storagePoolDao.persist(primaryStorage);
    } catch (Exception e) {
        s_logger.error("Exception in enabling primary storage maintenance:"+e);
    }
    return true;
}
/**
 * Persists a placeholder "scheduled" event and returns its id so the caller
 * can correlate the eventual async operation with it.
 *
 * @param userId      user the event is attributed to
 * @param accountId   account the event is attributed to
 * @param type        event type constant (see EventTypes)
 * @param description appended to the "Scheduled async job for " prefix
 * @return id of the persisted event row
 */
private Long saveScheduledEvent(Long userId, Long accountId, String type, String description)
{
    EventVO scheduled = new EventVO();
    scheduled.setUserId(userId);
    scheduled.setAccountId(accountId);
    scheduled.setType(type);
    scheduled.setState(EventState.Scheduled);
    scheduled.setDescription("Scheduled async job for " + description);
    scheduled = _eventDao.persist(scheduled);
    return scheduled.getId();
}
/**
 * Takes a primary storage pool out of maintenance mode: restarts every
 * stopped/stopping VM whose live (not destroyed, not removed) volume resides
 * on the pool, restores the consoleproxy.restart config flag, and marks the
 * pool Up again.
 *
 * NOTE(review): unlike preparePrimaryStorageForMaintenance, vmInstance is not
 * null-checked after the findById lookup — a volume with no instance would
 * NPE here; confirm whether a null guard is needed.
 *
 * @return false on any start/config failure (pool set to ErrorInMaintenance);
 *         true once the pool is back Up
 */
@Override
@DB
public boolean cancelPrimaryStorageForMaintenance(long primaryStorageId,long userId)
{
    //1. Get the primary storage record
    StoragePoolVO primaryStorage = _storagePoolDao.findById(primaryStorageId);
    if(primaryStorage == null)
    {
        s_logger.warn("The primary storage does not exist");
        return false;
    }
    //2. Get a list of all the volumes within this storage pool
    List<VolumeVO> allVolumes = _volsDao.findByPoolId(primaryStorageId);
    //3. If the volume is not removed AND not destroyed, start the vm corresponding to it
    for(VolumeVO volume: allVolumes)
    {
        if((!volume.destroyed) && (volume.removed==null))
        {
            VMInstanceVO vmInstance = _vmInstanceDao.findById(volume.getInstanceId());
            if(vmInstance.getState().equals(State.Stopping) || vmInstance.getState().equals(State.Stopped))
            {
                //if the instance is of type consoleproxy, call the console proxy
                if(vmInstance.getType().equals(VirtualMachine.Type.ConsoleProxy))
                {
                    //create a dummy event
                    long eventId = saveScheduledEvent(User.UID_SYSTEM, Account.ACCOUNT_ID_SYSTEM, EventTypes.EVENT_PROXY_START, "starting console proxy with Id: "+vmInstance.getId());
                    if(_consoleProxyMgr.startProxy(vmInstance.getId(), eventId)==null)
                    {
                        s_logger.warn("There was an error starting the console proxy id: "+vmInstance.getId()+" on storage pool, cannot complete primary storage maintenance");
                        primaryStorage.setStatus(Status.ErrorInMaintenance);
                        _storagePoolDao.persist(primaryStorage);
                        return false;
                    }
                }
                //if the instance is of type ssvm, call the ssvm manager
                if(vmInstance.getType().equals(VirtualMachine.Type.SecondaryStorageVm))
                {
                    //create a dummy event
                    long eventId = saveScheduledEvent(User.UID_SYSTEM, Account.ACCOUNT_ID_SYSTEM, EventTypes.EVENT_SSVM_START, "starting ssvm with Id: "+vmInstance.getId());
                    if(_secStorageMgr.startSecStorageVm(vmInstance.getId(), eventId)==null)
                    {
                        s_logger.warn("There was an error starting the ssvm id: "+vmInstance.getId()+" on storage pool, cannot complete primary storage maintenance");
                        primaryStorage.setStatus(Status.ErrorInMaintenance);
                        _storagePoolDao.persist(primaryStorage);
                        return false;
                    }
                }
                //if the instance is of type user vm, call the user vm manager
                if(vmInstance.getType().equals(VirtualMachine.Type.User))
                {
                    //create a dummy event
                    // NOTE(review): the description and warnings below say "ssvm"
                    // for a user VM — looks like copy/paste from the branch above.
                    long eventId = saveScheduledEvent(User.UID_SYSTEM, Account.ACCOUNT_ID_SYSTEM, EventTypes.EVENT_VM_START, "starting ssvm with Id: "+vmInstance.getId());
                    try {
                        if(_userVmMgr.start(vmInstance.getId(), eventId)==null)
                        {
                            s_logger.warn("There was an error starting the ssvm id: "+vmInstance.getId()+" on storage pool, cannot complete primary storage maintenance");
                            primaryStorage.setStatus(Status.ErrorInMaintenance);
                            _storagePoolDao.persist(primaryStorage);
                            return false;
                        }
                    } catch (StorageUnavailableException e) {
                        s_logger.warn("There was an error starting the ssvm id: "+vmInstance.getId()+" on storage pool, cannot complete primary storage maintenance");
                        s_logger.warn(e);
                        primaryStorage.setStatus(Status.ErrorInMaintenance);
                        _storagePoolDao.persist(primaryStorage);
                        return false;
                    } catch (InsufficientCapacityException e) {
                        s_logger.warn("There was an error starting the ssvm id: "+vmInstance.getId()+" on storage pool, cannot complete primary storage maintenance");
                        s_logger.warn(e);
                        primaryStorage.setStatus(Status.ErrorInMaintenance);
                        _storagePoolDao.persist(primaryStorage);
                        return false;
                    } catch (ConcurrentOperationException e) {
                        s_logger.warn("There was an error starting the ssvm id: "+vmInstance.getId()+" on storage pool, cannot complete primary storage maintenance");
                        s_logger.warn(e);
                        primaryStorage.setStatus(Status.ErrorInMaintenance);
                        _storagePoolDao.persist(primaryStorage);
                        return false;
                    } catch (ExecutionException e) {
                        s_logger.warn("There was an error starting the ssvm id: "+vmInstance.getId()+" on storage pool, cannot complete primary storage maintenance");
                        s_logger.warn(e);
                        primaryStorage.setStatus(Status.ErrorInMaintenance);
                        _storagePoolDao.persist(primaryStorage);
                        return false;
                    }
                }
            }
        }
    }
    //Restore config val for consoleproxy.restart to true
    // NOTE(review): the warning text below says "back to false" although the
    // value being restored is "true" — misleading log message.
    try {
        _configMgr.updateConfiguration(userId, "consoleproxy.restart", "true");
    } catch (InvalidParameterValueException e) {
        s_logger.warn("Error changing consoleproxy.restart back to false at end of cancel maintenance:"+e);
        primaryStorage.setStatus(Status.ErrorInMaintenance);
        _storagePoolDao.persist(primaryStorage);
        return false;
    } catch (InternalErrorException e) {
        s_logger.warn("Error changing consoleproxy.restart back to false at end of cancel maintenance:"+e);
        primaryStorage.setStatus(Status.ErrorInMaintenance);
        _storagePoolDao.persist(primaryStorage);
        return false;
    }
    //Change the storage state back to up
    primaryStorage.setStatus(Status.Up);
    _storagePoolDao.persist(primaryStorage);
    return true;
}
/**
 * Decides whether a command must run on the host where the VM resides.
 * On KVM, snapshot management/backup commands are host-affine; everything
 * else can go to any host that can see the pool.
 */
private boolean sendToVmResidesOn(Command cmd) {
    boolean isSnapshotCmd = (cmd instanceof ManageSnapshotCommand) || (cmd instanceof BackupSnapshotCommand);
    return (_hypervisorType == Hypervisor.Type.KVM) && isSnapshotCmd;
}
/**
 * Flattens a volume record plus its disk offering into the DiskProfile shape
 * consumed by the storage allocators.
 */
protected DiskProfile toDiskProfile(VolumeVO vol, DiskOfferingVO offering) {
    return new DiskProfile(vol.getId(), vol.getVolumeType(), vol.getName(), offering.getId(), vol.getSize(),
            offering.getTagsArray(), offering.getUseLocalStorage(), offering.isRecreatable(), vol.getTemplateId());
}
/**
 * Allocates (persists, without physical creation) a raw volume record for a
 * VM, defaulting the size to the offering's size when none is given.
 *
 * Fix: the original guarded vol.setInstanceId with "if (vm != null)" even
 * though vm.getDataCenterId() had already been dereferenced unconditionally
 * above — the guard was dead code and has been removed (behavior on a null vm
 * is unchanged: NPE at the earlier dereference).
 *
 * @param size requested size in bytes; null means "use the offering's size"
 * @return the DiskProfile for the persisted volume
 */
@Override
public <T extends VMInstanceVO> DiskProfile allocateRawVolume(VolumeType type, String name, DiskOfferingVO offering, Long size, T vm, AccountVO owner) {
    // Fall back to the offering's size when the caller did not request one.
    if (size == null) {
        size = offering.getDiskSizeInBytes();
    }
    VolumeVO vol = new VolumeVO(type, name, vm.getDataCenterId(), owner.getDomainId(), owner.getId(), offering.getId(), size);
    vol.setInstanceId(vm.getId());
    vol = _volsDao.persist(vol);
    return toDiskProfile(vol, offering);
}
/**
 * Allocates (persists, without physical creation) a template-backed volume
 * record for a VM. The volume size is taken from the template's physical size
 * on a secondary storage host in the VM's zone.
 *
 * Fix: the original guarded vol.setInstanceId with "if (vm != null)" even
 * though vm.getDataCenterId() had already been dereferenced unconditionally —
 * the guard was dead code and has been removed (behavior unchanged).
 *
 * @throws CloudRuntimeException if the template is not fully downloaded to
 *         any secondary storage host in the VM's zone
 */
@Override
public <T extends VMInstanceVO> DiskProfile allocateTemplatedVolume(VolumeType type, String name, DiskOfferingVO offering, VMTemplateVO template, T vm, AccountVO owner) {
    assert (template.getFormat() != ImageFormat.ISO) : "ISO is not a template really....";
    // Find a secondary-storage copy of the template, fully downloaded, in the VM's zone.
    SearchCriteria<VMTemplateHostVO> sc = HostTemplateStatesSearch.create();
    sc.setParameters("id", template.getId());
    sc.setParameters("state", com.cloud.storage.VMTemplateStorageResourceAssoc.Status.DOWNLOADED);
    sc.setJoinParameters("host", "dcId", vm.getDataCenterId());
    List<VMTemplateHostVO> templateHosts = _vmTemplateHostDao.search(sc, null);
    if (templateHosts.isEmpty()) {
        throw new CloudRuntimeException("Template " + template.getName() + " has not been completely downloaded to zone " + vm.getDataCenterId());
    }
    // The template's size on secondary storage determines the volume size.
    VMTemplateHostVO templateHost = templateHosts.get(0);
    VolumeVO vol = new VolumeVO(type, name, vm.getDataCenterId(), owner.getDomainId(), owner.getId(), offering.getId(), templateHost.getSize());
    vol.setInstanceId(vm.getId());
    vol.setTemplateId(template.getId());
    vol = _volsDao.persist(vol);
    return toDiskProfile(vol, offering);
}
/**
 * Builds the DiskProfile used by the allocators from an existing volume and
 * its disk offering (parallel to toDiskProfile, used by the create path).
 */
final protected DiskProfile createDiskCharacteristics(VolumeVO volume, DiskOfferingVO offering) {
    return new DiskProfile(volume.getId(), volume.getVolumeType(), volume.getName(), offering.getId(), volume.getSize(), offering.getTagsArray(), offering.getUseLocalStorage(), offering.isRecreatable(), volume.getTemplateId());
}
/**
 * Convenience overload: looks up the volume's own disk offering and delegates
 * to the two-argument variant.
 */
final protected DiskProfile createDiskCharacteristics(VolumeVO volume) {
    DiskOfferingVO diskOffering = _diskOfferingDao.findById(volume.getDiskOfferingId());
    return createDiskCharacteristics(volume, diskOffering);
}
@Override
// Intended to create backing storage for every volume allocated to the given VM.
// NOTE(review): this method appears UNFINISHED — the retry loop below has an empty
// body, so currently it iterates _retry times per volume doing nothing. The large
// commented-out block below it looks like the intended implementation (pod locking,
// pool selection, CreateCommand dispatch) — TODO confirm and complete or remove.
public <T extends VMInstanceVO> void create(T vm) {
List<VolumeVO> vols = _volsDao.findByInstance(vm.getId());
// Every VM is expected to have at least one (root) volume already allocated.
assert vols.size() >= 1 : "Come on, what's with the zero volumes for " + vm;
for (VolumeVO vol : vols) {
DiskProfile dskCh = createDiskCharacteristics(vol);
int retry = _retry;
// Empty loop body — see NOTE(review) above; no work is performed per retry.
while (--retry >= 0) {
}
}
/*
StoragePoolVO pool = null;
final HashSet<StoragePool> avoidPools = new HashSet<StoragePool>(avoids);
VolumeType volType = volume.getVolumeType();
VolumeTO created = null;
int retry = _retry;
while (--retry >= 0) {
created = null;
txn.start();
long podId = pod.getId();
pod = _podDao.lock(podId, true);
if (pod == null) {
txn.rollback();
volume.setStatus(AsyncInstanceCreateStatus.Failed);
volume.setDestroyed(true);
_volsDao.persist(volume);
throw new CloudRuntimeException("Unable to acquire lock on the pod " + podId);
}
pool = findStoragePool(dskCh, dc, pod, clusterId, offering, vm, template, avoidPools);
if (pool == null) {
txn.rollback();
break;
}
avoidPools.add(pool);
if (s_logger.isDebugEnabled()) {
s_logger.debug("Trying to create " + volume + " on " + pool);
}
volume.setPoolId(pool.getId());
_volsDao.persist(volume);
txn.commit();
CreateCommand cmd = null;
VMTemplateStoragePoolVO tmpltStoredOn = null;
if (volume.getVolumeType() == VolumeType.ROOT && Storage.ImageFormat.ISO != template.getFormat()) {
tmpltStoredOn = _tmpltMgr.prepareTemplateForCreate(template, pool);
if (tmpltStoredOn == null) {
continue;
}
cmd = new CreateCommand(volume, vm, dskCh, tmpltStoredOn.getLocalDownloadPath(), pool);
} else {
cmd = new CreateCommand(volume, vm, dskCh, pool, size);
}
Answer answer = sendToPool(pool, cmd);
if (answer != null && answer.getResult()) {
created = ((CreateAnswer)answer).getVolume();
break;
}
volume.setPoolId(null);
_volsDao.persist(volume);
s_logger.debug("Retrying the create because it failed on pool " + pool);
}
if (created == null) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Unable to create a volume for " + volume);
}
volume.setStatus(AsyncInstanceCreateStatus.Failed);
volume.setDestroyed(true);
_volsDao.persist(volume);
return null;
}
volume.setStatus(AsyncInstanceCreateStatus.Created);
volume.setFolder(pool.getPath());
volume.setPath(created.getPath());
volume.setSize(created.getSize());
volume.setPoolType(pool.getPoolType());
volume.setPodId(pod.getId());
_volsDao.persist(volume);
return volume;
*/
}
}
|
package com.esotericsoftware.clippy;
import static com.esotericsoftware.minlog.Log.*;
import java.awt.EventQueue;
import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.TimerTask;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.Clip;
import javax.sound.sampled.FloatControl;
import com.esotericsoftware.clippy.Win.LASTINPUTINFO;
import com.esotericsoftware.clippy.util.Util;
/**
 * Monitors user activity (via Win32 last-input info) and, after a configurable number of
 * active minutes, shows a progress-bar dialog reminding the user to take a break. Optional
 * sounds are played when the reminder starts, flashes, and when the break completes.
 */
public class BreakWarning {
    final Clippy clippy = Clippy.instance;
    final LASTINPUTINFO lastInputInfo = new LASTINPUTINFO();
    long inactiveTime, lastInactiveTime;
    int inactiveCount;
    long lastBreakTime = System.currentTimeMillis();
    volatile ProgressBar progressBar; // non-null while the break dialog is visible
    Clip startClip, flashClip, endClip; // any of these may be null: unconfigured or failed to load
    volatile boolean disabled;

    public BreakWarning () {
        if (clippy.config.breakWarningMinutes <= 0) return; // feature disabled by configuration
        if (clippy.config.breakStartSound != null) startClip = loadClip(clippy.config.breakStartSound);
        if (clippy.config.breakFlashSound != null) flashClip = loadClip(clippy.config.breakFlashSound);
        if (clippy.config.breakEndSound != null) endClip = loadClip(clippy.config.breakEndSound);
        // Poll every 5 seconds: reset the break timer after sustained inactivity, otherwise
        // show the break dialog once the active time exceeds the configured threshold.
        Util.timer.schedule(new TimerTask() {
            public void run () {
                if (progressBar != null) return; // dialog already showing
                long inactiveMinutes = getInactiveMillis(true) / 1000 / 60;
                if (inactiveMinutes >= clippy.config.breakResetMinutes) lastBreakTime = System.currentTimeMillis();
                long activeMinutes = (System.currentTimeMillis() - lastBreakTime) / 1000 / 60 - inactiveMinutes;
                if (activeMinutes >= clippy.config.breakWarningMinutes) showBreakDialog();
            }
        }, 5 * 1000, 5 * 1000);
    }

    /** Shows the break dialog and starts a daemon thread that updates it until the user has
     * been inactive for breakResetMinutes, then marks the break complete. */
    void showBreakDialog () {
        EventQueue.invokeLater(new Runnable() {
            public void run () {
                playClip(startClip, 1);
                progressBar = new ProgressBar("");
                progressBar.clickToDispose = false;
                progressBar.red("");
                new Thread("BreakWarning Dialog") {
                    {
                        setDaemon(true);
                    }

                    public void run () {
                        float indeterminateMillis = 5000;
                        float volume = 0.05f;
                        while (true) {
                            long inactiveMillis = getInactiveMillis(false);
                            long inactiveMinutes = inactiveMillis / 1000 / 60;
                            if (inactiveMinutes >= clippy.config.breakResetMinutes) break; // break taken
                            // Fraction of the break still remaining (1 = none taken yet).
                            float percent = 1 - inactiveMillis / (float)(clippy.config.breakResetMinutes * 60 * 1000);
                            String message;
                            if (percent < 0.75f) {
                                // User has started resting: show remaining break time.
                                indeterminateMillis = 0;
                                long breakSeconds = clippy.config.breakResetMinutes * 60 - inactiveMillis / 1000;
                                long minutes = breakSeconds / 60, seconds = breakSeconds - minutes * 60;
                                String secondsMessage = seconds + " second" + (seconds == 1 ? "" : "s");
                                String minutesMessage = minutes + " minute" + (minutes == 1 ? "" : "s");
                                if (minutes == 0)
                                    message = "Break: " + secondsMessage;
                                else
                                    message = "Break: " + minutesMessage + ", " + secondsMessage;
                            } else {
                                // Still active: show total active time.
                                long activeMinutes = (System.currentTimeMillis() - lastBreakTime) / 1000 / 60;
                                long hours = activeMinutes / 60, minutes = activeMinutes - hours * 60;
                                String minutesMessage = minutes + " minute" + (minutes == 1 ? "" : "s");
                                String hoursMessage = hours + " hour" + (hours == 1 ? "" : "s");
                                if (hours == 0)
                                    message = "Active: " + minutesMessage;
                                else if (minutes == 0)
                                    message = "Active: " + hoursMessage;
                                else
                                    message = "Active: " + hoursMessage + ", " + minutesMessage;
                            }
                            progressBar.progressBar.setString(message);
                            indeterminateMillis -= 100;
                            if (indeterminateMillis > 0) {
                                // Flash phase: indeterminate bar plus an escalating-volume chime.
                                if (!progressBar.progressBar.isIndeterminate()) {
                                    playClip(flashClip, volume);
                                    volume += 0.1f;
                                    progressBar.progressBar.setIndeterminate(true);
                                }
                            } else {
                                // Re-flash every 5 minutes while the user keeps ignoring the dialog.
                                if (indeterminateMillis < -5 * 60 * 1000 && percent >= 0.99f) indeterminateMillis = 5000;
                                progressBar.setProgress(percent);
                                progressBar.toFront();
                                progressBar.setAlwaysOnTop(true);
                            }
                            Util.sleep(100);
                        }
                        lastBreakTime = System.currentTimeMillis();
                        playClip(endClip, 1);
                        progressBar.done("Break complete!", 2000);
                        progressBar = null;
                    }
                }.start();
            }
        });
    }

    /** Plays the clip from the beginning at the given volume (0..1). Does nothing when the
     * clip is null (unconfigured or failed to load) or the volume is zero. */
    void playClip (Clip clip, float volume) {
        if (clip == null) return; // loadClip returns null on failure; sounds are optional
        // A volume of 0 would yield MASTER_GAIN of log10(0) = -Infinity, which is outside
        // the control's range and would throw — treat it as "don't play".
        if (volume <= 0) return;
        if (volume > 1) volume = 1;
        clip.stop();
        clip.setFramePosition(0);
        ((FloatControl)clip.getControl(FloatControl.Type.MASTER_GAIN)).setValue(20f * (float)Math.log10(volume));
        clip.start();
    }

    /** Loads a clip from a bundled resource (for the known sound names) or a file path.
     * Returns null and logs on any failure. */
    private Clip loadClip (String sound) {
        // try-with-resources closes both streams even when getAudioInputStream or open
        // throws (the previous version leaked the raw input stream on that path).
        try (InputStream input = openSound(sound);
            AudioInputStream audioInput = AudioSystem.getAudioInputStream(new BufferedInputStream(input))) {
            Clip clip = AudioSystem.getClip();
            clip.open(audioInput);
            return clip;
        } catch (Exception ex) {
            if (ERROR) error("Unable to load sound: " + sound, ex);
            return null;
        }
    }

    // Resolves a sound name to its bundled resource, or opens it as a file path.
    private InputStream openSound (String sound) throws IOException {
        if (sound.equals("breakStart")) return BreakWarning.class.getResourceAsStream("/breakStart.wav");
        if (sound.equals("breakFlash")) return BreakWarning.class.getResourceAsStream("/breakFlash.wav");
        if (sound.equals("breakEnd")) return BreakWarning.class.getResourceAsStream("/breakEnd.wav");
        return new FileInputStream(sound);
    }

    /** Milliseconds since the last user input. When not sensitive, brief isolated inputs are
     * ignored: only 6+ input events within 4 seconds of each other count as activity, so a
     * single bumped key/mouse does not cancel an in-progress break. */
    long getInactiveMillis (boolean sensitive) {
        Win.User32.GetLastInputInfo(lastInputInfo);
        long time = lastInputInfo.dwTime;
        if (sensitive)
            inactiveTime = time;
        else {
            if (time != lastInactiveTime) {
                if (time - lastInactiveTime < 4000) {
                    inactiveCount++;
                    if (inactiveCount > 6) inactiveTime = time;
                } else
                    inactiveCount = 0;
                lastInactiveTime = time;
            }
        }
        return Win.Kernel32.GetTickCount() - inactiveTime;
    }

    /** Toggles the warning on/off, hiding or re-showing any visible break dialog. */
    public void toggle () {
        disabled = !disabled;
        ProgressBar progressBar = this.progressBar;
        if (progressBar != null) progressBar.setVisible(!disabled);
    }
}
|
package com.flashmath.fragment;
import android.app.Fragment;
import android.graphics.Color;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.LinearLayout;
import com.education.flashmath.R;
import com.flashmath.util.ColorUtil;
import com.jjoe64.graphview.GraphView;
import com.jjoe64.graphview.GraphView.GraphViewData;
import com.jjoe64.graphview.GraphViewSeries;
import com.jjoe64.graphview.GraphViewSeries.GraphViewSeriesStyle;
import com.jjoe64.graphview.CustomLabelFormatter;
import com.jjoe64.graphview.GraphViewStyle;
import com.jjoe64.graphview.LineGraphView;
public class LongGraphFragment extends Fragment{
private LinearLayout llStats;
private String subject;
private GraphViewData[] data;
public View onCreateView(LayoutInflater inf, ViewGroup parent, Bundle savedInstanceState ) {
return inf.inflate(R.layout.score_graph, parent, false);
}
public void setScores(GraphViewData[] data) {
this.data = data;
}
public void setSubject(String subject) {
this.subject = subject;
}
public void onActivityCreated(Bundle savedInstanceState){
super.onActivityCreated(savedInstanceState);
llStats = (LinearLayout) getActivity().findViewById(R.id.llStats);
GraphView graphView = new LineGraphView(getActivity(),"");
graphView.setCustomLabelFormatter(new CustomLabelFormatter.IntegerOnly());
GraphViewStyle style = new GraphViewStyle();
style.setVerticalLabelsColor(Color.BLACK);
style.setHorizontalLabelsColor(Color.BLACK);
style.setGridColor(Color.GRAY);
GraphViewSeriesStyle lineStyle = new GraphViewSeriesStyle(ColorUtil.subjectColorInt(subject), 5);
GraphViewSeries userData = new GraphViewSeries("Score", lineStyle, data);
graphView.addSeries(userData);
graphView.addSeries(new GraphViewSeries(new GraphViewData[] { new GraphViewData(1, 0) }));
graphView.addSeries(new GraphViewSeries(new GraphViewData[] { new GraphViewData(2, 3) }));
graphView.setGraphViewStyle(style);
llStats.addView(graphView);
llStats.setVisibility(View.VISIBLE);
}
public void clearScores() {
if (llStats != null) {
llStats.setVisibility(View.INVISIBLE);
GraphView graphView = new LineGraphView(getActivity(),"");
GraphViewStyle style = new GraphViewStyle();
style.setVerticalLabelsColor(Color.BLACK);
style.setHorizontalLabelsColor(Color.BLACK);
style.setGridColor(Color.GRAY);
style.setNumVerticalLabels(4);
graphView.addSeries(new GraphViewSeries(new GraphViewData[] { new GraphViewData(1, 0) }));
graphView.addSeries(new GraphViewSeries(new GraphViewData[] { new GraphViewData(2, 3) }));
graphView.setGraphViewStyle(style);
llStats.removeAllViews();
llStats.addView(graphView);
llStats.setVisibility(View.VISIBLE);
}
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.