answer stringlengths 17 10.2M |
|---|
package radiancetops.com.resistora;
import android.hardware.Camera;
import android.util.Log;
import android.widget.TextView;
import java.io.*;
import java.util.ArrayList;
import java.util.Arrays;
public class ImageHandler implements Camera.PreviewCallback {
private int width, height, stripheight;
private double[] H, S, L;
private int[] rgb;
private double[] Ha, Sa, La, diff;
private int[] idxs;
private static double h, s, l, r, g, b;
TextView rtv;
MarkerView markerTextView;
public ImageHandler(int width, int height, int stripheight, TextView rtv,MarkerView markerView) {
super();
this.width = height;
this.height = width;
this.stripheight = stripheight;
this.H = new double[width * stripheight];
this.S = new double[width * stripheight];
this.L = new double[width * stripheight];
this.rgb = new int[width * stripheight];
this.Ha = new double[width];
this.Sa = new double[width];
this.La = new double[width];
this.diff = new double[width];
this.idxs = new int[4];
this.rtv = rtv;
this.markerTextView = markerView;
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
Log.v("test", "test");
// Decode the image data to HSL
decodeNV21(data, width, height);
// Average data
avgImg();
// Find the maxima
findMaxima();
colors(idxs, rgb);
markerTextView.setBandLocation(idxs);
camera.addCallbackBuffer(data);
//camera.autoFocus(null);
}
private void colors(int[] idxs, int[] rgb) {
WIDTH = width;
HEIGHT = stripheight;
rgb1 = new int[WIDTH][HEIGHT];
output1 = new int[WIDTH][HEIGHT];
for(int i = 0; i < width; i++) {
for(int j = 0; j < stripheight; j++) {
rgb1[i][j] = rgb[j * width + i];
}
}
initializeColors();
normalizeSat();
avgColorStrip();
int[] cols = new int[4];
for(int i = 0; i < idxs.length-1; i++) {
/* image is reversed due to rotation */
cols[i] = getResistorColor(rgb1[width - idxs[i] - 1][0]);
}
cols[idxs.length-1] = getGoldSilver(rgb1[width - idxs[idxs.length-1] - 1][0]);
rtv.setText("\n" + resistanceValue(cols[3], cols[2], cols[1], cols[0]) + "\n" + cols[0] + " " + cols[1] + " " + cols[2] + " " + cols[3]);
//rtv.setText(idxs[0] + " " + idxs[1] + " " + idxs[2] + " " + idxs[3]);
}
private String resistanceValue (int a, int b, int c, int tolerance){
//gold is ten
int SILVER = 11;
//silver is eleven
int GOLD = 10;
if (a == 10) a = 1;
if (b == 10) b = 4;
if (a == 11) a = 8;
if (b == 11) b = 8;
int resistance = (int)((10 * a + b)*Math.pow(10,c));
String value = "\n" + resistance;
if (tolerance == 8){
tolerance = 11;
}
else tolerance = 10;
double mult = 1;
if(tolerance == GOLD) {
mult = 0.05;
} else {
mult = 0.1;
}
value+= " ± " + (int)(mult * resistance) + "Ω\n";
return value;
}
private void findMaxima() {
int[] midx = new int[4];
for(int i = 7; i < this.width - 7; i++) {
boolean nvalid = false;
for(int j = i - 4; j <= i + 4; j++) {
if(i == j) continue;
if(diff[j] >= diff[i]) {
nvalid = true;
break;
}
}
if(!nvalid) {
if(diff[i] > diff[midx[3]]) {
midx[3] = i;
for(int q = 3; q >= 1; q
if(diff[midx[q]] > diff[midx[q-1]]) {
int tmp = midx[q];
midx[q] = midx[q-1];
midx[q-1] = tmp;
}
}
}
}
}
Log.v("idx", midx[0] + " " + midx[1] + " " + midx[2] + " " + midx[3]);
for(int i = 0; i < 4; i++) {
/* the image is reversed due to the rotation */
idxs[i] = width - midx[i] - 1;
}
Arrays.sort(idxs);
}
private void avgImg() {
for(int i = 0; i < width; i++) {
for (int j = 0; j < stripheight; j++) {
Ha[i] += H[i + j * width];
Sa[i] += S[i + j * width];
La[i] += L[i + j * width];
}
Ha[i] /= stripheight;
Sa[i] /= stripheight;
La[i] /= stripheight;
diff[i] = Sa[i] - La[i];
}
}
public void writeCSV () {
Log.v("idx", idxs[0] + " " + idxs[1] + " " + idxs[2] + " " + idxs[3]);
/*
try {
PrintWriter pw = new PrintWriter(new FileWriter("data.csv"));
for (int i = 0; i < width; i++) {
pw.println(Ha[i] + ","+ Sa[i]+ "," + La[i]);
}
pw.close();
} catch (IOException e) {}
*/
}
private void decodeNV21(byte[] data, int height, int width) {
final int frameSize = width * height;
for (int j = 0; j < this.width; ++j) {
for (int i = this.height / 2 - stripheight / 2; i < this.height / 2 + stripheight / 2; ++i) {
int y = (0xff & ((int) data[j * this.height + i]));
int v = (0xff & ((int) data[frameSize + (j >> 1) * width + (i & ~1) + 0]));
int u = (0xff & ((int) data[frameSize + (j >> 1) * width + (i & ~1) + 1]));
int a = (i - (this.height / 2 - stripheight / 2)) * this.width + j;
int rgb = this.rgb[a] = YUVtoRGB(y, u, v);
double r = (0xff & (rgb >> 16)) / 255.;
double g = (0xff & (rgb >> 8)) / 255.;
double b = (0xff & (rgb >> 0)) / 255.;
double max = Math.max(r, Math.max(g, b)), min = Math.min(r, Math.min(g, b));
L[a] = ((max + min) / 2);
if(max == min){
H[a] = S[a] = 0; // achromatic
} else {
double d = max - min;
S[a] = L[a] > 0.5 ? d / (double) (2 - max - min) : d / (double) (max + min);
if (max == r) {
H[a] = (g - b) / (double) d + (g < b ? 6 : 0);
} else if (max == g) {
H[a] = (b - r) / (double) d + 1;
} else {
H[a] = (r - g) / (double) d + 4;
}
H[a] /= 6;
}
}
}
}
private int YUVtoRGB(int y, int u, int v) {
y = y < 16 ? 16 : y;
int r = (int) (1.164f * (y - 16) + 1.596f * (v - 128));
int g = (int) (1.164f * (y - 16) - 0.813f * (v - 128) - 0.391f * (u - 128));
int b = (int) (1.164f * (y - 16) + 2.018f * (u - 128));
r = r < 0 ? 0 : (r > 255 ? 255 : r);
g = g < 0 ? 0 : (g > 255 ? 255 : g);
b = b < 0 ? 0 : (b > 255 ? 255 : b);
return 0xff000000 | (r << 16) | (g << 8) | b;
}
private static int WIDTH, HEIGHT;
private static int[][] rgb1;
private static int[][] output1;
private static int[] presetRGB = new int[20];
private static double avgr, avgg, avgb, avgsat;
private static void initializeColors () {
presetRGB[0] = rgbToInt(0,0,0);
presetRGB[1] = rgbToInt(102, 51, 50);
presetRGB[2] = rgbToInt(255,0,0);
presetRGB[3] = rgbToInt(255, 102, 0);
presetRGB[4] = rgbToInt(255, 255, 0);
presetRGB[5] = rgbToInt(0, 255, 0);
presetRGB[6] = rgbToInt(0, 0, 255);
presetRGB[7] = rgbToInt(206, 101, 255);
presetRGB[8] = rgbToInt(130, 130, 130);
presetRGB[9] = rgbToInt(255, 255, 255);
presetRGB[10] = rgbToInt(205, 153, 51);
presetRGB[11] = rgbToInt(204, 204, 204);
}
private static void normalizeSat () {
avgsat = 0;
for (int i = 0; i < WIDTH; i++)
for (int j = 0; j < HEIGHT; j++) {
toHSL(getRed(rgb1[i][j]), getGreen(rgb1[i][j]), getBlue(rgb1[i][j]));
avgsat += s;
}
avgsat /= HEIGHT * WIDTH;
for (int i = 0; i < WIDTH; i++) {
for (int j = 0; j < HEIGHT; j++) {
toHSL(getRed(rgb1[i][j]), getGreen(rgb1[i][j]), getBlue(rgb1[i][j]));
s = Math.min(1.0, s / avgsat / 2);
toRGB(h, s, l);
rgb1[i][j] = rgbToInt((int)r, (int)g, (int)b);
}
}
}
private static void normalizeGray(){
double avgR = 0, avgB = 0, avgG = 0;
for (int i = 0; i<WIDTH; i++){
for (int j = 0; j<HEIGHT; j++){
avgR += getRed(rgb1[i][j]);
avgB += getBlue(rgb1[i][j]);
avgG += getGreen(rgb1[i][j]);
}
}
avgR /= HEIGHT*WIDTH;
avgB /= HEIGHT*WIDTH;
avgG /= HEIGHT*WIDTH;
for (int i = 0; i<WIDTH; i++){
for (int j = 0; j<HEIGHT; j++){
int tr = (int)(getRed(rgb1[i][j])/avgR*128);
int tg = (int)(getGreen(rgb1[i][j])/avgG*128);
int tb = (int)(getBlue(rgb1[i][j])/avgB*128);
rgb1[i][j] = rgbToInt( Math.max(0,Math.min(255,tr)), Math.max(0,Math.min(255,tg)),Math.max(0,Math.min(255,tb)));
}
}
}
private static void avgColorStrip () {
for (int i = 0; i < WIDTH; i++) {
avgr = 0;
avgg = 0;
avgb = 0;
for (int j = 0; j < HEIGHT; j++) {
avgr += getRed(rgb1[i][j]);
avgg += getGreen(rgb1[i][j]);
avgb += getBlue(rgb1[i][j]);
}
avgr /= HEIGHT;
avgg /= HEIGHT;
avgb /= HEIGHT;
for (int j = 0; j < HEIGHT; j++)
rgb1[i][j] = rgbToInt((int)avgr, (int)avgg, (int)avgb);
}
}
private static void replaceColors () {
for (int i = 0; i < WIDTH; i++) {
for (int j = 0; j < HEIGHT; j++) {
rgb1[i][j] = getResistorColor(rgb1[i][j]);
output1[i][j] = rgb1[i][j];
}
}
}
private static int getResistorColor (int rgb) {
r = getRed(rgb);
g = getGreen(rgb);
b = getBlue(rgb);
toHSL(r, g, b);
// BLACK AND WHITE
if (l < 0.13) return 0;
if (l > 0.90) return 9;
if (Math.max(r, Math.max(g, b)) - Math.min(r, Math.min(g,b)) < 10){
return 8;
}
if (h > 0.95 || h < 0.093){ // red,orange or brown
if (((l < 0.32 || s<0.51) && (h>0.01 && h < 0.04)) || ((l<0.29 || s < 0.42) && h>=0.05 && h <= 0.093)) return 1;
else if ( h>0.9 || h < 0.05) return 2;
else return 3;
}
if (h >= 0.093 && h < 0.21){
return 4;
}
if (h >= 0.21 && h < 0.49)
return 5;
if (h >= 0.49 && h < 0.69)
return 6;
if (h>=0.69 && h <= 0.95)
return 7;
return 12;
}
private static int getGoldSilver(int rgb){
if (Math.max(r, Math.max(g, b)) - Math.min(r, Math.min(g,b)) < 10){
return 11;
}
return 10;
}
// get the R value (0, 255) from a 32 bit integer
private static int getRed (int n) {
return 0xFF & (n >> 16);
}
// get the G value (0, 255) from a 32 bit integer
private static int getBlue (int n) {
return 0xFF & (n >> 0);
}
// get the B value (0, 255) from a 32 bit integer
private static int getGreen (int n) {
return 0xFF & (n >> 8);
}
private static void toHSL (double r, double g, double b) {
r = r / 255.0; // RED
g = g / 255.0; // GREEN
b = b / 255.0; // BLUE
double max = Math.max(r, Math.max(g, b));
double min = Math.min(r, Math.min(g, b));
h = (max + min) / 2.0;
s = (max + min) / 2.0;
l = (max + min) / 2.0;
if (max == min) {
h = s = 0;
} else {
double d = max - min;
s = l > 0.5 ? d / (2.0 - max - min) : d / (max + min);
if (max == r) {
h = (g - b) / d + (g < b ? 6 : 0);
} else if (max == g) {
h = (b - r) / d + 2;
} else if (max == b) {
h = (r - g) / d + 4;
}
h /= 6.0;
}
}
private static int rgbToInt(int locR, int locG, int locB){
int a = 255;
return (((a<<8)+locR<<8)+locG<<8)+locB;
}
private static void toRGB (double h, double s, double l) {
if (s == 0) {
r = g = b = 1;
} else {
double q = l < 0.5 ? l * (1 + s) : l + s - l * s;
double p = 2 * l - q;
r = hueToRGB(p, q, (h + 1.0d/3.0d));
g = hueToRGB(p, q, h);
b = hueToRGB(p, q, (h - 1.0d/3.0d));
}
r = Math.round(r * 255);
g = Math.round(g * 255);
b = Math.round(b * 255);
}
private static double hueToRGB (double p, double q, double t) {
if(t < 0.0d) t += 1;
if(t > 1.0d) t -= 1;
if(t < 1.0d/6.0d) return p + (q - p) * 6 * t;
if(t < 1.0d/2.0d) return q;
if(t < 2.0d/3.0d) return p + (q - p) * (2.0/3.0 - t) * 6;
return p;
}
} |
package com.example.androidfileexplorer;
import java.io.File;
import java.util.List;
import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.TextView;
public class DirListArrayAdapter extends ArrayAdapter<File> {
private final Context mContext;
static class ViewHolder {
public TextView text;
}
public DirListArrayAdapter(Context context, int resource, List<File> objects) {
super(context, resource, objects);
this.mContext = context;
}
@Override
public View getView(int pos, View convertView, ViewGroup parent) {
if (convertView == null) {
LayoutInflater vi = (LayoutInflater) mContext
.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
convertView = vi.inflate(R.layout.entry, null);
ViewHolder holder = new ViewHolder();
holder.text = (TextView) convertView.findViewById(R.id.entry);
convertView.setTag(holder);
}
ViewHolder holder = (ViewHolder) convertView.getTag();
File f = getItem(pos);
holder.text.setText(f.getName());
if (f.isDirectory()) {
holder.text.setTextColor(mContext.getResources().getColor(
R.color.dir_color));
} else {
holder.text.setTextColor(mContext.getResources().getColor(
R.color.file_color));
}
return convertView;
}
} |
package org.bimserver.client;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.bimserver.database.queries.om.Include;
import org.bimserver.database.queries.om.JsonQueryObjectModelConverter;
import org.bimserver.database.queries.om.Query;
import org.bimserver.database.queries.om.QueryException;
import org.bimserver.database.queries.om.QueryPart;
import org.bimserver.emf.IdEObject;
import org.bimserver.emf.IdEObjectImpl;
import org.bimserver.emf.IdEObjectImpl.State;
import org.bimserver.emf.IfcModelInterfaceException;
import org.bimserver.emf.OidProvider;
import org.bimserver.emf.PackageMetaData;
import org.bimserver.emf.SharedJsonDeserializer;
import org.bimserver.emf.SharedJsonSerializer;
import org.bimserver.ifc.IfcModel;
import org.bimserver.ifc.IfcModelChangeListener;
import org.bimserver.interfaces.objects.SActionState;
import org.bimserver.interfaces.objects.SDeserializerPluginConfiguration;
import org.bimserver.interfaces.objects.SLongActionState;
import org.bimserver.interfaces.objects.SSerializerPluginConfiguration;
import org.bimserver.models.geometry.GeometryData;
import org.bimserver.models.geometry.GeometryFactory;
import org.bimserver.models.geometry.GeometryInfo;
import org.bimserver.models.geometry.GeometryPackage;
import org.bimserver.plugins.ObjectAlreadyExistsException;
import org.bimserver.plugins.deserializers.DeserializeException;
import org.bimserver.plugins.serializers.SerializerInputstream;
import org.bimserver.plugins.services.Flow;
import org.bimserver.shared.exceptions.PublicInterfaceNotFoundException;
import org.bimserver.shared.exceptions.ServerException;
import org.bimserver.shared.exceptions.ServiceException;
import org.bimserver.shared.exceptions.UserException;
import org.bimserver.shared.interfaces.LowLevelInterface;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EClassifier;
import org.eclipse.emf.ecore.EEnum;
import org.eclipse.emf.ecore.EReference;
import org.eclipse.emf.ecore.EStructuralFeature;
import org.eclipse.emf.ecore.EcorePackage;
import org.eclipse.emf.ecore.util.EContentAdapter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.base.Joiner;
import com.google.common.io.LittleEndianDataInputStream;
/**
 * Client-side, lazily loading view of an IFC model stored on a remote
 * BIMserver. Objects and geometry are fetched on demand via serializer
 * downloads; optionally every EMF change is mirrored into a server-side
 * low-level transaction.
 */
public class ClientIfcModel extends IfcModel {
// Lifecycle of the local cache: nothing loaded, a download in progress, or
// the full revision present in memory.
public enum ModelState {
NONE, LOADING, FULLY_LOADED
}
private static final Logger LOGGER = LoggerFactory.getLogger(ClientIfcModel.class);
private BimServerClient bimServerClient;
private ModelState modelState = ModelState.NONE;
// Low-level transaction id; -1 until a transaction is started (recordChanges).
private long tid = -1;
// Revision oid this model mirrors.
private long roid;
// Names of EClasses already fetched by getAll(), to avoid re-downloading.
private final Set<String> loadedClasses = new HashSet<String>();
// Cached serializer oids; -1 until resolved from the server.
private long ifcSerializerOid = -1;
private long binaryGeometrySerializerOid = -1;
// Cached result of size(); -1 until the server has been asked.
private int cachedObjectCount = -1;
private boolean recordChanges;
private boolean includeGeometry;
/**
 * Creates a client model for one revision of a project.
 *
 * @param poid            project oid (used only to start a transaction when recordChanges)
 * @param roid            revision oid whose objects this model exposes
 * @param deep            when true, the whole revision is downloaded immediately
 * @param recordChanges   when true, local EMF changes are mirrored to a server transaction
 * @param includeGeometry when true, geometry is fetched alongside products
 */
public ClientIfcModel(BimServerClient bimServerClient, long poid, long roid, boolean deep, PackageMetaData packageMetaData, boolean recordChanges, boolean includeGeometry)
throws ServerException, UserException, PublicInterfaceNotFoundException {
super(packageMetaData, null);
this.recordChanges = recordChanges;
this.bimServerClient = bimServerClient;
this.roid = roid;
this.includeGeometry = includeGeometry;
if (recordChanges) {
// Open a server-side low-level transaction that the EContentAdapter
// will write into; failures are logged but do not abort construction.
try {
tid = bimServerClient.getLowLevelInterface().startTransaction(poid);
} catch (Exception e) {
LOGGER.error("", e);
}
}
if (deep) {
// Eagerly pull the whole revision; query problems are logged only.
try {
loadDeep();
} catch (QueryException e) {
LOGGER.error("", e);
}
}
}
/**
 * Internal constructor used by branch(): creates an empty model bound to the
 * same server, optionally opening a transaction on the given project, without
 * loading any objects.
 */
private ClientIfcModel(BimServerClient bimServerClient, PackageMetaData packageMetaData, long poid, boolean recordChanges) {
super(packageMetaData, null);
this.bimServerClient = bimServerClient;
this.recordChanges = recordChanges;
if (recordChanges) {
try {
tid = bimServerClient.getLowLevelInterface().startTransaction(poid);
} catch (Exception e) {
LOGGER.error("", e);
}
}
}
// Mirrors local EMF modifications to the server's low-level interface while
// recording changes. Only ADD and REMOVE notifications are handled, and only
// when the model is not currently being populated by a download (LOADING),
// so that deserialization does not echo every object back to the server.
private EContentAdapter adapter = new EContentAdapter() {
public void notifyChanged(Notification notification) {
super.notifyChanged(notification);
IdEObject idEObject = (IdEObject) notification.getNotifier();
EStructuralFeature eFeature = (EStructuralFeature) notification.getFeature();
if (notification.getEventType() == Notification.ADD) {
if (getModelState() != ModelState.LOADING) {
try {
// Dispatch on the attribute's EMF data type; long, byte[] and
// enum attributes are not supported by this client yet.
if (eFeature.getEType() == EcorePackage.eINSTANCE.getEString()) {
bimServerClient.getLowLevelInterface().addStringAttribute(getTransactionId(), idEObject.getOid(), eFeature.getName(), notification.getNewStringValue());
} else if (eFeature.getEType() == EcorePackage.eINSTANCE.getELong() || eFeature.getEType() == EcorePackage.eINSTANCE.getELongObject()) {
throw new UnsupportedOperationException();
} else if (eFeature.getEType() == EcorePackage.eINSTANCE.getEDouble() || eFeature.getEType() == EcorePackage.eINSTANCE.getEDoubleObject()) {
bimServerClient.getLowLevelInterface().addDoubleAttribute(getTransactionId(), idEObject.getOid(), eFeature.getName(), (Double) notification.getNewValue());
} else if (eFeature.getEType() == EcorePackage.eINSTANCE.getEBoolean() || eFeature.getEType() == EcorePackage.eINSTANCE.getEBooleanObject()) {
bimServerClient.getLowLevelInterface().addBooleanAttribute(getTransactionId(), idEObject.getOid(), eFeature.getName(), notification.getNewBooleanValue());
} else if (eFeature.getEType() == EcorePackage.eINSTANCE.getEInt() || eFeature.getEType() == EcorePackage.eINSTANCE.getEIntegerObject()) {
bimServerClient.getLowLevelInterface().addIntegerAttribute(getTransactionId(), idEObject.getOid(), eFeature.getName(), notification.getNewIntValue());
} else if (eFeature.getEType() == EcorePackage.eINSTANCE.getEByteArray()) {
throw new UnsupportedOperationException();
} else if (eFeature.getEType() instanceof EEnum) {
throw new UnsupportedOperationException();
} else if (eFeature instanceof EReference) {
// A null new value means nothing was actually added.
if (notification.getNewValue() == null) {
} else {
bimServerClient.getLowLevelInterface().addReference(getTransactionId(), idEObject.getOid(), eFeature.getName(), ((IdEObject) notification.getNewValue()).getOid());
}
} else {
throw new RuntimeException("Unimplemented " + eFeature.getEType().getName() + " " + notification.getNewValue());
}
} catch (ServiceException e) {
LOGGER.error("", e);
} catch (PublicInterfaceNotFoundException e) {
LOGGER.error("", e);
}
}
} else if (notification.getEventType() == Notification.REMOVE) {
if (getModelState() != ModelState.LOADING) {
try {
// Only reference removals are mirrored; attribute removal is
// unimplemented.
if (eFeature instanceof EReference) {
IdEObject oldValue = (IdEObject) notification.getOldValue();
bimServerClient.getLowLevelInterface().removeReferenceByOid(getTransactionId(), idEObject.getOid(), eFeature.getName(), oldValue.getOid());
} else {
throw new RuntimeException("Unimplemented " + eFeature.getEType().getName() + " " + notification.getNewValue());
}
} catch (ServiceException e) {
LOGGER.error("", e);
} catch (PublicInterfaceNotFoundException e) {
LOGGER.error("", e);
}
}
}
}
};
/**
 * Creates an in-memory copy ("branch") of this model bound to the given
 * project. The copy is built client-side in two passes: first every object is
 * shell-created so that the old-to-new mapping exists, then all features are
 * copied, translating references through the map.
 *
 * NOTE(review): this loads the *source* model deep first; errors during
 * loading or copying are logged and skipped rather than propagated.
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
public ClientIfcModel branch(long poid, boolean recordChanges) {
// TODO this should of course be done server side, without any copying
ClientIfcModel branch = new ClientIfcModel(bimServerClient, getPackageMetaData(), poid, recordChanges);
try {
loadDeep();
} catch (ServerException e) {
LOGGER.error("", e);
} catch (UserException e) {
LOGGER.error("", e);
} catch (PublicInterfaceNotFoundException e) {
LOGGER.error("", e);
} catch (QueryException e) {
LOGGER.error("", e);
}
// Pass 1: create an empty counterpart for every source object.
Map<IdEObject, IdEObject> map = new HashMap<IdEObject, IdEObject>();
for (IdEObject sourceObject : getValues()) {
try {
IdEObjectImpl targetObject = branch.create(sourceObject.eClass());
targetObject.setLoadingState(State.LOADED);
map.put(sourceObject, targetObject);
} catch (IfcModelInterfaceException e) {
LOGGER.error("", e);
}
}
// Pass 2: copy every structural feature, remapping references via map.
for (IdEObject sourceObject : getObjects().values()) {
IdEObject targetObject = map.get(sourceObject);
for (EStructuralFeature eStructuralFeature : sourceObject.eClass().getEAllStructuralFeatures()) {
Object sourceValue = sourceObject.eGet(eStructuralFeature);
if (eStructuralFeature instanceof EReference) {
if (eStructuralFeature.isMany()) {
List sourceList = (List) sourceValue;
List targetList = (List) targetObject.eGet(eStructuralFeature);
for (Object sourceItem : sourceList) {
// References to objects outside the map are silently dropped.
IdEObject e = map.get(sourceItem);
if (e != null) {
targetList.add(e);
}
}
} else {
targetObject.eSet(eStructuralFeature, map.get(sourceValue));
}
} else {
if (eStructuralFeature.isMany()) {
List sourceList = (List) sourceValue;
List targetList = (List) targetObject.eGet(eStructuralFeature);
for (Object sourceItem : sourceList) {
targetList.add(sourceItem);
}
} else {
targetObject.eSet(eStructuralFeature, sourceValue);
}
}
}
}
branch.setModelState(ModelState.FULLY_LOADED);
return branch;
}
/** Internal state transition used while streaming data in and out. */
private void setModelState(ModelState modelState) {
    this.modelState = modelState;
}

/** @return the client this model talks to */
public BimServerClient getBimServerClient() {
    return bimServerClient;
}

/**
 * Commits the low-level transaction backing this model.
 *
 * @param comment commit message stored with the new revision
 * @return the oid of the newly created revision
 * @throws UserException when no transaction was ever started (recordChanges was false)
 */
public long commit(String comment) throws ServerException, UserException, PublicInterfaceNotFoundException {
    if (tid == -1) {
        throw new UserException("No transaction was started");
    }
    LowLevelInterface lowLevel = bimServerClient.getLowLevelInterface();
    return lowLevel.commitTransaction(tid, comment);
}
/**
 * Resolves (and caches) the oid of the server's JSON streaming serializer.
 *
 * @throws UserException when the server has no such serializer configured
 */
public long getJsonSerializerOid() throws ServerException, UserException, PublicInterfaceNotFoundException {
    if (ifcSerializerOid != -1) {
        return ifcSerializerOid; // already resolved
    }
    SSerializerPluginConfiguration config = bimServerClient.getPluginInterface().getSerializerByPluginClassName("org.bimserver.serializers.JsonStreamingSerializerPlugin");
    if (config == null) {
        throw new UserException("No JSON streaming serializer found");
    }
    ifcSerializerOid = config.getOid();
    return ifcSerializerOid;
}

/**
 * Resolves (and caches) the oid of the server's binary geometry serializer.
 *
 * @throws UserException when the server has no such serializer configured
 */
public long getBinaryGeometrySerializerOid() throws ServerException, UserException, PublicInterfaceNotFoundException {
    if (binaryGeometrySerializerOid != -1) {
        return binaryGeometrySerializerOid; // already resolved
    }
    SSerializerPluginConfiguration config = bimServerClient.getPluginInterface().getSerializerByPluginClassName("org.bimserver.serializers.binarygeometry.BinaryGeometrySerializerPlugin");
    if (config == null) {
        throw new UserException("No binary geometry serializer found");
    }
    binaryGeometrySerializerOid = config.getOid();
    return binaryGeometrySerializerOid;
}
/**
 * Downloads the entire revision into this model (all objects, all fields),
 * then loads geometry. No-op when the model is already fully loaded or a
 * load is in progress. Errors during processing are logged, in which case the
 * state may remain LOADING.
 */
private void loadDeep() throws ServerException, UserException, PublicInterfaceNotFoundException, QueryException {
if (modelState != ModelState.FULLY_LOADED && modelState != ModelState.LOADING) {
modelState = ModelState.LOADING;
// Build an all-fields query over the revision and stream it down as JSON.
Query query = new Query("test", getPackageMetaData());
QueryPart queryPart = query.createQueryPart();
queryPart.setIncludeAllFields(true);
ObjectNode queryNode = new JsonQueryObjectModelConverter(query.getPackageMetaData()).toJson(query);
Long topicId = bimServerClient.getServiceInterface().download(Collections.singleton(roid), queryNode.toString(), getJsonSerializerOid(), false);
waitForDonePreparing(topicId);
try {
processDownload(topicId);
bimServerClient.getServiceInterface().cleanupLongAction(topicId);
modelState = ModelState.FULLY_LOADED;
loadGeometry();
} catch (IfcModelInterfaceException | IOException e) {
LOGGER.error("", e);
} catch (QueryException e) {
LOGGER.error("", e);
} catch (GeometryException e) {
LOGGER.error("", e);
}
}
}
/**
 * Fetches geometry for every IfcProduct already in the model whose geometry
 * data is missing, using the binary geometry messaging serializer. Does
 * nothing when includeGeometry is false or no product needs geometry.
 */
private void loadGeometry() throws QueryException, ServerException, UserException, PublicInterfaceNotFoundException, IOException, GeometryException, IfcModelInterfaceException {
if (includeGeometry) {
getModelMetaData().setMinBounds(getBimServerClient().getServiceInterface().getModelMinBounds(roid));
getModelMetaData().setMaxBounds(getBimServerClient().getServiceInterface().getModelMaxBounds(roid));
Query query = new Query("test", getPackageMetaData());
QueryPart queryPart = query.createQueryPart();
// Maps GeometryInfo oid -> owning IfcProduct oid, so the stream parser
// can attach incoming geometry to the right product.
Map<Long, Long> geometryInfoOidToOid = new HashMap<>();
EClass ifcProductClass = getPackageMetaData().getEClass("IfcProduct");
EStructuralFeature geometryFeature = ifcProductClass.getEStructuralFeature("geometry");
List<IdEObject> allWithSubTypes = new ArrayList<>(super.getAllWithSubTypes(ifcProductClass));
for (IdEObject ifcProduct : allWithSubTypes) {
GeometryInfo geometry = (GeometryInfo) ifcProduct.eGet(geometryFeature);
if (geometry != null) {
// Only request geometry whose data/indices are not yet loaded.
if (geometry.getData() == null || geometry.getData().getIndices() == null) {
queryPart.addOid(geometry.getOid());
geometryInfoOidToOid.put(geometry.getOid(), ifcProduct.getOid());
}
}
}
if (queryPart.getOids() == null) {
return;
}
EClass geometryInfoClass = getPackageMetaData().getEClassIncludingDependencies("GeometryInfo");
Include include = queryPart.createInclude();
include.addType(geometryInfoClass, false);
include.addField("data");
long serializerOid = bimServerClient.getBinaryGeometryMessagingStreamingSerializerOid();
long topicId = bimServerClient.query(query, roid, serializerOid);
// TODO use websocket notifications
waitForDonePreparing(topicId);
InputStream inputStream = bimServerClient.getDownloadData(topicId);
try {
// ByteArrayOutputStream byteArrayOutputStream = new
// ByteArrayOutputStream();
// IOUtils.copy(inputStream, byteArrayOutputStream);
processGeometryInputStream(inputStream, geometryInfoOidToOid);
} catch (Throwable e) {
e.printStackTrace();
} finally {
bimServerClient.getServiceInterface().cleanupLongAction(topicId);
}
}
}
/**
 * Polls the server's progress registry until the long-running action for
 * {@code topicId} reports "Done preparing", reports an error, is interrupted,
 * or about 5 seconds elapse (10 polls x 500 ms).
 *
 * @throws UserException when the action enters AS_ERROR; the message joins
 *                       all errors reported by the server
 */
private void waitForDonePreparing(long topicId) throws UserException, ServerException, PublicInterfaceNotFoundException {
    for (int i = 0; i < 10; i++) {
        SLongActionState progress = bimServerClient.getRegistry().getProgress(topicId);
        if (progress != null) {
            if (progress.getTitle() != null && progress.getTitle().equals("Done preparing")) {
                break;
            } else if (progress.getState() == SActionState.AS_ERROR) {
                throw new UserException(Joiner.on(", ").join(progress.getErrors()));
            }
        }
        try {
            Thread.sleep(500);
        } catch (InterruptedException e) {
            // BUG FIX: restore the thread's interrupt status and stop waiting,
            // instead of swallowing the interruption and spinning through the
            // remaining polls (each subsequent sleep would throw immediately).
            Thread.currentThread().interrupt();
            break;
        }
    }
}
/**
 * Parses the binary geometry message stream ("BGS" protocol, format version
 * 11) and populates GeometryInfo / GeometryData objects in this model.
 *
 * Message types handled: 0 = protocol header, 5 = geometry info (bounds,
 * transformation, data oid), 1 = geometry data (indices/vertices/normals/
 * materials), 6 = end of stream, 3 = parts (unsupported). The byte order and
 * field sequence below must match the server's serializer exactly.
 */
private void processGeometryInputStream(InputStream inputStream, Map<Long, Long> geometryInfoOidToOid) throws IOException, GeometryException, IfcModelInterfaceException {
try (LittleEndianDataInputStream dataInputStream = new LittleEndianDataInputStream(inputStream)) {
boolean done = false;
while (!done) {
byte type = dataInputStream.readByte();
if (type == 0) {
// Header: protocol magic, version, alignment padding, model bounds.
String protocol = dataInputStream.readUTF();
if (!protocol.equals("BGS")) {
throw new GeometryException("Protocol != BGS (" + protocol + ")");
}
byte formatVersion = dataInputStream.readByte();
if (formatVersion != 11) {
throw new GeometryException("Unsupported version " + formatVersion + " / 11");
}
// Skip padding so the following doubles are 4-byte aligned.
int skip = 4 - (7 % 4);
if (skip != 0 && skip != 4) {
dataInputStream.readFully(new byte[skip]);
}
// Six doubles: model min/max bounds (discarded here).
for (int i = 0; i < 6; i++) {
dataInputStream.readDouble();
}
} else if (type == 5) {
// Geometry info record for one IfcProduct.
dataInputStream.readFully(new byte[7]); // alignment padding
dataInputStream.readLong(); // roid
long geometryInfoOid = dataInputStream.readLong();
GeometryInfo geometryInfo = (GeometryInfo) get(geometryInfoOid);
if (geometryInfo == null) {
geometryInfo = create(GeometryInfo.class);
}
((IdEObjectImpl)geometryInfo).setOid(geometryInfoOid);
((IdEObjectImpl)geometryInfo).setLoadingState(State.LOADING);
add(geometryInfoOid, geometryInfo);
// Attach to the product this geometry was requested for.
Long ifcProductOid = geometryInfoOidToOid.get(geometryInfoOid);
if (ifcProductOid == null) {
throw new GeometryException("Missing geometry info id: " + geometryInfoOid);
}
IdEObject ifcProduct = get(ifcProductOid);
EStructuralFeature geometryFeature = getPackageMetaData().getEClass("IfcProduct").getEStructuralFeature("geometry");
ifcProduct.eSet(geometryFeature, geometryInfo);
org.bimserver.models.geometry.Vector3f minBounds = GeometryFactory.eINSTANCE.createVector3f();
minBounds.setX(dataInputStream.readDouble());
minBounds.setY(dataInputStream.readDouble());
minBounds.setZ(dataInputStream.readDouble());
org.bimserver.models.geometry.Vector3f maxBounds = GeometryFactory.eINSTANCE.createVector3f();
maxBounds.setX(dataInputStream.readDouble());
maxBounds.setY(dataInputStream.readDouble());
maxBounds.setZ(dataInputStream.readDouble());
geometryInfo.setMinBounds(minBounds);
geometryInfo.setMaxBounds(maxBounds);
// 4x4 transformation matrix of doubles, kept as raw bytes.
byte[] transformation = new byte[16 * 8];
dataInputStream.readFully(transformation);
geometryInfo.setTransformation(transformation);
// Link to the (possibly shared) geometry data record.
long geometryDataOid = dataInputStream.readLong();
GeometryData geometryData = (GeometryData) get(geometryDataOid);
if (geometryData == null) {
geometryData = GeometryFactory.eINSTANCE.createGeometryData();
add(geometryDataOid, geometryData);
}
geometryInfo.setData(geometryData);
((IdEObjectImpl)geometryData).setLoadingState(State.LOADED);
} else if (type == 3) {
throw new GeometryException("Parts not supported");
} else if (type == 1) {
// Geometry data record: raw index/vertex/normal/material buffers.
dataInputStream.readFully(new byte[7]); // alignment padding
long geometryDataOid = dataInputStream.readLong();
GeometryData geometryData = (GeometryData) get(geometryDataOid);
if (geometryData == null) {
geometryData = GeometryFactory.eINSTANCE.createGeometryData();
add(geometryDataOid, geometryData);
}
((IdEObjectImpl)geometryData).setOid(geometryDataOid);
((IdEObjectImpl)geometryData).setLoadingState(State.LOADING);
int nrIndices = dataInputStream.readInt();
byte[] indices = new byte[nrIndices * 4];
dataInputStream.readFully(indices);
geometryData.setIndices(indices);
// colorType 1 carries a single RGBA colour (discarded here).
int colorType = dataInputStream.readInt();
if (colorType == 1) {
dataInputStream.readFloat();
dataInputStream.readFloat();
dataInputStream.readFloat();
dataInputStream.readFloat();
}
int nrVertices = dataInputStream.readInt();
byte[] vertices = new byte[nrVertices * 4];
dataInputStream.readFully(vertices);
geometryData.setVertices(vertices);
int nrNormals = dataInputStream.readInt();
byte[] normals = new byte[nrNormals * 4];
dataInputStream.readFully(normals);
geometryData.setNormals(normals);
int nrMaterials = dataInputStream.readInt();
byte[] materials = new byte[nrMaterials * 4];
dataInputStream.readFully(materials);
geometryData.setMaterials(materials);
((IdEObjectImpl)geometryData).setLoadingState(State.LOADED);
} else if (type == 6) {
done = true;
} else {
throw new GeometryException("Unimplemented type: " + type);
}
}
} catch (EOFException e) {
// End of stream without an explicit end marker: treated as done.
} catch (ObjectAlreadyExistsException e) {
e.printStackTrace();
}
}
/**
 * Reads the JSON download stream for the given topic into this model.
 *
 * @throws IfcModelInterfaceException when no stream is available or
 *                                    deserialization fails (cause attached)
 */
private void processDownload(Long topicId) throws UserException, ServerException, PublicInterfaceNotFoundException, IfcModelInterfaceException, IOException {
    InputStream downloadData = bimServerClient.getDownloadData(topicId);
    if (downloadData == null) {
        throw new IfcModelInterfaceException("No InputStream to read from");
    }
    // try-with-resources replaces the manual finally/close; the previous
    // separate catch for DeserializeException was redundant (it was wrapped
    // identically to any other Exception), as was the null check in finally.
    try (InputStream in = downloadData) {
        new SharedJsonDeserializer(false).read(in, this, false);
    } catch (Exception e) {
        throw new IfcModelInterfaceException(e);
    }
}
/**
 * Returns all objects of exactly {@code eClass}, lazily downloading the class
 * from the server the first time it is requested (unless the model is already
 * fully loaded). Geometry for the loaded products is fetched afterwards.
 */
@Override
public <T extends IdEObject> List<T> getAll(EClass eClass) {
if (!loadedClasses.contains(eClass.getName()) && modelState != ModelState.FULLY_LOADED) {
LOGGER.info("Loading all " + eClass.getName());
try {
modelState = ModelState.LOADING;
Query query = new Query(getPackageMetaData());
QueryPart queryPart = query.createQueryPart();
queryPart.addType(eClass, false);
// For products, also pull the geometry reference in the same query.
if (includeGeometry && getPackageMetaData().getEClass("IfcProduct").isSuperTypeOf(eClass)) {
Include include = queryPart.createInclude();
include.addType(eClass, false);
include.addField("geometry");
}
JsonQueryObjectModelConverter converter = new JsonQueryObjectModelConverter(getPackageMetaData());
long topicId = bimServerClient.getServiceInterface().download(Collections.singleton(roid), converter.toJson(query).toString(), getJsonSerializerOid(), false);
waitForDonePreparing(topicId);
processDownload(topicId);
bimServerClient.getServiceInterface().cleanupLongAction(topicId);
loadedClasses.add(eClass.getName());
rebuildIndexPerClass(eClass);
modelState = ModelState.NONE;
} catch (Exception e) {
LOGGER.error("", e);
}
}
List<T> result = super.getAll(eClass);
// Geometry is fetched separately; a fully loaded model already has it.
try {
if (modelState != ModelState.FULLY_LOADED) {
loadGeometry();
}
} catch (ServerException e) {
e.printStackTrace();
} catch (UserException e) {
e.printStackTrace();
} catch (PublicInterfaceNotFoundException e) {
e.printStackTrace();
} catch (QueryException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} catch (GeometryException e) {
e.printStackTrace();
} catch (IfcModelInterfaceException e) {
e.printStackTrace();
}
return result;
}
@Override
public Set<String> getGuids(EClass eClass) {
    // Ensure eClass and every subtype is present locally before reading guids.
    getAllWithSubTypes(eClass);
    Set<String> guids = super.getGuids(eClass);
    return guids;
}
@Override
public Set<String> getNames(EClass eClass) {
    // Ensure eClass and every subtype is present locally before reading names.
    getAllWithSubTypes(eClass);
    Set<String> names = super.getNames(eClass);
    return names;
}
@Override
public IdEObject getByName(EClass eClass, String name) {
// TODO no lazy loading is performed here (unlike get/getByGuid) — a name only
// resolves if the object happens to be cached already.
return super.getByName(eClass, name);
}
@Override
public long size() {
    // The total object count is fetched from the server once and then cached;
    // -1 marks "not yet fetched". On a failed fetch the cache stays -1.
    if (cachedObjectCount != -1) {
        return cachedObjectCount;
    }
    try {
        cachedObjectCount = bimServerClient.getLowLevelInterface().count(roid, "[ALL]");
    } catch (Exception e) {
        LOGGER.error("", e);
    }
    return cachedObjectCount;
}
@Override
public Set<Long> keySet() {
// The key set is only complete once the full model is loaded.
try {
loadDeep();
} catch (Exception e) {
// Best-effort: on failure the (possibly partial) local key set is returned.
LOGGER.error("", e);
}
return super.keySet();
}
@Override
public IdEObject get(long oid) {
    // Consult the local cache first; on a miss, fetch the single object from
    // the server and try the cache once more.
    IdEObject cached = super.get(oid);
    if (cached != null) {
        return cached;
    }
    loadExplicit(oid);
    return super.get(oid);
}
// Downloads the single object identified by oid from the server, unless it is
// already loaded or currently loading. Failures are logged, not rethrown.
public void loadExplicit(long oid) {
try {
IdEObjectImpl idEObjectImpl = (IdEObjectImpl) super.get(oid);
if (idEObjectImpl != null && !idEObjectImpl.isLoadedOrLoading()) {
// Mark both the object and the model as loading so re-entrant calls skip work.
idEObjectImpl.setLoadingState(State.LOADING);
modelState = ModelState.LOADING;
Query query = new Query(getPackageMetaData());
QueryPart queryPart = query.createQueryPart();
queryPart.addOid(oid);
JsonQueryObjectModelConverter converter = new JsonQueryObjectModelConverter(getPackageMetaData());
long topicId = bimServerClient.getServiceInterface().download(Collections.singleton(roid), converter.toJson(query).toString(), getJsonSerializerOid(), false);
waitForDonePreparing(topicId);
processDownload(topicId);
bimServerClient.getServiceInterface().cleanupLongAction(topicId);
idEObjectImpl.setLoadingState(State.LOADED);
modelState = ModelState.NONE;
}
} catch (Exception e) {
// NOTE(review): if the download throws, loading states stay at LOADING — confirm
// whether they should be reset here.
LOGGER.error("", e);
}
}
// Cache-only lookup: never triggers a server download, may return null.
public IdEObject getNoFetch(long oid) {
return super.get(oid);
}
@Override
public Collection<IdEObject> getValues() {
// All values are only available once the full model is loaded.
try {
loadDeep();
} catch (Exception e) {
// Best-effort: on failure the (possibly partial) local values are returned.
LOGGER.error("", e);
}
return super.getValues();
}
@Override
public <T extends IdEObject> List<T> getAllWithSubTypes(EClass eClass) {
    // Returns all instances of eClass and all of its subtypes, lazily
    // downloading them from the server the first time this class is requested.
    if (!loadedClasses.contains(eClass.getName()) && modelState != ModelState.FULLY_LOADED) {
        try {
            modelState = ModelState.LOADING;
            Query query = new Query(getPackageMetaData());
            QueryPart queryPart = query.createQueryPart();
            queryPart.addType(eClass, true);
            // For IfcProduct subtypes the server can include geometry in the same download.
            if (includeGeometry && getPackageMetaData().getEClass("IfcProduct").isSuperTypeOf(eClass)) {
                Include include = queryPart.createInclude();
                include.addType(eClass, true);
                include.addField("geometry");
            }
            JsonQueryObjectModelConverter converter = new JsonQueryObjectModelConverter(getPackageMetaData());
            long topicId = bimServerClient.getServiceInterface().download(Collections.singleton(roid), converter.toJson(query).toString(), getJsonSerializerOid(), false);
            waitForDonePreparing(topicId);
            processDownload(topicId);
            bimServerClient.getServiceInterface().cleanupLongAction(topicId);
            for (EClass subClass : bimServerClient.getMetaDataManager().getPackageMetaData(eClass.getEPackage().getName()).getAllSubClasses(eClass)) {
                loadedClasses.add(subClass.getName());
                // Fix: rebuild the index for each subclass; the original rebuilt the
                // index of the requested class once per subclass and never indexed
                // the subclasses themselves.
                rebuildIndexPerClass(subClass);
            }
            loadedClasses.add(eClass.getName());
            rebuildIndexPerClass(eClass);
            modelState = ModelState.NONE;
            loadGeometry();
        } catch (Exception e) {
            LOGGER.error("", e);
        }
    }
    return super.getAllWithSubTypes(eClass);
}
// Returns the id of the server-side transaction used for low-level changes.
public Long getTransactionId() {
return tid;
}
// Returns the current loading state of this client-side model.
public ModelState getModelState() {
return modelState;
}
@Override
public boolean contains(long oid) {
// get() triggers a lazy fetch of the object if it is not cached yet.
get(oid);
return super.contains(oid);
}
// Cache-only membership test: never triggers a server download.
public boolean containsNoFetch(long oid) {
return super.contains(oid);
}
@Override
public boolean containsGuid(String guid) {
// getByGuid() triggers a lazy fetch of the object if it is not cached yet.
getByGuid(guid);
return super.containsGuid(guid);
}
@Override
public int count(EClass eClass) {
// NOTE(review): no lazy loading here — counts only what is cached locally; confirm
// whether getAllWithSubTypes(eClass) should be called first.
return super.count(eClass);
}
@Override
public IdEObject getByGuid(String guid) {
// Consult the local cache first; on a miss, download the object by guid and retry.
IdEObject idEObject = super.getByGuid(guid);
if (idEObject == null) {
try {
modelState = ModelState.LOADING;
Query query = new Query(getPackageMetaData());
QueryPart queryPart = query.createQueryPart();
queryPart.addGuid(guid);
JsonQueryObjectModelConverter converter = new JsonQueryObjectModelConverter(getPackageMetaData());
long topicId = bimServerClient.getServiceInterface().download(Collections.singleton(roid), converter.toJson(query).toString(), getJsonSerializerOid(), false);
waitForDonePreparing(topicId);
processDownload(topicId);
bimServerClient.getServiceInterface().cleanupLongAction(topicId);
modelState = ModelState.NONE;
return super.getByGuid(guid);
} catch (Exception e) {
// NOTE(review): on failure modelState stays LOADING and null is returned — confirm
// whether the state should be reset here.
LOGGER.error("", e);
}
}
return idEObject;
}
// Creates a new object of the given Java type by resolving its EClass through
// the model's own package first and the geometry package second.
public <T extends IdEObject> T create(Class<T> clazz) throws IfcModelInterfaceException, ObjectAlreadyExistsException {
    String simpleName = clazz.getSimpleName();
    EClassifier eClassifier = getPackageMetaData().getEPackage().getEClassifier(simpleName);
    if (eClassifier == null) {
        // Fall back to the geometry package for geometry-related types.
        eClassifier = GeometryPackage.eINSTANCE.getEClassifier(simpleName);
    }
    if (eClassifier == null) {
        throw new IfcModelInterfaceException("EClass not found " + clazz);
    }
    return create((EClass) eClassifier);
}
@SuppressWarnings("unchecked")
@Override
// Creates an object with a predetermined oid; when change recording is on,
// the change-tracking adapter is attached so edits are forwarded to the server.
public <T extends IdEObject> T create(EClass eClass, long oid) throws IfcModelInterfaceException {
IdEObjectImpl object = super.create(eClass, oid);
if (recordChanges) {
object.eAdapters().add(adapter);
}
return (T) object;
}
@SuppressWarnings("unchecked")
@Override
// Creates a new object of the given EClass. When change recording is on, the
// oid is allocated server-side via the low-level interface and the object is
// registered in this model; otherwise the object is created detached.
public <T extends IdEObject> T create(EClass eClass) throws IfcModelInterfaceException, ObjectAlreadyExistsException {
final IdEObjectImpl idEObject = (IdEObjectImpl) eClass.getEPackage().getEFactoryInstance().create(eClass);
idEObject.setModel(this);
if (recordChanges) {
idEObject.eAdapters().add(adapter);
try {
// The server decides whether a GlobalId must be generated for this type.
Long oid = bimServerClient.getLowLevelInterface().createObject(tid, eClass.getName(), eClass.getEStructuralFeature("GlobalId") != null);
idEObject.setOid(oid);
} catch (Exception e) {
// NOTE(review): on failure the object keeps its default oid but is still added below — verify.
LOGGER.error("", e);
}
add(idEObject.getOid(), idEObject);
}
return (T) idEObject;
}
@Override
// Forwards a single-valued attribute/reference change to the server via the
// low-level interface. No-op when change recording is off, when the feature is
// many-valued, or while the model/object is still loading (to avoid echoing
// server-originated values back to the server).
public void set(IdEObject idEObject, EStructuralFeature eFeature, Object newValue) {
if (!recordChanges) {
return;
}
if (!eFeature.isMany()) {
if (getModelState() != ModelState.LOADING && ((IdEObjectImpl)idEObject).getLoadingState() != State.LOADING) {
try {
if (newValue != EStructuralFeature.Internal.DynamicValueHolder.NIL) {
LowLevelInterface lowLevelInterface = getBimServerClient().getLowLevelInterface();
if (eFeature.getName().equals("wrappedValue")) {
// Wrapped objects get the same oid as their
// "parent" object, so we know which object the
// client wants to update. That's why we can use
// idEObject.getOid() here
// We are making this crazy hack ever crazier, let's
// iterate over our parents features, and see if
// there is one matching our wrapped type...
// Seriously, when there are multiple fields of the
// same type, this fails miserably, a real fix
// should probably store the parent-oid + feature
// name in the wrapped object (requires two extra,
// volatile, fields),
// or we just don't support this (just create a new
// wrapped object too), we could even throw some
// sort of exception. Hack morally okay because it's
// client-side...
EReference foundReference = null;
if (contains(idEObject.getOid())) {
IdEObject parentObject = get(idEObject.getOid());
int found = 0;
foundReference = null;
for (EReference testReference : parentObject.eClass().getEAllReferences()) {
if (((EClass) testReference.getEType()).isSuperTypeOf(idEObject.eClass())) {
foundReference = testReference;
found++;
if (found > 1) {
throw new RuntimeException(
"Sorry, crazy hack could not resolve the right field, please let BIMserver developer know (debug info: " + parentObject.eClass().getName() + ", " + idEObject.eClass().getName() + ")");
}
}
}
// NOTE(review): foundReference can still be null here if no reference on the
// parent matches the wrapped type — potential NPE below; confirm.
if (eFeature.getEType() == EcorePackage.eINSTANCE.getEString()) {
lowLevelInterface.setWrappedStringAttribute(getTransactionId(), idEObject.getOid(), foundReference.getName(), idEObject.eClass().getName(), (String) newValue);
} else if (eFeature.getEType() == EcorePackage.eINSTANCE.getELong() || eFeature.getEType() == EcorePackage.eINSTANCE.getELongObject()) {
lowLevelInterface.setWrappedLongAttribute(getTransactionId(), idEObject.getOid(), foundReference.getName(), idEObject.eClass().getName(), (Long) newValue);
} else if (eFeature.getEType() == EcorePackage.eINSTANCE.getEDouble() || eFeature.getEType() == EcorePackage.eINSTANCE.getEDoubleObject()) {
lowLevelInterface.setWrappedDoubleAttribute(getTransactionId(), idEObject.getOid(), foundReference.getName(), idEObject.eClass().getName(), (Double) newValue);
} else if (eFeature.getEType() == EcorePackage.eINSTANCE.getEBoolean() || eFeature.getEType() == EcorePackage.eINSTANCE.getEBooleanObject()) {
lowLevelInterface.setWrappedBooleanAttribute(getTransactionId(), idEObject.getOid(), foundReference.getName(), idEObject.eClass().getName(), (Boolean) newValue);
} else if (eFeature.getEType() == EcorePackage.eINSTANCE.getEInt() || eFeature.getEType() == EcorePackage.eINSTANCE.getEIntegerObject()) {
lowLevelInterface.setWrappedIntegerAttribute(getTransactionId(), idEObject.getOid(), foundReference.getName(), idEObject.eClass().getName(), (Integer) newValue);
} else if (eFeature.getEType() == EcorePackage.eINSTANCE.getEByteArray()) {
throw new RuntimeException("Unimplemented " + eFeature.getEType().getName() + " " + newValue);
}
} else {
// Parent object not cached locally: presumably fall back to setting the
// attribute directly on the wrapped object's own oid — confirm.
if (eFeature.getEType() == EcorePackage.eINSTANCE.getEString()) {
lowLevelInterface.setStringAttribute(getTransactionId(), idEObject.getOid(), eFeature.getName(), (String) newValue);
} else if (eFeature.getEType() == EcorePackage.eINSTANCE.getELong() || eFeature.getEType() == EcorePackage.eINSTANCE.getELongObject()) {
lowLevelInterface.setLongAttribute(getTransactionId(), idEObject.getOid(), eFeature.getName(), (Long) newValue);
} else if (eFeature.getEType() == EcorePackage.eINSTANCE.getEDouble() || eFeature.getEType() == EcorePackage.eINSTANCE.getEDoubleObject()) {
lowLevelInterface.setDoubleAttribute(getTransactionId(), idEObject.getOid(), eFeature.getName(), (Double) newValue);
} else if (eFeature.getEType() == EcorePackage.eINSTANCE.getEBoolean() || eFeature.getEType() == EcorePackage.eINSTANCE.getEBooleanObject()) {
lowLevelInterface.setBooleanAttribute(getTransactionId(), idEObject.getOid(), eFeature.getName(), (Boolean) newValue);
} else if (eFeature.getEType() == EcorePackage.eINSTANCE.getEInt() || eFeature.getEType() == EcorePackage.eINSTANCE.getEIntegerObject()) {
lowLevelInterface.setIntegerAttribute(getTransactionId(), idEObject.getOid(), eFeature.getName(), (Integer) newValue);
} else if (eFeature.getEType() == EcorePackage.eINSTANCE.getEByteArray()) {
lowLevelInterface.setByteArrayAttribute(getTransactionId(), idEObject.getOid(), eFeature.getName(), (Byte[]) newValue);
} else if (eFeature.getEType() instanceof EEnum) {
lowLevelInterface.setEnumAttribute(getTransactionId(), idEObject.getOid(), eFeature.getName(), ((Enum<?>) newValue).toString());
} else if (eFeature instanceof EReference) {
if (newValue == null) {
// -1 signals "clear the reference" to the server.
lowLevelInterface.setReference(getTransactionId(), idEObject.getOid(), eFeature.getName(), -1L);
} else {
lowLevelInterface.setReference(getTransactionId(), idEObject.getOid(), eFeature.getName(), ((IdEObject) newValue).getOid());
}
} else {
throw new RuntimeException("Unimplemented " + eFeature.getEType().getName() + " " + newValue);
}
}
} else {
// Regular (non-wrapped) feature: set the attribute/reference directly.
if (eFeature.getEType() == EcorePackage.eINSTANCE.getEString()) {
lowLevelInterface.setStringAttribute(getTransactionId(), idEObject.getOid(), eFeature.getName(), (String) newValue);
} else if (eFeature.getEType() == EcorePackage.eINSTANCE.getELong() || eFeature.getEType() == EcorePackage.eINSTANCE.getELongObject()) {
lowLevelInterface.setLongAttribute(getTransactionId(), idEObject.getOid(), eFeature.getName(), (Long) newValue);
} else if (eFeature.getEType() == EcorePackage.eINSTANCE.getEDouble() || eFeature.getEType() == EcorePackage.eINSTANCE.getEDoubleObject()) {
lowLevelInterface.setDoubleAttribute(getTransactionId(), idEObject.getOid(), eFeature.getName(), (Double) newValue);
} else if (eFeature.getEType() == EcorePackage.eINSTANCE.getEBoolean() || eFeature.getEType() == EcorePackage.eINSTANCE.getEBooleanObject()) {
lowLevelInterface.setBooleanAttribute(getTransactionId(), idEObject.getOid(), eFeature.getName(), (Boolean) newValue);
} else if (eFeature.getEType() == EcorePackage.eINSTANCE.getEInt() || eFeature.getEType() == EcorePackage.eINSTANCE.getEIntegerObject()) {
lowLevelInterface.setIntegerAttribute(getTransactionId(), idEObject.getOid(), eFeature.getName(), (Integer) newValue);
} else if (eFeature.getEType() == EcorePackage.eINSTANCE.getEByteArray()) {
// The low-level API takes boxed Byte[]; convert a primitive byte[] if needed.
if (newValue instanceof byte[]) {
Byte[] n = new Byte[((byte[]) newValue).length];
for (int i = 0; i < n.length; i++) {
n[i] = ((byte[]) newValue)[i];
}
newValue = n;
}
lowLevelInterface.setByteArrayAttribute(getTransactionId(), idEObject.getOid(), eFeature.getName(), (Byte[]) newValue);
} else if (eFeature.getEType() instanceof EEnum) {
lowLevelInterface.setEnumAttribute(getTransactionId(), idEObject.getOid(), eFeature.getName(), ((Enum<?>) newValue).toString());
} else if (eFeature instanceof EReference) {
if (newValue == null) {
// -1 signals "clear the reference" to the server.
lowLevelInterface.setReference(getTransactionId(), idEObject.getOid(), eFeature.getName(), -1L);
} else {
lowLevelInterface.setReference(getTransactionId(), idEObject.getOid(), eFeature.getName(), ((IdEObject) newValue).getOid());
}
} else {
throw new RuntimeException("Unimplemented " + eFeature.getEType().getName() + " " + newValue);
}
}
}
} catch (ServiceException e) {
LOGGER.error("", e);
} catch (PublicInterfaceNotFoundException e) {
LOGGER.error("", e);
}
}
}
}
// Checks this model in as a new revision of project poid. Oids are first
// renumbered sequentially from 1 so the serialized stream is self-consistent.
public void checkin(long poid, String comment) throws ServerException, UserException, PublicInterfaceNotFoundException {
this.fixOids(new OidProvider() {
private long c = 1;
@Override
public long newOid(EClass eClass) {
return c++;
}
});
SharedJsonSerializer sharedJsonSerializer = new SharedJsonSerializer(this, false);
SDeserializerPluginConfiguration deserializer = bimServerClient.getServiceInterface().getSuggestedDeserializerForExtension("json", poid);
// Synchronous checkin streaming the JSON serialization of this model.
bimServerClient.checkin(poid, comment, deserializer.getOid(), false, Flow.SYNC, -1, "test", new SerializerInputstream(sharedJsonSerializer));
}
// Loads the given object's full data from the server unless it is already
// being loaded (avoids re-entrant downloads).
public void load(IdEObject object) {
if (((IdEObjectImpl)object).getLoadingState() == State.LOADING) {
return;
}
loadExplicit(object.getOid());
}
@Override
public void remove(IdEObject object) {
    // Removes the object server-side through the low-level interface within the
    // current transaction. Failures are logged, not rethrown (matching the
    // error-handling style of the other mutating methods in this class).
    try {
        bimServerClient.getLowLevelInterface().removeObject(getTransactionId(), object.getOid());
    } catch (ServerException | UserException | PublicInterfaceNotFoundException e) {
        // Multi-catch replaces three identical catch blocks.
        LOGGER.error("", e);
    }
}
// @Override
// public void query(ObjectNode query) {
// try {
// modelState = ModelState.LOADING;
// Long downloadByTypes =
// bimServerClient.getServiceInterface().downloadByJsonQuery(Collections.singleton(roid),
// query.toString(), getJsonSerializerOid(), true);
// processDownload(downloadByTypes);
// modelState = ModelState.NONE;
// } catch (Exception e) {
// LOGGER.error("", e);
// Runs an arbitrary query against the server and merges the results into this
// model. The optional listener is notified of changes only for the duration of
// the download; failures are logged, not rethrown.
public void queryNew(Query query, IfcModelChangeListener ifcModelChangeListener) {
try {
modelState = ModelState.LOADING;
JsonQueryObjectModelConverter converter = new JsonQueryObjectModelConverter(getPackageMetaData());
Long topicId = bimServerClient.getServiceInterface().download(Collections.singleton(roid), converter.toJson(query).toString(), getJsonSerializerOid(), false);
waitForDonePreparing(topicId);
// Attach the listener just for this download so it only sees these changes.
if (ifcModelChangeListener != null) {
addChangeListener(ifcModelChangeListener);
}
processDownload(topicId);
bimServerClient.getServiceInterface().cleanupLongAction(topicId);
if (ifcModelChangeListener != null) {
removeChangeListener(ifcModelChangeListener);
}
modelState = ModelState.NONE;
} catch (Exception e) {
// NOTE(review): on failure modelState stays LOADING and the listener may stay attached — confirm.
LOGGER.error("", e);
}
}
// @Override
// public SIfcHeader getIfcHeader() {
// SIfcHeader ifcHeader = super.getIfcHeader();
// if (ifcHeader == null) {
// try {
// SRevision revision =
// bimServerClient.getServiceInterface().getRevision(roid);
// if (revision.getConcreteRevisions().size() == 1) {
// ifcHeader =
// bimServerClient.getServiceInterface().getIfcHeader(revision.getConcreteRevisions().get(0));
// if (ifcHeader != null) {
// setIfcHeader(ifcHeader);
// return ifcHeader;
// } catch (ServerException e) {
// LOGGER.error("", e);
// } catch (UserException e) {
// LOGGER.error("", e);
// } catch (PublicInterfaceNotFoundException e) {
// LOGGER.error("", e);
// return null;
} |
package uk.co.pilllogger.fragments;
import android.app.Activity;
import android.content.Context;
import android.os.Bundle;
import android.app.Fragment;
import android.support.v4.widget.SlidingPaneLayout;
import android.util.SparseIntArray;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.CheckBox;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;
import com.echo.holographlibrary.Bar;
import com.echo.holographlibrary.BarGraph;
import com.echo.holographlibrary.Line;
import com.echo.holographlibrary.LineGraph;
import com.echo.holographlibrary.LinePoint;
import com.echo.holographlibrary.PieGraph;
import com.echo.holographlibrary.PieSlice;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import uk.co.pilllogger.R;
import uk.co.pilllogger.adapters.AddConsumptionPillListAdapter;
import uk.co.pilllogger.adapters.ConsumptionListAdapter;
import uk.co.pilllogger.adapters.GraphPillListAdapter;
import uk.co.pilllogger.adapters.PillsListAdapter;
import uk.co.pilllogger.adapters.PillsListBaseAdapter;
import uk.co.pilllogger.helpers.GraphHelper;
import uk.co.pilllogger.helpers.Logger;
import uk.co.pilllogger.listeners.AddConsumptionClickListener;
import uk.co.pilllogger.mappers.ConsumptionMapper;
import uk.co.pilllogger.models.Consumption;
import uk.co.pilllogger.models.Pill;
import uk.co.pilllogger.repositories.ConsumptionRepository;
import uk.co.pilllogger.state.State;
import uk.co.pilllogger.tasks.GetConsumptionsTask;
import uk.co.pilllogger.tasks.GetFavouritePillsTask;
import uk.co.pilllogger.tasks.GetPillsTask;
import uk.co.pilllogger.tasks.InitTestDbTask;
import uk.co.pilllogger.tasks.InsertConsumptionTask;
import org.joda.time.DateTime;
import org.joda.time.Days;
// Main screen fragment: shows the consumption list, favourite-pill shortcuts and
// a graph. Implements the completion callbacks of the async DB tasks it starts.
public class MainFragment extends Fragment implements InitTestDbTask.ITaskComplete, GetConsumptionsTask.ITaskComplete, GetFavouritePillsTask.ITaskComplete,
GetPillsTask.ITaskComplete {
private static final String TAG = "MainFragment";
ListView _listView;
ViewGroup _favouriteContainer;
View _mainLayout;
// All known pills indexed by id, populated in pillsReceived().
HashMap<Integer, Pill> _allPills = new HashMap<Integer, Pill>();
// Kept so anonymous listeners can reference the fragment/activity safely.
Fragment _fragment;
Activity _activity;
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
// Inflate the layout for this fragment
View v = inflater.inflate(R.layout.main_fragment, container, false);
_mainLayout = v;
_fragment = this;
_activity = getActivity();
Logger.v(TAG, "onCreateView Called");
//Doing this to test - will not be needed when working fully
new InitTestDbTask(this.getActivity(), this).execute();
// NOTE(review): v is null-checked for these two lookups but dereferenced
// unconditionally just below (main_add, _listView.getAdapter()) — inconsistent.
_listView = (ListView) (v != null ? v.findViewById(R.id.main_consumption_list) : null);
_favouriteContainer = (ViewGroup) (v!=null ? v.findViewById(R.id.button_container):null);
ImageView addConsumption = (ImageView) v.findViewById(R.id.main_add);
addConsumption.setOnClickListener(new AddConsumptionClickListener(getActivity()));
if (_listView.getAdapter() != null) //Trying this to make the list refresh after adding the new consumption
((ConsumptionListAdapter)_listView.getAdapter()).notifyDataSetChanged();
return v;
}
@Override
public void initComplete() {
// Test DB is ready: load the pills.
new GetPillsTask(this.getActivity(), this).execute();
}
@Override
public void onResume() {
super.onResume();
// Refresh pills and favourites every time the fragment becomes visible.
new GetPillsTask(this.getActivity(), this).execute();
new GetFavouritePillsTask(this.getActivity(), this).execute();
}
@Override
public void consumptionsReceived(List<Consumption> consumptions) {
// Rebind the list and re-plot the graph for the last month of consumptions.
if(consumptions != null && consumptions.size() > 0){
List<Consumption> grouped = ConsumptionRepository.getSingleton(getActivity()).groupConsumptions(consumptions);
_listView.setAdapter(new ConsumptionListAdapter(getActivity(), this, R.layout.consumption_list_item, grouped));
DateTime aMonthAgo = new DateTime().minusMonths(1);
Days totalDays = Days.daysBetween(aMonthAgo.withTimeAtStartOfDay(), new DateTime().withTimeAtStartOfDay().plusDays(1));
int dayCount = totalDays.getDays();
Map<Pill, SparseIntArray> xPoints = ConsumptionMapper.mapByPillAndDate(consumptions, dayCount);
View view = _mainLayout.findViewById(R.id.main_graph);
// The layout decides which graph type is present; plot accordingly.
if(view instanceof LineGraph)
GraphHelper.plotLineGraph(xPoints, dayCount, (LineGraph) view);
if(view instanceof BarGraph)
GraphHelper.plotBarGraph(xPoints, dayCount, (BarGraph)view);
if(view instanceof PieGraph)
GraphHelper.plotPieChart(xPoints, dayCount, (PieGraph)view);
}
}
@Override
public void favouritePillsReceived(List<Pill> pills) {
// Rebuild the favourite-pill shortcut buttons, keeping the leading fixed views.
if(_favouriteContainer == null)
return;
int children = _favouriteContainer.getChildCount();
int start = 1;
if(pills.size() == 0) //remove customise button
start = 2;
_favouriteContainer.removeViews(start, children -start);
for(Pill p : pills){
LayoutInflater layoutInflater = (LayoutInflater)getActivity().getSystemService(Context.LAYOUT_INFLATER_SERVICE);
View v = layoutInflater.inflate(R.layout.favourite_pill, null);
final Pill pill = p;
if(p.getName().length() > 0){
// Each shortcut shows the pill's first letter; tapping it records a consumption now.
TextView letter = (TextView) v.findViewById(R.id.pill_letter);
letter.setText(p.getName().substring(0,1));
letter.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Logger.v("Testing", "Pill: " + pill.getName());
Consumption consumption = new Consumption(pill, new Date());
new InsertConsumptionTask(_activity, consumption).execute();
new GetConsumptionsTask(_activity, (GetConsumptionsTask.ITaskComplete) _fragment, true).execute();
Toast.makeText(_activity, "Added " + pill.getName(), Toast.LENGTH_SHORT).show();
}
});
Logger.d(TAG, "Adding favourite for: " + p.getName());
}
_favouriteContainer.addView(v, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));
}
}
@Override
public void pillsReceived(List<Pill> pills) {
// Cache all pills, ensure each is selected for graphing, and wire the graph
// drawer list (checkbox per pill) to toggle inclusion and refresh the graph.
List<Integer> graphPills = State.getSingleton().getGraphPills();
if (graphPills == null) {
graphPills = new ArrayList<Integer>();
}
for(Pill p : pills){
_allPills.put(p.getId(), p);
if (!graphPills.contains(p.getId()))
graphPills.add(p.getId());
}
State.getSingleton().setGraphPills(graphPills);
final List<Pill> pillList = pills;
ListView list = (ListView) getActivity().findViewById(R.id.graph_drawer);
if (list != null){ //we need to init the adapter
GraphPillListAdapter adapter = new GraphPillListAdapter(getActivity(), R.layout.graph_pill_list, pills);
list.setAdapter(adapter);
list.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
Pill pill = pillList.get(position);
List<Integer> graphPills = State.getSingleton().getGraphPills();
CheckBox checkbox = (CheckBox)view.findViewById(R.id.graph_list_check_box);
if (checkbox.isChecked()) {
checkbox.setChecked(false);
if (graphPills.contains(pill.getId())) {
// Cast to Object so List.remove removes by value, not by index.
graphPills.remove((Object)pill.getId());
}
}
else {
checkbox.setChecked(true);
if (!graphPills.contains(pill.getId())) {
graphPills.add(pill.getId());
}
}
new GetConsumptionsTask(_activity, (GetConsumptionsTask.ITaskComplete) _fragment, true).execute();
}
});
}
new GetConsumptionsTask(this.getActivity(), this, false).execute();
}
}
package com.fullmetalgalaxy.model.persist.gamelog;
import java.util.ArrayList;
import com.fullmetalgalaxy.model.EnuColor;
import com.fullmetalgalaxy.model.HexCoordinateSystem;
import com.fullmetalgalaxy.model.LandType;
import com.fullmetalgalaxy.model.Location;
import com.fullmetalgalaxy.model.RpcFmpException;
import com.fullmetalgalaxy.model.Sector;
import com.fullmetalgalaxy.model.TokenType;
import com.fullmetalgalaxy.model.constant.FmpConstant;
import com.fullmetalgalaxy.model.persist.AnBoardPosition;
import com.fullmetalgalaxy.model.persist.EbRegistration;
import com.fullmetalgalaxy.model.persist.EbToken;
import com.fullmetalgalaxy.model.persist.Game;
import com.fullmetalgalaxy.model.ressources.Messages;
/**
* @author Vincent Legendre
* Land a Freighter from Orbit to a board position.
*/
public class EbEvtLand extends AnEventPlay
{
static final long serialVersionUID = 1;
/**
* token list which has been put in the graveyard by this action
* (colorless tokens destroyed in the deployment area); used by unexec().
*/
private ArrayList<Long> m_TokenIds = null;
public EbEvtLand()
{
super();
init();
}
@Override
public void reinit()
{
super.reinit();
this.init();
}
// Resets this event to its initial state (landing costs nothing).
private void init()
{
setCost( 0 );
m_TokenIds = null;
}
@Override
public GameLogType getType()
{
return GameLogType.EvtLand;
}
@Override
public boolean canBeParallelHidden()
{
// Landing is always visible: it can't be merged into hidden parallel actions.
return false;
}
@Override
public AnBoardPosition getSelectedPosition(Game p_game)
{
return getPosition();
}
/* (non-Javadoc)
* @see com.fullmetalgalaxy.model.persist.AnAction#check()
* Validates every landing rule; throws RpcFmpException on the first violation.
*/
@Override
public void check(Game p_game) throws RpcFmpException
{
super.check(p_game);
if( getPosition().getX() == -1 )
{
// landing position hasn't been chosen yet...
throw new RpcFmpException("");
}
if( getToken( p_game ).getType() != TokenType.Freighter
|| getToken( p_game ).getLocation() != Location.Orbit )
{
// not probable error
throw new RpcFmpException( "Only Freighter in orbit can be landed." );
}
// check that player control the token color
EbRegistration myRegistration = getMyRegistration(p_game);
assert myRegistration != null;
if( !myRegistration.getEnuColor().isColored( getToken(p_game).getColor() ) )
{
throw new RpcFmpException( errMsg().CantMoveDontControl(
Messages.getColorString( getAccountId(), getToken( p_game ).getColor() ),
Messages.getColorString( getAccountId(), myRegistration.getColor() ) ) );
}
// check the freighter isn't landing on sea or mountain
// get the 4 landing hexagons (array is sized 6 because slots are reused
// below for the 6 surrounding-neighbor checks)
AnBoardPosition landingPosition[] = new AnBoardPosition[6];
landingPosition[0] = getPosition();
// The freighter's 3 extra hexes depend on which way its head sector points.
switch( landingPosition[0].getSector() )
{
case North:
case SouthEast:
case SouthWest:
landingPosition[1] = p_game.getCoordinateSystem().getNeighbor( landingPosition[0], Sector.North );
landingPosition[2] = p_game.getCoordinateSystem().getNeighbor( landingPosition[0], Sector.SouthEast );
landingPosition[3] = p_game.getCoordinateSystem().getNeighbor( landingPosition[0], Sector.SouthWest );
break;
case NorthEast:
case South:
case NorthWest:
landingPosition[1] = p_game.getCoordinateSystem().getNeighbor( landingPosition[0], Sector.NorthEast );
landingPosition[2] = p_game.getCoordinateSystem().getNeighbor( landingPosition[0], Sector.South );
landingPosition[3] = p_game.getCoordinateSystem().getNeighbor( landingPosition[0], Sector.NorthWest );
// NOTE(review): no break here — falls through into the empty default (harmless,
// but an explicit break would be clearer).
default:
// impossible error
break;
}
// check the 4 hexagons
for( int i = 0; i < 4; i++ )
{
LandType land = p_game.getLand( landingPosition[i] );
if( (land == LandType.None) || (land == LandType.Sea) || (land == LandType.Reef)
|| (land == LandType.Montain) )
{
throw new RpcFmpException( errMsg().CantLandOn(
Messages.getLandString( getAccountId(), land ) ) );
}
}
// check that freighter isn't landing close to another freighter
for( EbToken currentToken : p_game.getSetToken() )
{
if( (currentToken.getType() == TokenType.Freighter)
&& (currentToken.getLocation() == Location.Board)
&& (currentToken.getId() != getToken(p_game).getId())
&& (p_game.getCoordinateSystem().getDiscreteDistance( landingPosition[0], currentToken.getPosition() ) <= FmpConstant.minSpaceBetweenFreighter) )
{
throw new RpcFmpException( errMsg().CantLandCloser( FmpConstant.minSpaceBetweenFreighter ) );
}
}
// check that freighter isn't landing too close to the map border
// (skipped on the axes where the map wraps around)
if( !p_game.getMapShape().isEWLinked() &&
(getPosition().getX() < 2 || getPosition().getX() > (p_game.getLandWidth() - 3)) )
{
throw new RpcFmpException( errMsg().CantLandTooCloseBorder() );
}
if( !p_game.getMapShape().isNSLinked() &&
(getPosition().getY() < 2 || getPosition().getY() > (p_game.getLandHeight() - 3)) )
{
throw new RpcFmpException( errMsg().CantLandTooCloseBorder() );
}
// check empty hex near landing position: the array slots are reused to hold
// the positions two hexes away in each of the 6 directions.
HexCoordinateSystem coordinateSystem = p_game.getCoordinateSystem();
for( int i = 0; i < 6; i++ )
{
landingPosition[i] = coordinateSystem.getNeighbor( getPosition(), Sector.getFromOrdinal( i ) );
landingPosition[i] = coordinateSystem.getNeighbor( landingPosition[i],
Sector.getFromOrdinal( i ) );
}
for( int i = 0; i < 6; i++ )
{
if( p_game.getLand( landingPosition[i] ) == LandType.None )
{
throw new RpcFmpException( errMsg().CantLandTooCloseBorder() );
}
}
}
/* (non-Javadoc)
* @see com.fullmetalgalaxy.model.persist.AnAction#exec()
* Lands the freighter: destroys colorless tokens in the deployment area
* (remembering them for unexec) and unloads up to three turrets.
*/
@Override
public void exec(Game p_game) throws RpcFmpException
{
super.exec(p_game);
p_game.moveToken( getToken(p_game), getPosition() );
getToken(p_game).incVersion();
// destroy any colorless token in the deployment area
m_TokenIds = new ArrayList<Long>();
for( EbToken currentToken : p_game.getSetToken() )
{
if( (currentToken.getColor() == EnuColor.None)
&& (currentToken.getLocation() == Location.Board)
&& (p_game.getCoordinateSystem().getDiscreteDistance( getPosition(), currentToken.getPosition() ) <= FmpConstant.deployementRadius) )
{
// destroy this colorless token
m_TokenIds.add( currentToken.getId() );
p_game.moveToken( currentToken, Location.Graveyard );
currentToken.incVersion();
}
}
// add the 3 turrets
// get the 4 landing hexagons (same sector logic as in check())
AnBoardPosition landingPosition[] = new AnBoardPosition[4];
landingPosition[0] = getPosition();
switch( landingPosition[0].getSector() )
{
default:
case North:
case SouthEast:
case SouthWest:
landingPosition[1] = p_game.getCoordinateSystem().getNeighbor( landingPosition[0], Sector.North );
landingPosition[2] = p_game.getCoordinateSystem().getNeighbor( landingPosition[0], Sector.SouthEast );
landingPosition[3] = p_game.getCoordinateSystem().getNeighbor( landingPosition[0], Sector.SouthWest );
break;
case NorthEast:
case South:
case NorthWest:
landingPosition[1] = p_game.getCoordinateSystem().getNeighbor( landingPosition[0], Sector.NorthEast );
landingPosition[2] = p_game.getCoordinateSystem().getNeighbor( landingPosition[0], Sector.South );
landingPosition[3] = p_game.getCoordinateSystem().getNeighbor( landingPosition[0], Sector.NorthWest );
}
// unload three turrets
// index is incremented before use, so turrets occupy landingPosition[1..3]
// (slot 0 is the freighter's head hex).
int index = 0;
if( getToken( p_game ).containToken() )
{
for( EbToken token : getToken( p_game ).getCopyContains() )
{
if( (token.getType() == TokenType.Turret) && (index < 3) )
{
index++;
p_game.moveToken( token, landingPosition[index] );
token.incVersion();
}
}
}
}
/* (non-Javadoc)
* @see com.fullmetalgalaxy.model.persist.AnAction#unexec()
* Reverses exec(): reloads the turrets, sends the freighter back to orbit and
* restores the colorless tokens that were destroyed.
*/
@Override
public void unexec(Game p_game) throws RpcFmpException
{
super.unexec(p_game);
// reload three turrets
for( AnBoardPosition position : getToken(p_game).getExtraPositions(p_game.getCoordinateSystem()) )
{
EbToken token = p_game.getToken( position, TokenType.Turret );
if( token != null )
{
p_game.moveToken( token, getToken(p_game) );
token.decVersion();
}
}
p_game.moveToken( getToken(p_game), Location.Orbit );
getToken(p_game).decVersion();
// this update is here only to refresh token display
p_game.updateLastTokenUpdate( null );
// put the destroyed colorless tokens (ore) back on the board.
if( m_TokenIds != null )
{
for( Long id : m_TokenIds )
{
EbToken token = p_game.getToken( id );
if( (token != null) && (token.getLocation() == Location.Graveyard) )
{
p_game.moveToken( token, token.getPosition() );
token.decVersion();
}
}
}
}
}
package xyz.egie.sharetoreadingam;
import android.app.Dialog;
import android.content.Intent;
import android.graphics.Paint;
import android.net.Uri;
import android.support.v7.app.AlertDialog;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.text.TextUtils;
import android.view.View;
import android.widget.TextView;
import android.widget.Toast;
import org.w3c.dom.Text;
public class ShareActivity extends AppCompatActivity {
private static final String YEP = " yep ";
private static final String NOPE = " nope ";
private static final String NO_OPINION = "";
private TextView yepButton;
private TextView nopeButton;
// Stores yeps and nopes:
private String opinionText;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_share);
this.opinionText = NO_OPINION;
// TODO: Handle case where user doesn't have an email set yet
// TODO: Let user choose whether to show the YEP/NOPE section (if not, then just jump to send)
View parentView = findViewById(R.id.activity_share);
parentView.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
// TODO: Let the user know if the link was not posted to Reading!
finish();
}
});
this.yepButton = (TextView) findViewById(R.id.yep_button);
this.nopeButton = (TextView) findViewById(R.id.nope_button);
TextView sendButton = (TextView) findViewById(R.id.send_button);
yepButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
if (TextUtils.equals(opinionText, YEP)) {
// If opinion is already yep, then reset
clearOpinionUi();
opinionText = NO_OPINION;
} else {
markUiAsYep();
opinionText = YEP;
}
}
});
nopeButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
if (TextUtils.equals(opinionText, NOPE)) {
// If opinion is already nope, then reset
clearOpinionUi();
opinionText = NO_OPINION;
} else {
markUiAsNope();
opinionText = NOPE;
}
}
});
// Get intent, action and MIME type
Intent intent = getIntent();
String action = intent.getAction();
String type = intent.getType();
sendButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Toast.makeText(ShareActivity.this, "Opinion: " + opinionText, Toast.LENGTH_LONG).show();
finish();
}
});
if (action.equals(Intent.ACTION_SEND) && type != null) {
if (type.equals("text/plain")) {
String body = intent.getStringExtra(Intent.EXTRA_TEXT);
// postBodyToReading(body);
}
}
}
private void postBodyToReading(String body) {
Intent emailIntent = new Intent(Intent.ACTION_SENDTO);
emailIntent.setData(Uri.parse("mailto:")); // only email apps should handle this
String readingEmail = ""; // todo: store this.
// Set email to send to Reading.am address
emailIntent.putExtra(Intent.EXTRA_EMAIL, new String[]{readingEmail});
// Set email to send with link (from app that is sharing) and opinion (yep/nope)
String opinionatedString = body + this.opinionText;
emailIntent.putExtra(Intent.EXTRA_TEXT, opinionatedString);
if (emailIntent.resolveActivity(getPackageManager()) != null) {
startActivity(emailIntent);
}
}
private void markUiAsYep() {
clearOpinionUi();
nopeButton.setPaintFlags(nopeButton.getPaintFlags()| Paint.STRIKE_THRU_TEXT_FLAG);
yepButton.setSelected(true);
}
private void markUiAsNope() {
clearOpinionUi();
yepButton.setPaintFlags(nopeButton.getPaintFlags() | Paint.STRIKE_THRU_TEXT_FLAG);
nopeButton.setSelected(true);
}
private void clearOpinionUi() {
yepButton.setPaintFlags(0);
nopeButton.setPaintFlags(0);
yepButton.setSelected(false);
nopeButton.setSelected(false);
}
@Override
public void finish() {
super.finish();
// Finish up without a big ol transition:
overridePendingTransition(0, 0);
}
} |
package org.commcare.dalvik.activities;
import org.commcare.android.adapters.AppManagerAdapter;
import org.commcare.android.database.SqlStorage;
import org.commcare.android.database.global.models.ApplicationRecord;
import org.commcare.android.models.notifications.NotificationMessageFactory;
import org.commcare.android.models.notifications.NotificationMessageFactory.StockMessages;
import org.commcare.dalvik.R;
import org.commcare.dalvik.application.CommCareApp;
import org.commcare.dalvik.application.CommCareApplication;
import org.commcare.dalvik.preferences.CommCarePreferences;
import org.javarosa.core.services.locale.Localization;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.ListView;
import android.widget.Toast;
/**
* @author amstone326
*
*/
public class AppManagerActivity extends Activity {

    /** Intent extra marking that a child activity was launched from this manager. */
    public static final String KEY_LAUNCH_FROM_MANAGER = "from_manager";

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.app_manager);
        refreshView();
    }

    @Override
    public void onResume() {
        super.onResume();
        // Re-query installed apps in case the set changed while we were paused.
        // Previously this built its own adapter with android.R.layout.simple_list_item_1,
        // silently overriding the custom row layout used by refreshView().
        refreshView();
    }

    /** Rebuilds the list view from the currently installed application records. */
    private void refreshView() {
        ListView lv = (ListView) findViewById(R.id.apps_list_view);
        lv.setAdapter(new AppManagerAdapter(this, R.layout.custom_list_item_view, appRecordArray()));
    }

    /** Snapshots all installed application records into an array. */
    private ApplicationRecord[] appRecordArray() {
        SqlStorage<ApplicationRecord> appList = CommCareApplication._().getInstalledAppRecords();
        ApplicationRecord[] appArray = new ApplicationRecord[appList.getNumRecords()];
        int index = 0;
        for (ApplicationRecord r : appList) {
            appArray[index++] = r;
        }
        return appArray;
    }

    /**
     * @return the record at {@code index} in the current snapshot, or
     *         {@code null} if the index is out of range.
     */
    public ApplicationRecord getAppAtIndex(int index) {
        ApplicationRecord[] currentApps = appRecordArray();
        if (index < 0 || index >= currentApps.length) {
            return null;
        }
        return currentApps[index];
    }

    /** Launches the setup flow to install a new app. */
    public void installAppClicked(View v) {
        Intent i = new Intent(getApplicationContext(), CommCareSetupActivity.class);
        i.putExtra(KEY_LAUNCH_FROM_MANAGER, true);
        this.startActivityForResult(i, CommCareHomeActivity.INIT_APP);
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent intent) {
        switch (requestCode) {
        case CommCareHomeActivity.INIT_APP:
            if (resultCode == RESULT_OK) {
                // A fresh install may still need its multimedia verified.
                if (!CommCareApplication._().getCurrentApp().areResourcesValidated()) {
                    Intent i = new Intent(this, CommCareVerificationActivity.class);
                    i.putExtra(KEY_LAUNCH_FROM_MANAGER, true);
                    this.startActivityForResult(i, CommCareHomeActivity.MISSING_MEDIA_ACTIVITY);
                } else {
                    Toast.makeText(this, "New app installed successfully", Toast.LENGTH_LONG).show();
                }
            } else {
                Toast.makeText(this, "No app was installed!", Toast.LENGTH_LONG).show();
            }
            break;
        case CommCareHomeActivity.UPGRADE_APP:
            if (resultCode == RESULT_CANCELED) {
                //This might actually be bad, but try to go about your business
                //The onResume() will take us to the screen
                return;
            } else if (resultCode == RESULT_OK) {
                //set flag that we should autoupdate on next login
                SharedPreferences preferences = CommCareApplication._().getCurrentApp().getAppPreferences();
                // Fix: commit() was missing, so the auto-update flag was never persisted.
                preferences.edit().putBoolean(CommCarePreferences.AUTO_TRIGGER_UPDATE, true).commit();
                //The onResume() will take us to the screen
                return;
            }
            break;
        case CommCareHomeActivity.MISSING_MEDIA_ACTIVITY:
            if (resultCode == RESULT_CANCELED) {
                // Warn the user that they chose to skip media verification.
                AlertDialog.Builder builder = new AlertDialog.Builder(this);
                builder.setTitle("Media Not Verified");
                builder.setMessage(R.string.skipped_verification_warning)
                        .setPositiveButton("OK", new DialogInterface.OnClickListener() {
                            @Override
                            public void onClick(DialogInterface dialog, int which) {
                                dialog.dismiss();
                            }
                        });
                AlertDialog dialog = builder.create();
                dialog.show();
            } else if (resultCode == RESULT_OK) {
                Toast.makeText(this, "Media Validated!", Toast.LENGTH_LONG).show();
            }
            break;
        }
    }

    /** Uninstalls the selected app **/
    public void uninstallSelected(View v) {
        // The app id is smuggled through the row view's content description.
        String appId = (String) v.getContentDescription();
        ApplicationRecord selected = CommCareApplication._().getRecordById(appId);
        CommCareApplication._().getGlobalStorage(ApplicationRecord.class).remove(selected.getID());
        refreshView();
    }

    /** If the app is not archived, sets it to archived (i.e. still installed but
     * not visible to users); If it is archived, sets it to unarchived **/
    public void toggleArchiveSelected(View v) {
        String appId = (String) v.getContentDescription();
        ApplicationRecord selected = CommCareApplication._().getRecordById(appId);
        selected.setArchiveStatus(!selected.isArchived());
        // NOTE(review): nothing here persists the record back to storage —
        // confirm setArchiveStatus() saves it, otherwise the toggle is lost.
        Button b = (Button) v;
        if (selected.isArchived()) {
            b.setText("Unarchive");
        } else {
            b.setText("Archive");
        }
    }

    /** Opens the MM verification activity for the selected app **/
    public void verifyResourcesForSelected(View v) {
        String appId = (String) v.getContentDescription();
        ApplicationRecord selected = CommCareApplication._().getRecordById(appId);
        CommCareApplication._().initializeAppResources(new CommCareApp(selected));
        Intent i = new Intent(this, CommCareVerificationActivity.class);
        i.putExtra(KEY_LAUNCH_FROM_MANAGER, true);
        this.startActivityForResult(i, CommCareHomeActivity.MISSING_MEDIA_ACTIVITY);
    }

    /** Conducts an update for the selected app **/
    public void updateSelected(View v) {
        String appId = (String) v.getContentDescription();
        ApplicationRecord selected = CommCareApplication._().getRecordById(appId);
        CommCareApplication._().initializeAppResources(new CommCareApp(selected));
        Intent i = new Intent(getApplicationContext(), CommCareSetupActivity.class);
        SharedPreferences prefs = CommCareApplication._().getCurrentApp().getAppPreferences();
        String ref = prefs.getString("default_app_server", null);
        i.putExtra(CommCareSetupActivity.KEY_PROFILE_REF, ref);
        i.putExtra(CommCareSetupActivity.KEY_UPGRADE_MODE, true);
        startActivityForResult(i, CommCareHomeActivity.UPGRADE_APP);
    }
}
package edu.neu.ccs.pyramid.eval;
import edu.neu.ccs.pyramid.dataset.MultiLabel;
import edu.neu.ccs.pyramid.dataset.MultiLabelClfDataSet;
import edu.neu.ccs.pyramid.multilabel_classification.MultiLabelClassifier;
import edu.neu.ccs.pyramid.multilabel_classification.Utils;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
public class MAP {
/**
* compute mean average precision over given labels
* @param classifier
* @param dataSet
* @return
*/
public static double map(MultiLabelClassifier.ClassProbEstimator classifier, MultiLabelClfDataSet dataSet, List<Integer> labels){
if (classifier.getNumClasses()!=dataSet.getNumClasses()){
throw new IllegalArgumentException("classifier.getNumClasses()!=dataSet.getNumClasses()");
}
int numData = dataSet.getNumDataPoints();
double[][] probs = new double[dataSet.getNumDataPoints()][dataSet.getNumClasses()];
IntStream.range(0, dataSet.getNumDataPoints()).parallel()
.forEach(i->probs[i] = classifier.predictClassProbs(dataSet.getRow(i)));
double sum = 0;
for (int l: labels){
int[] binaryLabels = new int[numData];
double[] marginals = new double[numData];
for (int i=0;i<numData;i++){
if (dataSet.getMultiLabels()[i].matchClass(l)){
binaryLabels[i] = 1;
}
marginals[i] = probs[i][l];
}
double averagePrecision = AveragePrecision.averagePrecision(binaryLabels, marginals);
// System.out.println("AP for label "+l+"="+averagePrecision);
sum += averagePrecision;
}
return sum/labels.size();
}
/**
* label MAP; marginals are provided by the classifiers
* @param classifier
* @param dataSet
* @return
*/
public static double map(MultiLabelClassifier.ClassProbEstimator classifier, MultiLabelClfDataSet dataSet){
List<Integer> labels = IntStream.range(0, dataSet.getNumClasses()).boxed().collect(Collectors.toList());
return map(classifier, dataSet, labels);
}
/**
* label MAP
* marginals are estimated through support combinations
* @param classifier
* @param dataSet
* @return
*/
public static double mapBySupport(MultiLabelClassifier.AssignmentProbEstimator classifier, MultiLabelClfDataSet dataSet, List<MultiLabel> combinations){
if (classifier.getNumClasses()!=dataSet.getNumClasses()){
throw new IllegalArgumentException("classifier.getNumClasses()!=dataSet.getNumClasses()");
}
int numData = dataSet.getNumDataPoints();
double[][] probs = new double[dataSet.getNumDataPoints()][dataSet.getNumClasses()];
IntStream.range(0, dataSet.getNumDataPoints()).parallel()
.forEach(i->{
double[] comProbs = classifier.predictAssignmentProbs(dataSet.getRow(i),combinations);
probs[i] = Utils.marginals(combinations, comProbs, classifier.getNumClasses());
});
double sum = 0;
for (int l=0;l<dataSet.getNumClasses();l++){
int[] binaryLabels = new int[numData];
double[] marginals = new double[numData];
for (int i=0;i<numData;i++){
if (dataSet.getMultiLabels()[i].matchClass(l)){
binaryLabels[i] = 1;
}
marginals[i] = probs[i][l];
}
double averagePrecision = AveragePrecision.averagePrecision(binaryLabels, marginals);
System.out.println("AP for label "+l+"="+averagePrecision);
sum += averagePrecision;
}
return sum/dataSet.getNumClasses();
}
public static double instanceMAP(MultiLabelClassifier.ClassProbEstimator classifier, MultiLabelClfDataSet dataSet){
return IntStream.range(0, dataSet.getNumDataPoints()).parallel().mapToDouble(i->{
int[] binaryLabels = new int[classifier.getNumClasses()];
MultiLabel multiLabel = dataSet.getMultiLabels()[i];
for (int l:multiLabel.getMatchedLabels()) {
binaryLabels[l] = 1;
}
double[] probs = classifier.predictClassProbs(dataSet.getRow(i));
return AveragePrecision.averagePrecision(binaryLabels, probs);
}).average().getAsDouble();
}
public static double instanceMAP(MultiLabelClassifier.AssignmentProbEstimator classifier, MultiLabelClfDataSet dataSet, List<MultiLabel> combinations){
return IntStream.range(0, dataSet.getNumDataPoints()).parallel().mapToDouble(i->{
int[] binaryLabels = new int[classifier.getNumClasses()];
MultiLabel multiLabel = dataSet.getMultiLabels()[i];
for (int l:multiLabel.getMatchedLabels()) {
binaryLabels[l] = 1;
}
double[] comProbs = classifier.predictAssignmentProbs(dataSet.getRow(i),combinations);
double[] probs = Utils.marginals(combinations, comProbs, classifier.getNumClasses());
return AveragePrecision.averagePrecision(binaryLabels, probs);
}).average().getAsDouble();
}
public static double instanceMAP(double[][] marginals, MultiLabelClfDataSet dataSet){
return IntStream.range(0, dataSet.getNumDataPoints()).parallel().mapToDouble(i->{
int[] binaryLabels = new int[dataSet.getNumClasses()];
MultiLabel multiLabel = dataSet.getMultiLabels()[i];
for (int l:multiLabel.getMatchedLabels()) {
binaryLabels[l] = 1;
}
return AveragePrecision.averagePrecision(binaryLabels, marginals[i]);
}).average().getAsDouble();
}
} |
package hudson.model;
import hudson.security.ACL;
import org.acegisecurity.Authentication;
import org.acegisecurity.context.SecurityContextHolder;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import org.kohsuke.graph_layouter.Layout;
import org.kohsuke.graph_layouter.Navigator;
import org.kohsuke.graph_layouter.Direction;
import javax.servlet.ServletOutputStream;
import javax.imageio.ImageIO;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.Stack;
import java.io.IOException;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.Rectangle;
import java.awt.Graphics2D;
import java.awt.Color;
import java.awt.Point;
import java.awt.HeadlessException;
import java.awt.FontMetrics;
import java.awt.geom.AffineTransform;
import java.awt.image.BufferedImage;
/**
* Maintains the build dependencies between {@link AbstractProject}s
* for efficient dependency computation.
*
* <p>
* The "master" data of dependencies are owned/persisted/maintained by
* individual {@link AbstractProject}s, but because of that, it's relatively
* slow to compute backward edges.
*
* <p>
* This class builds the complete bi-directional dependency graph
* by collecting information from all {@link AbstractProject}s.
*
* <p>
* Once built, {@link DependencyGraph} is immutable, and every time
* there's a change (which is relatively rare), a new instance
* will be created. This eliminates the need of synchronization.
*
* @see Hudson#getDependencyGraph()
* @author Kohsuke Kawaguchi
*/
public final class DependencyGraph implements Comparator<AbstractProject> {
private Map<AbstractProject, List<Dependency>> forward = new HashMap<AbstractProject, List<Dependency>>();
private Map<AbstractProject, List<Dependency>> backward = new HashMap<AbstractProject, List<Dependency>>();
private boolean built;
/**
* Builds the dependency graph.
*/
public DependencyGraph() {
// Set full privileges while computing to avoid missing any projects the current user cannot see
Authentication saveAuth = SecurityContextHolder.getContext().getAuthentication();
try {
SecurityContextHolder.getContext().setAuthentication(ACL.SYSTEM);
for( AbstractProject p : Hudson.getInstance().getAllItems(AbstractProject.class) )
p.buildDependencyGraph(this);
forward = finalize(forward);
backward = finalize(backward);
built = true;
} finally {
SecurityContextHolder.getContext().setAuthentication(saveAuth);
}
}
/**
* Special constructor for creating an empty graph
*/
private DependencyGraph(boolean dummy) {
forward = backward = Collections.emptyMap();
built = true;
}
/**
* Gets all the immediate downstream projects (IOW forward edges) of the given project.
*
* @return
* can be empty but never null.
*/
public List<AbstractProject> getDownstream(AbstractProject p) {
return get(forward,p,false);
}
/**
* Gets all the immediate upstream projects (IOW backward edges) of the given project.
*
* @return
* can be empty but never null.
*/
public List<AbstractProject> getUpstream(AbstractProject p) {
return get(backward,p,true);
}
private List<AbstractProject> get(Map<AbstractProject, List<Dependency>> map, AbstractProject src, boolean up) {
List<Dependency> v = map.get(src);
if(v==null) return Collections.emptyList();
List<AbstractProject> result = new ArrayList<AbstractProject>(v.size());
for (Dependency d : v) result.add(up ? d.getUpstreamProject() : d.getDownstreamProject());
return result;
}
/**
* @since 1.341
*/
public List<Dependency> getDownstreamDependencies(AbstractProject p) {
return get(forward,p);
}
/**
* @since 1.341
*/
public List<Dependency> getUpstreamDependencies(AbstractProject p) {
return get(backward,p);
}
private List<Dependency> get(Map<AbstractProject, List<Dependency>> map, AbstractProject src) {
List<Dependency> v = map.get(src);
if(v!=null) return v;
else return Collections.emptyList();
}
/**
* @deprecated since 1.341; use {@link #addDependency(Dependency)}
*/
@Deprecated
public void addDependency(AbstractProject upstream, AbstractProject downstream) {
addDependency(new Dependency(upstream,downstream));
}
/**
* Called during the dependency graph build phase to add a dependency edge.
*/
public void addDependency(Dependency dep) {
if(built)
throw new IllegalStateException();
add(forward,dep.getUpstreamProject(),dep);
add(backward,dep.getDownstreamProject(),dep);
}
/**
* @deprecated since 1.341
*/
@Deprecated
public void addDependency(AbstractProject upstream, Collection<? extends AbstractProject> downstream) {
for (AbstractProject p : downstream)
addDependency(upstream,p);
}
/**
* @deprecated since 1.341
*/
@Deprecated
public void addDependency(Collection<? extends AbstractProject> upstream, AbstractProject downstream) {
for (AbstractProject p : upstream)
addDependency(p,downstream);
}
/**
* Lists up {@link DependecyDeclarer} from the collection and let them builds dependencies.
*/
public void addDependencyDeclarers(AbstractProject upstream, Collection<?> possibleDependecyDeclarers) {
for (Object o : possibleDependecyDeclarers) {
if (o instanceof DependecyDeclarer) {
DependecyDeclarer dd = (DependecyDeclarer) o;
dd.buildDependencyGraph(upstream,this);
}
}
}
/**
* Returns true if a project has a non-direct dependency to another project.
* <p>
* A non-direct dependency is a path of dependency "edge"s from the source to the destination,
* where the length is greater than 1.
*/
public boolean hasIndirectDependencies(AbstractProject src, AbstractProject dst) {
Set<AbstractProject> visited = new HashSet<AbstractProject>();
Stack<AbstractProject> queue = new Stack<AbstractProject>();
queue.addAll(getDownstream(src));
queue.remove(dst);
while(!queue.isEmpty()) {
AbstractProject p = queue.pop();
if(p==dst)
return true;
if(visited.add(p))
queue.addAll(getDownstream(p));
}
return false;
}
/**
* Gets all the direct and indirect upstream dependencies of the given project.
*/
public Set<AbstractProject> getTransitiveUpstream(AbstractProject src) {
return getTransitive(backward,src,true);
}
/**
* Gets all the direct and indirect downstream dependencies of the given project.
*/
public Set<AbstractProject> getTransitiveDownstream(AbstractProject src) {
return getTransitive(forward,src,false);
}
private Set<AbstractProject> getTransitive(Map<AbstractProject, List<Dependency>> direction, AbstractProject src, boolean up) {
Set<AbstractProject> visited = new HashSet<AbstractProject>();
Stack<AbstractProject> queue = new Stack<AbstractProject>();
queue.add(src);
while(!queue.isEmpty()) {
AbstractProject p = queue.pop();
for (AbstractProject child : get(direction,p,up)) {
if(visited.add(child))
queue.add(child);
}
}
return visited;
}
private void add(Map<AbstractProject, List<Dependency>> map, AbstractProject key, Dependency dep) {
List<Dependency> set = map.get(key);
if(set==null) {
set = new ArrayList<Dependency>();
map.put(key,set);
}
for (ListIterator<Dependency> it = set.listIterator(); it.hasNext();) {
Dependency d = it.next();
// Check for existing edge that connects the same two projects:
if (d.getUpstreamProject()==dep.getUpstreamProject() && d.getDownstreamProject()==dep.getDownstreamProject()) {
if (d.equals(dep))
return; // identical with existing edge
if (d instanceof DependencyGroup)
((DependencyGroup)d).add(dep);
else
it.set(new DependencyGroup(d,dep));
return;
}
}
// Otherwise add to list:
set.add(dep);
}
private Map<AbstractProject, List<Dependency>> finalize(Map<AbstractProject, List<Dependency>> m) {
for (Entry<AbstractProject, List<Dependency>> e : m.entrySet()) {
Collections.sort( e.getValue(), NAME_COMPARATOR );
e.setValue( Collections.unmodifiableList(e.getValue()) );
}
return Collections.unmodifiableMap(m);
}
/**
* Experimental visualization of project dependencies.
*/
public void doGraph( StaplerRequest req, StaplerResponse rsp ) throws IOException {
Hudson.getInstance().checkPermission(Hudson.ADMINISTER);
try {
// creates a dummy graphics just so that we can measure font metrics
BufferedImage emptyImage = new BufferedImage(1,1, BufferedImage.TYPE_INT_RGB );
Graphics2D graphics = emptyImage.createGraphics();
graphics.setFont(FONT);
final FontMetrics fontMetrics = graphics.getFontMetrics();
// TODO: timestamp check
Layout<AbstractProject> layout = new Layout<AbstractProject>(new Navigator<AbstractProject>() {
public Collection<AbstractProject> vertices() {
// only include projects that have some dependency
List<AbstractProject> r = new ArrayList<AbstractProject>();
for (AbstractProject p : Hudson.getInstance().getAllItems(AbstractProject.class)) {
if(!getDownstream(p).isEmpty() || !getUpstream(p).isEmpty())
r.add(p);
}
return r;
}
public Collection<AbstractProject> edge(AbstractProject p) {
return getDownstream(p);
}
public Dimension getSize(AbstractProject p) {
int w = fontMetrics.stringWidth(p.getDisplayName()) + MARGIN*2;
return new Dimension(w, fontMetrics.getHeight() + MARGIN*2);
}
}, Direction.LEFTRIGHT);
Rectangle area = layout.calcDrawingArea();
area.grow(4,4); // give it a bit of margin
BufferedImage image = new BufferedImage(area.width, area.height, BufferedImage.TYPE_INT_RGB );
Graphics2D g2 = image.createGraphics();
g2.setTransform(AffineTransform.getTranslateInstance(-area.x,-area.y));
g2.setPaint(Color.WHITE);
g2.fill(area);
g2.setFont(FONT);
g2.setPaint(Color.BLACK);
for( AbstractProject p : layout.vertices() ) {
final Point sp = center(layout.vertex(p));
for (AbstractProject q : layout.edges(p)) {
Point cur=sp;
for( Point pt : layout.edge(p,q) ) {
g2.drawLine(cur.x, cur.y, pt.x, pt.y);
cur=pt;
}
final Point ep = center(layout.vertex(q));
g2.drawLine(cur.x, cur.y, ep.x, ep.y);
}
}
int diff = fontMetrics.getAscent()+fontMetrics.getLeading()/2;
for( AbstractProject p : layout.vertices() ) {
Rectangle r = layout.vertex(p);
g2.setPaint(Color.WHITE);
g2.fillRect(r.x, r.y, r.width, r.height);
g2.setPaint(Color.BLACK);
g2.drawRect(r.x, r.y, r.width, r.height);
g2.drawString(p.getDisplayName(), r.x+MARGIN, r.y+MARGIN+ diff);
}
rsp.setContentType("image/png");
ServletOutputStream os = rsp.getOutputStream();
ImageIO.write(image, "PNG", os);
os.close();
} catch(HeadlessException e) {
// not available. send out error message
rsp.sendRedirect2(req.getContextPath()+"/images/headless.png");
}
}
private Point center(Rectangle r) {
return new Point(r.x+r.width/2,r.y+r.height/2);
}
private static final Font FONT = new Font("TimesRoman", Font.PLAIN, 10);
/**
* Margins between the project name and its bounding box.
*/
private static final int MARGIN = 4;
private static final Comparator<Dependency> NAME_COMPARATOR = new Comparator<Dependency>() {
public int compare(Dependency lhs, Dependency rhs) {
int cmp = lhs.getUpstreamProject().getName().compareTo(rhs.getUpstreamProject().getName());
return cmp != 0 ? cmp : lhs.getDownstreamProject().getName().compareTo(rhs.getDownstreamProject().getName());
}
};
public static final DependencyGraph EMPTY = new DependencyGraph(false);
/**
* Compare to Projects based on the topological order defined by this Dependency Graph
*/
public int compare(AbstractProject o1, AbstractProject o2) {
Set<AbstractProject> o1sdownstreams = getTransitiveDownstream(o1);
Set<AbstractProject> o2sdownstreams = getTransitiveDownstream(o2);
if (o1sdownstreams.contains(o2)) {
if (o2sdownstreams.contains(o1)) return 0; else return 1;
} else {
if (o2sdownstreams.contains(o1)) return -1; else return 0;
}
}
/**
* Represents an edge in the dependency graph.
* @since 1.341
*/
public static class Dependency {
private AbstractProject upstream, downstream;
public Dependency(AbstractProject upstream, AbstractProject downstream) {
this.upstream = upstream;
this.downstream = downstream;
}
public AbstractProject getUpstreamProject() {
return upstream;
}
public AbstractProject getDownstreamProject() {
return downstream;
}
/**
* Decide whether build should be triggered and provide any Actions for the build.
* Default implementation always returns true (for backward compatibility), and
* adds no Actions. Subclasses may override to control how/if the build is triggered.
* @param build Build of upstream project that just completed
* @param listener For any error/log output
* @param actions Add Actions for the triggered build to this list; never null
* @return True to trigger a build of the downstream project
*/
public boolean shouldTriggerBuild(AbstractBuild build, TaskListener listener,
List<Action> actions) {
return true;
}
/**
* Does this method point to itself?
*/
public boolean pointsItself() {
return upstream==downstream;
}
@Override
public boolean equals(Object obj) {
if (obj == null) return false;
if (getClass() != obj.getClass()) return false;
final Dependency that = (Dependency) obj;
return this.upstream == that.upstream || this.downstream == that.downstream;
}
@Override
public int hashCode() {
int hash = 7;
hash = 23 * hash + this.upstream.hashCode();
hash = 23 * hash + this.downstream.hashCode();
return hash;
}
}
/**
* Collect multiple dependencies between the same two projects.
*/
private static class DependencyGroup extends Dependency {
private Set<Dependency> group = new LinkedHashSet<Dependency>();
DependencyGroup(Dependency first, Dependency second) {
super(first.getUpstreamProject(), first.getDownstreamProject());
group.add(first);
group.add(second);
}
void add(Dependency next) {
group.add(next);
}
@Override
public boolean shouldTriggerBuild(AbstractBuild build, TaskListener listener,
List<Action> actions) {
List<Action> check = new ArrayList<Action>();
for (Dependency d : group) {
if (d.shouldTriggerBuild(build, listener, check)) {
actions.addAll(check);
return true;
} else
check.clear();
}
return false;
}
}
} |
package org.atlasapi.output;
import java.util.List;
import java.util.Optional;
import javax.annotation.Nullable;
import javax.servlet.http.HttpServletRequest;
import org.atlasapi.annotation.Annotation;
import org.atlasapi.application.ApplicationSources;
import org.atlasapi.channel.Region;
import org.atlasapi.output.annotation.OutputAnnotation;
import org.atlasapi.query.annotation.ActiveAnnotations;
import org.atlasapi.query.common.QueryContext;
import org.atlasapi.query.common.Resource;
import org.atlasapi.query.common.useraware.UserAccountsAwareQueryContext;
import org.atlasapi.query.common.useraware.UserAwareQueryContext;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static com.google.common.base.Preconditions.checkNotNull;
/**
* Contains state required during the output of a response. Not thread-safe
*/
public class OutputContext {
private final Logger log = LoggerFactory.getLogger(OutputContext.class);
public static OutputContext valueOf(QueryContext standard) {
return new OutputContext(
standard.getAnnotations(),
standard.getApplicationSources(),
standard.getRequest(),
null
);
}
public static OutputContext valueOf(QueryContext standard, Region region) {
return new OutputContext(
standard.getAnnotations(),
standard.getApplicationSources(),
standard.getRequest(),
region
);
}
public static OutputContext valueOf(UserAwareQueryContext standard) {
return new OutputContext(
standard.getAnnotations(),
standard.getApplicationSources(),
standard.getRequest(),
null
);
}
public static OutputContext valueOf(UserAccountsAwareQueryContext standard) {
return new OutputContext(
standard.getAnnotations(),
standard.getApplicationSources(),
standard.getRequest(),
null
);
}
private final ActiveAnnotations annotations;
private final ApplicationSources applicationSources;
private final List<Resource> resources;
private final HttpServletRequest request;
private final Optional<Region> region;
public OutputContext(
ActiveAnnotations activeAnnotations,
ApplicationSources applicationSources,
HttpServletRequest request,
@Nullable Region region
) {
this.annotations = checkNotNull(activeAnnotations);
this.applicationSources = checkNotNull(applicationSources);
this.resources = Lists.newLinkedList();
this.request = checkNotNull(request);
this.region = Optional.ofNullable(region);
}
public final OutputContext startResource(Resource resource) {
resources.add(resource);
return this;
}
public final OutputContext endResource() {
resources.remove(resources.size() - 1);
return this;
}
public ImmutableSet<Annotation> getActiveAnnotations() {
return annotations.forPath(resources);
}
public <T> List<OutputAnnotation<? super T>> getAnnotations(AnnotationRegistry<T> registry) {
ImmutableSet<Annotation> active = annotations.forPath(resources);
if (active == null || active.isEmpty()) {
return registry.defaultAnnotations();
}
return registry.activeAnnotations(active);
}
public ApplicationSources getApplicationSources() {
return this.applicationSources;
}
public HttpServletRequest getRequest() {
return request;
}
public Optional<Region> getRegion() {
return region;
}
} |
package i5.las2peer.p2p;
import i5.las2peer.api.p2p.ServiceNameVersion;
import i5.las2peer.api.security.*;
import i5.las2peer.classLoaders.ClassManager;
import i5.las2peer.logging.L2pLogger;
import i5.las2peer.persistency.SharedStorage;
import i5.las2peer.registry.*;
import i5.las2peer.registry.data.RegistryConfiguration;
import i5.las2peer.registry.data.UserData;
import i5.las2peer.registry.exceptions.BadEthereumCredentialsException;
import i5.las2peer.registry.exceptions.EthereumException;
import i5.las2peer.security.AgentImpl;
import i5.las2peer.security.EthereumAgent;
import java.net.InetAddress;
import java.util.List;
// TODO: send stop announcements on service stop / node shutdown
// actually, don't do that here, instead extend NodeServiceCache
// otherwise there would be a lot of redundancy
/**
* Node implementation that extends the FreePastry-based node with
* access to an Ethereum blockchain-based service and user registry.
*
* Access to the registry is encapsulated in the package
* {@link i5.las2peer.registry}. (The actual Ethereum client is run
* separately, but see there for details.)
*
* The operator of an EthereumNode must have an Ethereum wallet (which
* is a JSON file containing a possibly encrypted key pair, much like
* las2peer's agent XML files, as well as an Ethereum address).
* The Ether funds of that wallet are used to announce service
* deployments, i.e., services running at this node.
* The same account should be used for mining in the Ethereum client,
* so that new Ether is added.
*
* Operations triggered by agents, such as users registering and
* releasing services, are paid for by them.
*
* @see EthereumAgent
*/
public class EthereumNode extends PastryNodeImpl {
private ReadWriteRegistryClient registryClient;
private String ethereumWalletPath;
private String ethereumWalletPassword;
private static L2pLogger logger = L2pLogger.getInstance(EthereumNode.class);
/**
* @param ethereumWalletPath path to standard Ethereum wallet file
* belonging to the Node operator
* @param ethereumWalletPassword password for wallet (may be null
* or empty, but obviously that's not
* recommended)
* @see PastryNodeImpl#PastryNodeImpl(ClassManager, boolean, InetAddress, Integer, List, SharedStorage.STORAGE_MODE, String, Long)
*/
public EthereumNode(ClassManager classManager, boolean useMonitoringObserver, InetAddress pastryBindAddress,
Integer pastryPort, List<String> bootstrap, SharedStorage.STORAGE_MODE storageMode,
String storageDir, Long nodeIdSeed, String ethereumWalletPath, String ethereumWalletPassword) {
super(classManager, useMonitoringObserver, pastryBindAddress, pastryPort, bootstrap, storageMode, storageDir,
nodeIdSeed);
this.ethereumWalletPath = ethereumWalletPath;
this.ethereumWalletPassword = ethereumWalletPassword;
}
@Override
protected void launchSub() throws NodeException {
setStatus(NodeStatus.STARTING);
RegistryConfiguration conf = new RegistryConfiguration();
try {
registryClient = new ReadWriteRegistryClient(conf,
CredentialUtils.fromWallet(ethereumWalletPath, ethereumWalletPassword));
} catch (BadEthereumCredentialsException e) {
throw new NodeException("Bad Ethereum credentials. Cannot start.", e);
}
super.launchSub();
}
/**
* Announce deployment of the service associated with this service
* agent using the service registry.
* @param serviceAgent agent of service being started
*/
public void announceServiceDeployment(ServiceAgent serviceAgent) {
announceServiceDeployment(serviceAgent.getServiceNameVersion());
}
/**
* Announce deployment of the service instance.
* @param nameVersion service being started
*/
public void announceServiceDeployment(ServiceNameVersion nameVersion) {
String serviceName = nameVersion.getPackageName();
String className = nameVersion.getSimpleClassName();
int versionMajor = nameVersion.getVersion().getMajor();
int versionMinor = nameVersion.getVersion().getMinor();
int versionPatch = nameVersion.getVersion().getSub();
String nodeId = getPastryNode().getId().toStringFull();
try {
registryClient.announceDeployment(serviceName, className, versionMajor, versionMinor, versionPatch, nodeId);
} catch (EthereumException e) {
logger.severe("Error while announcing deployment: " + e);
}
}
/**
* Announce end of deployment (i.e., shutdown) of the service
* associated with this service agent using the service registry.
* @param serviceAgent agent of service being shut down
*/
public void announceServiceDeploymentEnd(ServiceAgent serviceAgent) {
announceServiceDeploymentEnd(serviceAgent.getServiceNameVersion());
}
/**
* Announce end of deployment (i.e., shutdown) of the service
* instance.
* @param nameVersion service being shut down
*/
private void announceServiceDeploymentEnd(ServiceNameVersion nameVersion) {
String serviceName = nameVersion.getPackageName();
String className = nameVersion.getSimpleClassName();
int versionMajor = nameVersion.getVersion().getMajor();
int versionMinor = nameVersion.getVersion().getMinor();
int versionPatch = nameVersion.getVersion().getSub();
String nodeId = getPastryNode().getId().toStringFull();
try {
registryClient.announceDeploymentEnd(serviceName, className, versionMajor, versionMinor, versionPatch, nodeId);
} catch (EthereumException e) {
logger.severe("Error while announcing end of deployment: " + e);
}
}
@Override
public AgentImpl getAgent(String id) throws AgentException {
AgentImpl agent = super.getAgent(id);
if (agent instanceof EthereumAgent) {
try {
if (agentMatchesUserRegistryData((EthereumAgent) agent)) {
}
} catch (EthereumException e) {
throw new AgentException("Error while comparing stored agent to user registry. Aborting out of caution.");
}
}
return agent;
}
@Override
public void storeAgent(AgentImpl agent) throws AgentException {
super.storeAgent(agent);
if (agent instanceof EthereumAgent) {
try {
registerAgentInBlockchain((EthereumAgent) agent);
} catch (AgentException|EthereumException e) {
throw new AgentException("Problem storing Ethereum agent", e);
}
}
}
// Note: Unfortunately the term "register" is also used for storing
// the agent data in the shared storage in some parts of the code
// base. So "registerAgent" is definitely ambiguous.
private void registerAgentInBlockchain(EthereumAgent ethereumAgent) throws AgentException, EthereumException {
String name = ethereumAgent.getLoginName();
if (registryClient.usernameIsAvailable(name)) {
ethereumAgent.getRegistryClient().registerUser(name, ethereumAgent.getIdentifier());
} else if (!registryClient.usernameIsValid(name)) {
// this should probably be checked during creation too
throw new AgentException("Agent login name is not valid for registry smart contracts.");
} else if (agentMatchesUserRegistryData(ethereumAgent)) {
// already registered, but ID and address match
// this is fine, I guess
} else {
throw new AgentAlreadyExistsException("Agent username is already taken in blockchain user registry and details do NOT match.");
}
}
private boolean agentMatchesUserRegistryData(EthereumAgent agent) throws EthereumException {
UserData userInBlockchain = registryClient.getUser(agent.getLoginName());
if (userInBlockchain == null) {
return false;
} else {
return userInBlockchain.getOwnerAddress().equals(agent.getEthereumAddress())
&& userInBlockchain.getAgentId().equals(agent.getIdentifier());
}
}
/** @return registry client using this agent's credentials */
public ReadWriteRegistryClient getRegistryClient() {
return registryClient;
}
} |
package com.jmex.model.ogrexml.anim;
import com.jme.math.Matrix4f;
import com.jme.math.Vector3f;
import java.util.Map;
import com.jme.scene.Controller;
import com.jme.scene.state.GLSLShaderObjectsState;
import com.jme.scene.state.RenderState;
import com.jme.util.export.InputCapsule;
import com.jme.util.export.JMEExporter;
import com.jme.util.export.JMEImporter;
import com.jme.util.export.OutputCapsule;
import com.jme.util.export.Savable;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.nio.ByteBuffer;
import java.nio.FloatBuffer;
import java.util.Collection;
/**
 * Controller that plays bone- and vertex-based animations on a set of
 * OgreMesh targets, using either software skinning (CPU) or, when enabled
 * and supported, hardware skinning via a GLSL shader.
 */
public class MeshAnimationController extends Controller implements Savable {
private static final long serialVersionUID = -2412532346418342259L;
/**
 * If true, hardware skinning will not be used even if supported.
 */
private static final boolean forceSWskinning = true;
/**
 * List of targets which this controller effects.
 */
private OgreMesh[] targets;
/**
 * Skeleton object must contain corresponding data for the targets' weight buffers.
 */
private Skeleton skeleton;
/**
 * List of animations, bone or vertex based.
 */
private Map<String, Animation> animationMap;
/**
 * The currently playing animation.
 */
private Animation animation;
// Current playback time in seconds, advanced each update() by tpf * speed.
private float time = 0f;
/**
 * True if the mesh data should be reset to bind pose every frame.
 * This only refers to mesh data, the skeleton must always be reset to bind pose each frame.
 */
private boolean resetToBindEveryFrame = false;
/**
 * Frameskip LOD option
 */
private int framesToSkip = 0;
// Counter toward framesToSkip; an update is only applied when it reaches framesToSkip.
private int curFrame = 0;
/**
 * Creates a controller for the given meshes, skeleton and animation map.
 * Meshes referenced by a mesh (pose/morph) track — or all meshes, when
 * software skinning is used and bone animations exist — get their current
 * vertex data saved as bind pose so it can be restored each frame.
 */
public MeshAnimationController(OgreMesh[] meshes,
Skeleton skeleton,
Map<String, Animation> anims){
this.setRepeatType(RT_WRAP);
this.skeleton = skeleton;
this.animationMap = anims;
this.targets = meshes;
// find out which meshes need to have bind pose data saved
for (int i = 0; i < targets.length; i++){
// does this mesh has any pose/morph animation tracks?
for (Animation anim : animationMap.values()){
MeshAnimation manim = anim.getMeshAnimation();
BoneAnimation banim = anim.getBoneAnimation();
if (manim != null){
for (Track t : manim.getTracks()){
if (t.getTargetMeshIndex() == i){
targets[i].clearBindPose();
targets[i].saveCurrentToBindPose();
break;
}
}
}
if (banim != null && !isHardwareSkinning()){
targets[i].clearBindPose();
targets[i].saveCurrentToBindPose();
break;
}
}
if (targets[i].getWeightBuffer() != null)
targets[i].getWeightBuffer().initializeWeights();
}
if (isHardwareSkinning()){
assignShaderLogic();
}
reset();
}
/**
 * Copy constructor. The mesh data has to be unique, and copied through OgreMesh.cloneFromMesh.
 * The rest is handled automatically by this call.
 */
public MeshAnimationController(OgreMesh[] meshes, MeshAnimationController sourceControl){
this.setRepeatType(RT_WRAP);
// The skeleton is deep-copied; the animation map is shared with the source.
this.skeleton = new Skeleton(sourceControl.skeleton);
this.animationMap = sourceControl.animationMap;
this.targets = meshes;
if (isHardwareSkinning()){
assignShaderLogic();
}
reset();
}
/**
 * Returns a bone with the specified name.
 * Use this method to gain access to the bone,
 * to manually control it's transforms.
 */
public Bone getBone(String name){
return skeleton.getBone(name);
}
/**
 * Sets the currently active animation.
 * Use the animation name "&lt;bind&gt;" to set the model into bind pose.
 *
 * @return true if the animation has been successfully set. False if no such animation exists.
 */
public boolean setAnimation(String name){
if (name.equals("<bind>")){
reset();
return true;
}
animation = animationMap.get(name);
if (animation == null)
return false;
resetToBind();
// Mesh data must be re-bound each frame for vertex animations, and for
// bone animations when skinning happens in software.
resetToBindEveryFrame = animation.hasMeshAnimation() || !isHardwareSkinning();
time = 0;
return true;
}
/**
 * Returns the length of the animation in seconds. Returns -1 if the animation is not defined.
 */
public float getAnimationLength(String name){
Animation anim = animationMap.get(name);
if (anim == null)
return -1;
return anim.getLength();
}
/**
 * @return The name of the currently active animation
 */
public String getActiveAnimation(){
if (animation == null)
return "<bind>";
return animation.getName();
}
/**
 * @deprecated The name of this method incorrectly implies that a List
 * is returned. Use the method getAnimationNames instead.
 * @see #getAnimationNames();
 */
@Deprecated
public Collection<String> getList(){
return getAnimationNames();
}
/**
 * @return Collection of list of all animations that are defined
 */
public Collection<String> getAnimationNames(){
return animationMap.keySet();
}
/**
 * Enables frameskip LOD.
 * This technique is mostly only effective when software skinning is used.
 *
 * @param framesToSkip One frame will be played out of the framesToSkip number.
 */
public void setFrameSkip(int framesToSkip){
// Restart the skip counter when the skip rate changes.
if (this.framesToSkip != framesToSkip)
this.curFrame = 0;
this.framesToSkip = framesToSkip;
}
/**
 * @deprecated Use setCurTime
 * @see #setCurTime(float)
 */
public void setTime(float time){
setCurTime(time);
}
/**
 * Sets the time of the animation.
 * If it's greater than getAnimationLength(getActiveAnimation()),
 * the time will be appropriately clamped/wraped depending on the repeatMode.
 */
public void setCurTime(float time){
this.time = time;
}
// Package-private accessors used by collaborating loader/shader classes.
Skeleton getSkeleton(){
return skeleton;
}
OgreMesh[] getMeshList(){
return targets;
}
// Returns the model to bind pose and deactivates the current animation.
void reset(){
resetToBind();
skeleton.getRoot().reset();
skeleton.getRoot().update();
resetToBindEveryFrame = false;
animation = null;
time = 0;
}
// Restores saved bind-pose vertex data on every target that has it.
void resetToBind(){
for (int i = 0; i < targets.length; i++){
if (targets[i].hasBindPose()){
targets[i].restoreBindPose();
}
}
}
// Attaches the skinning shader (creating it if absent) to every target;
// only used when hardware skinning is active.
private void assignShaderLogic(){
SkinningShaderLogic logic = new SkinningShaderLogic();
for (OgreMesh target : targets){
GLSLShaderObjectsState glsl = (GLSLShaderObjectsState) target.getRenderState(RenderState.RS_GLSL_SHADER_OBJECTS);
if (glsl == null){
glsl = BoneAnimationLoader.createSkinningShader(skeleton.getBoneCount(),
target.getWeightBuffer().maxWeightsPerVert);
target.setRenderState(glsl);
}
glsl.setShaderDataLogic(logic);
}
}
/**
 * @return True if hardware skinning will be used.
 */
public boolean isHardwareSkinning(){
return !forceSWskinning && GLSLShaderObjectsState.isSupported();
}
/**
 * Applies the skeleton's current skinning matrices to the mesh's vertex
 * and normal buffers on the CPU. The weight buffer stores exactly 4
 * index/weight slots per vertex; only the first maxWeightsPerVert are
 * used and the remainder is skipped via the buffer position adjustment.
 */
private void softwareSkinUpdate(OgreMesh mesh){
Vector3f vt = new Vector3f();
Vector3f nm = new Vector3f();
Vector3f resultVert = new Vector3f();
Vector3f resultNorm = new Vector3f();
Matrix4f offsetMatrices[] = skeleton.computeSkinningMatrices();
// NOTE: This code assumes the vertex buffer is in bind pose
// resetToBind() has been called this frame
FloatBuffer vb = mesh.getVertexBuffer();
vb.rewind();
FloatBuffer nb = mesh.getNormalBuffer();
nb.rewind();
// get boneIndexes and weights for mesh
ByteBuffer ib = mesh.getWeightBuffer().indexes;
FloatBuffer wb = mesh.getWeightBuffer().weights;
int maxWeightsPerVert = mesh.getWeightBuffer().maxWeightsPerVert;
int fourMinusMaxWeights = 4 - maxWeightsPerVert;
ib.rewind();
wb.rewind();
// iterate vertices and apply skinning transform for each effecting bone
for (int vert = 0; vert < mesh.getVertexCount(); vert++){
vt.x = vb.get();
vt.y = vb.get();
vt.z = vb.get();
nm.x = nb.get();
nm.y = nb.get();
nm.z = nb.get();
resultVert.x = resultVert.y = resultVert.z = 0;
resultNorm.x = resultNorm.y = resultNorm.z = 0;
for (int w = 0; w < maxWeightsPerVert; w++){
float weight = wb.get();
Matrix4f mat = offsetMatrices[ib.get()];
// Expanded mat.mult(vt) * weight, accumulated (avoids temporaries).
resultVert.x += (mat.m00 * vt.x + mat.m01 * vt.y + mat.m02 * vt.z + mat.m03) * weight;
resultVert.y += (mat.m10 * vt.x + mat.m11 * vt.y + mat.m12 * vt.z + mat.m13) * weight;
resultVert.z += (mat.m20 * vt.x + mat.m21 * vt.y + mat.m22 * vt.z + mat.m23) * weight;
// Rotation-only transform for the normal (no translation column).
resultNorm.x += (nm.x * mat.m00 + nm.y * mat.m01 + nm.z * mat.m02) * weight;
resultNorm.y += (nm.x * mat.m10 + nm.y * mat.m11 + nm.z * mat.m12) * weight;
resultNorm.z += (nm.x * mat.m20 + nm.y * mat.m21 + nm.z * mat.m22) * weight;
}
// Skip the unused index/weight slots of this vertex's 4-slot record.
ib.position(ib.position()+fourMinusMaxWeights);
wb.position(wb.position()+fourMinusMaxWeights);
// overwrite vertex with transformed pos
vb.position(vb.position()-3);
vb.put(resultVert.x).put(resultVert.y).put(resultVert.z);
nb.position(nb.position()-3);
nb.put(resultNorm.x).put(resultNorm.y).put(resultNorm.z);
}
vb.flip();
nb.flip();
mesh.setHasDirtyVertices(true);
mesh.updateModelBound();
}
/**
 * Advances the active animation by tpf (scaled by the controller speed),
 * clamping or wrapping the time according to the repeat type, honoring
 * the frameskip LOD, and applying bone/mesh animation to the targets.
 */
@Override
public void update(float tpf) {
if (!isActive() || animation == null)
return;
// do clamping/wrapping of time
if (time < 0f){
switch (getRepeatType()){
case RT_CLAMP:
time = 0f;
break;
case RT_CYCLE:
time = 0f;
break;
case RT_WRAP:
time = animation.getLength() - time;
break;
}
}else if (time > animation.getLength()){
switch (getRepeatType()){
case RT_CLAMP:
time = animation.getLength();
break;
case RT_CYCLE:
time = animation.getLength();
break;
case RT_WRAP:
time = time - animation.getLength();
break;
}
}
if (framesToSkip > 0){
// check frame skipping
curFrame++;
if (curFrame != framesToSkip){
// Skipped frame: still advance the clock, but do not re-skin.
time += tpf * getSpeed();
return;
}else{
curFrame = 0;
}
}
if (resetToBindEveryFrame)
resetToBind();
if (animation.hasBoneAnimation()){
skeleton.getRoot().reset();
}
animation.setTime(time, targets, skeleton);
if (animation.hasBoneAnimation()){
skeleton.getRoot().update();
if (!isHardwareSkinning()){
// here update the targets verticles if no hardware skinning supported
// if hardware skinning is supported, the matrices and weight buffer
// will be sent by the SkinningShaderLogic object assigned to the shader
for (int i = 0; i < targets.length; i++){
softwareSkinUpdate(targets[i]);
}
}
}
time += tpf * getSpeed();
}
/** @return the current animation playback time in seconds */
public float getCurTime() { return time; }
/**
 * Used only for Saving/Loading models (all parameters of the non-default
 * constructor are restored from the saved model, but the object must be
 * constructed beforehand)
 */
public MeshAnimationController() {
}
/**
 * Saves this controller: the animation map is Java-serialized into a byte
 * array (ByteArray streams are in-memory, so no resource cleanup issues),
 * while targets and skeleton use jME's Savable mechanism.
 */
public void write(JMEExporter e) throws IOException {
super.write(e);
OutputCapsule output = e.getCapsule(this);
ByteArrayOutputStream bos = new ByteArrayOutputStream();
ObjectOutputStream oos = new ObjectOutputStream(bos);
oos.writeObject(animationMap);
oos.flush();
oos.close();
bos.close();
// Convert the animation map to a byte array:
byte[] data = bos.toByteArray();
// Then save it as such
output.write(data, "MeshAnimationControllerData", null);
output.write(targets, "targets[]", null);
output.write(skeleton, "skeleton", null);
}
/**
 * Restores this controller; mirror of {@link #write(JMEExporter)}.
 * A missing Animation class during deserialization is a fatal model error
 * and is rethrown as a RuntimeException (cause preserved).
 */
@SuppressWarnings("unchecked")
public void read(JMEImporter e) throws IOException {
super.read(e);
InputCapsule input = e.getCapsule(this);
byte[] data = input.readByteArray("MeshAnimationControllerData", null);
ByteArrayInputStream bis = new ByteArrayInputStream(data);
ObjectInputStream ois = new ObjectInputStream(bis);
try {
animationMap = (Map<String, Animation>) ois.readObject();
} catch (ClassNotFoundException e1) {
throw new RuntimeException(e1);
}
Savable[] targetsAsSavable = input.readSavableArray("targets[]", null);
skeleton = (Skeleton) input.readSavable("skeleton", null);
targets = new OgreMesh[targetsAsSavable.length];
int i = 0;
for (Savable s : targetsAsSavable)
targets[i++] = (OgreMesh) s;
}
}
package com.lwz.android.viewholderadapter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import android.content.Context;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
/**
* ViewHodler
* <p> ViewHolder AdapterView
*
* <p> ViewHolder {@link ViewHolder}
* @author lwz
*
* @param <T> List
*/
public abstract class ViewHolderAdapter<T> extends BaseAdapter {
Context mContext;
List<T> mData;
int mLayoutRes;
View mCurrentConvertView;
/**
* {@link #update(Collection)} {@link #append(Collection)}
* @param context
* @param layoutRes
*/
public ViewHolderAdapter(Context context, int layoutRes) {
this(context, new ArrayList<T>(), layoutRes);
}
public ViewHolderAdapter(Context context, List<T> data, int layoutRes) {
mContext = context;
mData = data;
mLayoutRes = layoutRes;
}
@Override
public int getCount() {
return mData.size();
}
@Override
public T getItem(int position) {
return mData.get(position);
}
@Override
public long getItemId(int position) {
return position;
}
public List<T> getData() {
return mData;
}
/**
*
* <p>NOTE:, {@link List #clear()} {@link List #addAll(Collection)}
* @param newData
*/
public synchronized void update(Collection<? extends T> newData) {
mData.clear();
if( newData != null ) {
mData.addAll(newData);
}
notifyDataSetChanged();
}
/**
*
* @param newData
*/
public void replaceOriginData(List<T> newData) {
mData = (List<T>) newData;
notifyDataSetChanged();
}
/**
*
* <p>NOTE:
* @param appendData
*/
public synchronized void append(Collection<? extends T> appendData) {
if( appendData == null || appendData.isEmpty() ) {
return;
}
mData.addAll(appendData);
notifyDataSetChanged();
}
/**
*
* @param item
*/
public synchronized void add(T item) {
mData.add(item);
notifyDataSetChanged();
}
public void clear() {
mData.clear();
notifyDataSetChanged();
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
if( convertView == null ) {
convertView = View.inflate(mContext, mLayoutRes, null);
}
mCurrentConvertView = convertView;
bindData(position, convertView, getItem(position));
bindData(position, getItem(position));
return convertView;
}
@Deprecated
protected void bindData(int pos, View convertView, T itemData) {
}
/**
* {@link #getViewFromHolder(int)} View
* @param pos
* @param itemData
*/
abstract protected void bindData(int pos, T itemData);
@Deprecated
public <K extends View> K getViewFromHolder( View convertView, int id ) {
return ViewHolder.getView(convertView, id);
}
public <K extends View> K getViewFromHolder(int id ) {
return ViewHolder.getView(mCurrentConvertView, id);
}
} |
package org.chromium.base;
import android.content.Context;
import android.content.SharedPreferences;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.content.res.AssetManager;
import android.os.AsyncTask;
import android.preference.PreferenceManager;
import android.util.Log;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Set;
import java.util.List;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutionException;
import java.util.regex.Pattern;
/**
* Handles extracting the necessary resources bundled in an APK and moving them to a location on
* the file system accessible from the native code.
*/
public class ResourceExtractor {
private static final String LOGTAG = "ResourceExtractor";
private static final String LAST_LANGUAGE = "Last language";
private static final String PAK_FILENAMES = "Pak filenames";
private static final String ICU_DATA_FILENAME = "icudtl.dat";
private static String[] sMandatoryPaks = null;
private static ResourceIntercepter sIntercepter = null;
// By default, we attempt to extract a pak file for the users
// current device locale. Use setExtractImplicitLocale() to
// change this behavior.
private static boolean sExtractImplicitLocalePak = true;
public interface ResourceIntercepter {
Set<String> getInterceptableResourceList();
InputStream interceptLoadingForResource(String resource);
}
private class ExtractTask extends AsyncTask<Void, Void, Void> {
private static final int BUFFER_SIZE = 16 * 1024;
public ExtractTask() {
}
@Override
protected Void doInBackground(Void... unused) {
final File outputDir = getOutputDir();
if (!outputDir.exists() && !outputDir.mkdirs()) {
Log.e(LOGTAG, "Unable to create pak resources directory!");
return null;
}
String timestampFile = checkPakTimestamp(outputDir);
if (timestampFile != null) {
deleteFiles();
}
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(
mContext.getApplicationContext());
HashSet<String> filenames = (HashSet<String>) prefs.getStringSet(
PAK_FILENAMES, new HashSet<String>());
String currentLocale = LocaleUtils.getDefaultLocale();
String currentLanguage = currentLocale.split("-", 2)[0];
if (prefs.getString(LAST_LANGUAGE, "").equals(currentLanguage)
&& filenames.size() >= sMandatoryPaks.length) {
boolean filesPresent = true;
for (String file : filenames) {
if (!new File(outputDir, file).exists()) {
filesPresent = false;
break;
}
}
if (filesPresent) return null;
} else {
prefs.edit().putString(LAST_LANGUAGE, currentLanguage).apply();
}
StringBuilder p = new StringBuilder();
for (String mandatoryPak : sMandatoryPaks) {
if (p.length() > 0) p.append('|');
p.append("\\Q" + mandatoryPak + "\\E");
}
if (sExtractImplicitLocalePak) {
if (p.length() > 0) p.append('|');
// As well as the minimum required set of .paks above, we'll also add all .paks that
// we have for the user's currently selected language.
p.append(currentLanguage);
p.append("(-\\w+)?\\.pak");
}
Pattern paksToInstall = Pattern.compile(p.toString());
AssetManager manager = mContext.getResources().getAssets();
try {
// Loop through every asset file that we have in the APK, and look for the
// ones that we need to extract by trying to match the Patterns that we
// created above.
byte[] buffer = null;
String[] files = manager.list("");
if (sIntercepter != null) {
Set<String> filesIncludingInterceptableFiles =
sIntercepter.getInterceptableResourceList();
if (filesIncludingInterceptableFiles != null &&
!filesIncludingInterceptableFiles.isEmpty()) {
for (String file : files) {
filesIncludingInterceptableFiles.add(file);
}
files = new String[filesIncludingInterceptableFiles.size()];
filesIncludingInterceptableFiles.toArray(files);
}
}
for (String file : files) {
if (!paksToInstall.matcher(file).matches()) {
continue;
}
boolean isICUData = file.equals(ICU_DATA_FILENAME);
File output = new File(isICUData ? getAppDataDir() : outputDir, file);
if (output.exists()) {
continue;
}
InputStream is = null;
OutputStream os = null;
try {
if (sIntercepter != null) {
is = sIntercepter.interceptLoadingForResource(file);
}
if (is == null) is = manager.open(file);
os = new FileOutputStream(output);
Log.i(LOGTAG, "Extracting resource " + file);
if (buffer == null) {
buffer = new byte[BUFFER_SIZE];
}
int count = 0;
while ((count = is.read(buffer, 0, BUFFER_SIZE)) != -1) {
os.write(buffer, 0, count);
}
os.flush();
// Ensure something reasonable was written.
if (output.length() == 0) {
throw new IOException(file + " extracted with 0 length!");
}
if (!isICUData) {
filenames.add(file);
} else {
// icudata needs to be accessed by a renderer process.
output.setReadable(true, false);
}
} finally {
try {
if (is != null) {
is.close();
}
} finally {
if (os != null) {
os.close();
}
}
}
}
} catch (IOException e) {
// TODO(benm): See crbug/152413.
// Try to recover here, can we try again after deleting files instead of
// returning null? It might be useful to gather UMA here too to track if
// this happens with regularity.
Log.w(LOGTAG, "Exception unpacking required pak resources: " + e.getMessage());
deleteFiles();
return null;
}
// Finished, write out a timestamp file if we need to.
if (timestampFile != null) {
try {
new File(outputDir, timestampFile).createNewFile();
} catch (IOException e) {
// Worst case we don't write a timestamp, so we'll re-extract the resource
// paks next start up.
Log.w(LOGTAG, "Failed to write resource pak timestamp!");
}
}
// TODO(yusufo): Figure out why remove is required here.
prefs.edit().remove(PAK_FILENAMES).apply();
prefs.edit().putStringSet(PAK_FILENAMES, filenames).apply();
return null;
}
// Looks for a timestamp file on disk that indicates the version of the APK that
// the resource paks were extracted from. Returns null if a timestamp was found
// and it indicates that the resources match the current APK. Otherwise returns
// a String that represents the filename of a timestamp to create.
// Note that we do this to avoid adding a BroadcastReceiver on
// android.content.Intent#ACTION_PACKAGE_CHANGED as that causes process churn
// on (re)installation of *all* APK files.
private String checkPakTimestamp(File outputDir) {
final String timestampPrefix = "pak_timestamp-";
PackageManager pm = mContext.getPackageManager();
PackageInfo pi = null;
try {
pi = pm.getPackageInfo(mContext.getPackageName(), 0);
} catch (PackageManager.NameNotFoundException e) {
return timestampPrefix;
}
if (pi == null) {
return timestampPrefix;
}
String expectedTimestamp = timestampPrefix + pi.versionCode + "-" + pi.lastUpdateTime;
String[] timestamps = outputDir.list(new FilenameFilter() {
@Override
public boolean accept(File dir, String name) {
return name.startsWith(timestampPrefix);
}
});
if (timestamps.length != 1) {
// If there's no timestamp, nuke to be safe as we can't tell the age of the files.
// If there's multiple timestamps, something's gone wrong so nuke.
return expectedTimestamp;
}
if (!expectedTimestamp.equals(timestamps[0])) {
return expectedTimestamp;
}
// timestamp file is already up-to date.
return null;
}
}
private final Context mContext;
private ExtractTask mExtractTask;
private static ResourceExtractor sInstance;
public static ResourceExtractor get(Context context) {
if (sInstance == null) {
sInstance = new ResourceExtractor(context);
}
return sInstance;
}
/**
* Specifies the .pak files that should be extracted from the APK's asset resources directory
* and moved to {@link #getOutputDirFromContext(Context)}.
* @param mandatoryPaks The list of pak files to be loaded. If no pak files are
* required, pass a single empty string.
*/
public static void setMandatoryPaksToExtract(String... mandatoryPaks) {
assert (sInstance == null || sInstance.mExtractTask == null)
: "Must be called before startExtractingResources is called";
sMandatoryPaks = mandatoryPaks;
}
/**
* Allow embedders to intercept the resource loading process. Embedders may
* want to load paks from res/raw instead of assets, since assets are not
* supported in Android library project.
* @param intercepter The instance of intercepter which provides the files list
* to intercept and the inputstream for the files it wants to intercept with.
*/
public static void setResourceIntercepter(ResourceIntercepter intercepter) {
assert (sInstance == null || sInstance.mExtractTask == null)
: "Must be called before startExtractingResources is called";
sIntercepter = intercepter;
}
/**
* By default the ResourceExtractor will attempt to extract a pak resource for the users
* currently specified locale. This behavior can be changed with this function and is
* only needed by tests.
* @param extract False if we should not attempt to extract a pak file for
* the users currently selected locale and try to extract only the
* pak files specified in sMandatoryPaks.
*/
@VisibleForTesting
public static void setExtractImplicitLocaleForTesting(boolean extract) {
assert (sInstance == null || sInstance.mExtractTask == null)
: "Must be called before startExtractingResources is called";
sExtractImplicitLocalePak = extract;
}
/**
* Marks all the 'pak' resources, packaged as assets, for extraction during
* running the tests.
*/
@VisibleForTesting
public void setExtractAllPaksForTesting() {
List<String> pakFileAssets = new ArrayList<String>();
AssetManager manager = mContext.getResources().getAssets();
try {
String[] files = manager.list("");
for (String file : files) {
if (file.endsWith(".pak")) pakFileAssets.add(file);
}
} catch (IOException e) {
Log.w(LOGTAG, "Exception while accessing assets: " + e.getMessage(), e);
}
setMandatoryPaksToExtract(pakFileAssets.toArray(new String[pakFileAssets.size()]));
}
private ResourceExtractor(Context context) {
mContext = context.getApplicationContext();
}
public void waitForCompletion() {
if (shouldSkipPakExtraction()) {
return;
}
assert mExtractTask != null;
try {
mExtractTask.get();
} catch (CancellationException e) {
// Don't leave the files in an inconsistent state.
deleteFiles();
} catch (ExecutionException e2) {
deleteFiles();
} catch (InterruptedException e3) {
deleteFiles();
}
}
/**
* This will extract the application pak resources in an
* AsyncTask. Call waitForCompletion() at the point resources
* are needed to block until the task completes.
*/
public void startExtractingResources() {
if (mExtractTask != null) {
return;
}
if (shouldSkipPakExtraction()) {
return;
}
mExtractTask = new ExtractTask();
mExtractTask.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
}
private File getAppDataDir() {
return new File(PathUtils.getDataDirectory(mContext));
}
private File getOutputDir() {
return new File(getAppDataDir(), "paks");
}
/**
 * Pak files (UI strings and other resources) should be updated along with
 * Chrome. A version mismatch can lead to a rather broken user experience.
 * The ICU data (icudtl.dat) is less version-sensitive, but still can
 * lead to malfunction/UX misbehavior. So, we regard failing to update them
 * as an error.
 */
private void deleteFiles() {
    File icudata = new File(getAppDataDir(), ICU_DATA_FILENAME);
    if (icudata.exists() && !icudata.delete()) {
        Log.e(LOGTAG, "Unable to remove the icudata " + icudata.getName());
    }
    File dir = getOutputDir();
    if (dir.exists()) {
        // listFiles() returns null on an I/O error or if the directory
        // disappears between the exists() check and the listing; guard
        // against the resulting NullPointerException.
        File[] files = dir.listFiles();
        if (files != null) {
            for (File file : files) {
                if (!file.delete()) {
                    Log.e(LOGTAG, "Unable to remove existing resource " + file.getName());
                }
            }
        }
    }
}
/**
 * Pak extraction not necessarily required by the embedder; we allow them to skip
 * this process if they call setMandatoryPaksToExtract with a single empty String.
 */
private static boolean shouldSkipPakExtraction() {
    // Must call setMandatoryPaksToExtract before beginning resource extraction.
    assert sMandatoryPaks != null;
    boolean singleEntry = (sMandatoryPaks.length == 1);
    return singleEntry && "".equals(sMandatoryPaks[0]);
}
} |
package com.mebigfatguy.fbcontrib.collect;
import java.util.Set;
import org.apache.bcel.Constants;
import org.apache.bcel.classfile.AnnotationEntry;
import org.apache.bcel.classfile.Code;
import org.apache.bcel.classfile.JavaClass;
import org.apache.bcel.classfile.Method;
import com.mebigfatguy.fbcontrib.utils.UnmodifiableSet;
import edu.umd.cs.findbugs.BugReporter;
import edu.umd.cs.findbugs.BytecodeScanningDetector;
import edu.umd.cs.findbugs.NonReportingDetector;
import edu.umd.cs.findbugs.ba.ClassContext;
public class CollectStatistics extends BytecodeScanningDetector implements NonReportingDetector {
private static final Set<String> COMMON_METHOD_SIGS = UnmodifiableSet.create(
//@formatter:off
"\\<init\\>\\(\\)V",
"toString\\(\\)Ljava/lang/String;",
"hashCode\\(\\)I",
"clone\\(\\).*",
"values\\(\\).*",
"main\\(\\[Ljava/lang/String;\\)V"
//@formatter:on
);
private int numMethodCalls;
private boolean modifiesState;
private boolean classHasAnnotation;
public CollectStatistics(@SuppressWarnings("unused") BugReporter bugReporter) {
Statistics.getStatistics().clear();
}
@Override
public void visitClassContext(ClassContext classContext) {
JavaClass cls = classContext.getJavaClass();
AnnotationEntry[] annotations = cls.getAnnotationEntries();
classHasAnnotation = (annotations != null) && (annotations.length > 0);
super.visitClassContext(classContext);
}
@Override
public void visitCode(Code obj) {
numMethodCalls = 0;
modifiesState = false;
byte[] code = obj.getCode();
if (code != null) {
super.visitCode(obj);
String clsName = getClassName();
Method method = getMethod();
int accessFlags = method.getAccessFlags();
MethodInfo mi = Statistics.getStatistics().addMethodStatistics(clsName, getMethodName(), getMethodSig(), accessFlags, obj.getLength(),
numMethodCalls);
if (clsName.contains("$") || ((accessFlags & (ACC_ABSTRACT | ACC_INTERFACE | ACC_ANNOTATION)) != 0)) {
mi.addCallingAccess(Constants.ACC_PUBLIC);
} else if ((accessFlags & Constants.ACC_PRIVATE) == 0) {
if (isAssociationedWithAnnotations(method)) {
mi.addCallingAccess(Constants.ACC_PUBLIC);
} else {
String methodSig = getMethodName() + getMethodSig();
for (String sig : COMMON_METHOD_SIGS) {
if (methodSig.matches(sig)) {
mi.addCallingAccess(Constants.ACC_PUBLIC);
break;
}
}
}
}
mi.setModifiesState(modifiesState);
}
}
@Override
public void sawOpcode(int seen) {
switch (seen) {
case INVOKEVIRTUAL:
case INVOKEINTERFACE:
case INVOKESPECIAL:
case INVOKESTATIC:
case INVOKEDYNAMIC:
numMethodCalls++;
break;
case PUTSTATIC:
case PUTFIELD:
modifiesState = true;
break;
default:
break;
}
}
private boolean isAssociationedWithAnnotations(Method m) {
if (classHasAnnotation) {
return true;
}
AnnotationEntry[] annotations = m.getAnnotationEntries();
return (annotations != null) && (annotations.length > 0);
}
} |
package com.mebigfatguy.fbcontrib.detect;
import java.util.Arrays;
import java.util.BitSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.bcel.Constants;
import org.apache.bcel.Repository;
import org.apache.bcel.classfile.Code;
import org.apache.bcel.classfile.Constant;
import org.apache.bcel.classfile.ConstantDouble;
import org.apache.bcel.classfile.ConstantMethodref;
import org.apache.bcel.classfile.ConstantNameAndType;
import org.apache.bcel.classfile.ConstantPool;
import org.apache.bcel.classfile.ConstantString;
import org.apache.bcel.classfile.ConstantValue;
import org.apache.bcel.classfile.Field;
import org.apache.bcel.classfile.JavaClass;
import org.apache.bcel.classfile.LocalVariable;
import org.apache.bcel.classfile.LocalVariableTable;
import org.apache.bcel.generic.Type;
import com.mebigfatguy.fbcontrib.utils.BugType;
import com.mebigfatguy.fbcontrib.utils.CodeByteUtils;
import com.mebigfatguy.fbcontrib.utils.OpcodeUtils;
import com.mebigfatguy.fbcontrib.utils.RegisterUtils;
import com.mebigfatguy.fbcontrib.utils.TernaryPatcher;
import com.mebigfatguy.fbcontrib.utils.Values;
import edu.umd.cs.findbugs.BugInstance;
import edu.umd.cs.findbugs.BugReporter;
import edu.umd.cs.findbugs.BytecodeScanningDetector;
import edu.umd.cs.findbugs.OpcodeStack;
import edu.umd.cs.findbugs.OpcodeStack.CustomUserValue;
import edu.umd.cs.findbugs.ba.ClassContext;
import edu.umd.cs.findbugs.visitclass.LVTHelper;
/**
* looks for silly bugs that are simple but do not fit into one large pattern.
*/
@CustomUserValue
public class SillynessPotPourri extends BytecodeScanningDetector
{
// collection interface names (JVM slashed form) whose size() results are
// tracked so that "size() == 0" comparisons can be flagged in favor of isEmpty()
private static final Set<String> collectionInterfaces = new HashSet<String>();
static {
    collectionInterfaces.add("java/util/Collection");
    collectionInterfaces.add("java/util/List");
    collectionInterfaces.add("java/util/Set");
    collectionInterfaces.add("java/util/SortedSet");
    collectionInterfaces.add("java/util/Map");
    collectionInterfaces.add("java/util/SortedMap");
}
// classes (dotted names) on which equals() is suspicious because the
// detector treats calling equals on them as a likely mistake
private static final Set<String> oddMissingEqualsClasses = new HashSet<String>();
static {
    oddMissingEqualsClasses.add("java.lang.StringBuffer");
    oddMissingEqualsClasses.add("java.lang.StringBuilder");
}
// marker placed in group(2) of an "append:<reg>:<marker>" user value when
// the appended argument was a string literal
private static final String LITERAL = "literal";
// parses the user value built for StringBuilder/StringBuffer appends:
// group(1) = register of the builder, group(2) = literal marker (may be empty)
private static final Pattern APPEND_PATTERN = Pattern.compile("append:([0-9]+):(.*)");
// JavaClass for java.util.Calendar, or null if it cannot be resolved;
// used by the calendar before/after comparison check
private static JavaClass calendarClass;
static {
    try {
        calendarClass = Repository.lookupClass("java/util/Calendar");
    } catch (ClassNotFoundException cnfe) {
        calendarClass = null;
    }
}
// String methods that are pointless when invoked on a string literal,
// mapped to the stack offset where the literal receiver sits
private static Map<String, Integer> methodsThatAreSillyOnStringLiterals = new HashMap<String, Integer>();
static {
    methodsThatAreSillyOnStringLiterals.put("toLowerCase()Ljava/lang/String;", Values.ZERO);
    methodsThatAreSillyOnStringLiterals.put("toUpperCase()Ljava/lang/String;", Values.ZERO);
    methodsThatAreSillyOnStringLiterals.put("toLowerCase(Ljava/util/Locale;)Ljava/lang/String;", Values.ONE);
    methodsThatAreSillyOnStringLiterals.put("toUpperCase(Ljava/util/Locale;)Ljava/lang/String;", Values.ONE);
    methodsThatAreSillyOnStringLiterals.put("trim()Ljava/lang/String;", Values.ZERO);
    methodsThatAreSillyOnStringLiterals.put("isEmpty()Z", Values.ZERO);
}
private final BugReporter bugReporter;
// classes already reported for calling Object.toString(); avoids duplicates
private final Set<String> toStringClasses;
private OpcodeStack stack;
// PCs of the four most recent opcodes, oldest first; -1 means "not yet seen"
private int lastPCs[];
private int lastOpcode;
private int lastReg;
private boolean lastIfEqWasBoolean;
private boolean lastLoadWasString;
/** branch targets, to a set of branch instructions */
private Map<Integer, BitSet> branchTargets;
private Set<String> staticConstants;
/**
 * constructs a SPP detector given the reporter to report bugs on
 * @param bugReporter the sink of bug reports
 */
public SillynessPotPourri(BugReporter bugReporter) {
    this.bugReporter = bugReporter;
    toStringClasses = new HashSet<String>();
}
/**
 * implements the visitor to flag serialVersionUID fields that are static but
 * not private; serialization reads the field reflectively, so there is no
 * reason to widen its access.
 *
 * @param field the currently parsed field
 */
@Override
public void visitField(Field field) {
    int flags = field.getAccessFlags();
    boolean isStatic = (flags & ACC_STATIC) != 0;
    boolean isPrivate = (flags & ACC_PRIVATE) != 0;
    if (isStatic && !isPrivate && "serialVersionUID".equals(field.getName())) {
        bugReporter.reportBug(new BugInstance(this, BugType.SPP_SERIALVER_SHOULD_BE_PRIVATE.name(), LOW_PRIORITY)
                .addClass(this)
                .addField(this));
    }
}
/**
 * implements the visitor to allocate the per-class scratch state and to
 * release it once the class has been fully processed.
 *
 * @param classContext the context object of the currently parsed class
 */
@Override
public void visitClassContext(ClassContext classContext) {
    try {
        stack = new OpcodeStack();
        lastPCs = new int[4];
        branchTargets = new HashMap<Integer, BitSet>();
        super.visitClassContext(classContext);
    } finally {
        branchTargets = null;
        staticConstants = null;
        lastPCs = null;
        stack = null;
    }
}
/**
 * implements the visitor to reset the opcode stack and all per-method
 * bookkeeping before the method's code is scanned
 *
 * @param obj the context object for the currently parsed Code
 */
@Override
public void visitCode(Code obj) {
    stack.resetForMethodEntry(this);
    Arrays.fill(lastPCs, -1);
    branchTargets.clear();
    lastOpcode = -1;
    lastReg = -1;
    lastIfEqWasBoolean = false;
    lastLoadWasString = false;
    super.visitCode(obj);
}
/**
 * implements the visitor to look for various silly bugs, dispatching each
 * opcode to the relevant pattern check and maintaining the sliding window of
 * recent PCs plus the user-value tags on stack items.
 *
 * @param seen the opcode of the currently parsed instruction
 */
@Override
public void sawOpcode(int seen) {
    int reg = -1;
    String userValue = null;
    try {
        stack.precomputation(this);
        if (isBranchByteCode(seen)) {
            // record which PCs branch to which targets; used later to rule
            // out patterns that other control flow jumps into
            Integer branchTarget = Integer.valueOf(getBranchTarget());
            BitSet branchInsSet = branchTargets.get(branchTarget);
            if (branchInsSet == null)
            {
                branchInsSet = new BitSet();
                branchTargets.put(branchTarget, branchInsSet);
            }
            branchInsSet.set(getPC());
        }
        //not an else if, because some of the opcodes in the previous branch also matter here.
        if ((seen == IFEQ) || (seen == IFLE) || (seen == IFNE)) {
            checkForEmptyStringAndNullChecks(seen);
        }
        //see above, several opcodes hit multiple branches.
        if ((seen == IFEQ) || (seen == IFNE) || (seen == IFGT)) {
            checkSizeEquals0();
        }
        if (seen == IFEQ) {
            checkNullAndInstanceOf();
        }
        if (seen == IFNE) {
            checkNotEqualsStringBuilderLength();
        } else if (seen == IFEQ) {
            checkEqualsStringBufferLength();
        } else if ((seen == IRETURN) && lastIfEqWasBoolean) {
            checkForUselessTernaryReturn();
        } else if (seen == LDC2_W) {
            checkApproximationsOfMathConstants();
        } else if (seen == DCMPL) {
            checkCompareToNaNDouble();
        } else if (seen == FCMPL) {
            checkCompareToNaNFloat();
        } else if (OpcodeUtils.isAStore(seen)) {
            reg = RegisterUtils.getAStoreReg(this, seen);
            checkStutterdAssignment(seen, reg);
            checkImmutableUsageOfStringBuilder(reg);
        } else if (OpcodeUtils.isALoad(seen)) {
            sawLoad(seen);
        } else if ((seen >= ICONST_0) && (seen <= ICONST_3)) {
            userValue = sawIntConst(userValue);
        } else if (seen == CALOAD) {
            checkImproperToCharArrayUse();
        } else if (seen == INVOKESTATIC) {
            userValue = sawInvokeStatic(userValue);
        } else if (seen == INVOKEVIRTUAL) {
            userValue = sawInvokeVirtual(userValue);
        } else if (seen == INVOKESPECIAL) {
            sawInvokeSpecial();
        } else if (seen == INVOKEINTERFACE) {
            userValue = sawInvokeInterface(userValue);
        }
    } catch (ClassNotFoundException cnfe) {
        bugReporter.reportMissingClass(cnfe);
    } finally {
        TernaryPatcher.pre(stack, seen);
        stack.sawOpcode(this, seen);
        TernaryPatcher.post(stack, seen);
        if (stack.getStackDepth() > 0) {
            OpcodeStack.Item item = stack.getStackItem(0);
            if (userValue != null) {
                item.setUserValue(userValue);
            } else if ("iterator".equals(item.getUserValue())
                    && ((seen == GETFIELD) || (seen == ALOAD) || ((seen >= ALOAD_0) && (seen <= ALOAD_3)))) {
                // FIX: the original condition lacked parentheses around the
                // opcode alternatives, so && bound tighter than || and ANY
                // plain ALOAD cleared a pending user value regardless of its
                // contents; only an "iterator" tag should be invalidated here.
                item.setUserValue(null);
            }
        }
        lastOpcode = seen;
        lastReg = reg;
        // slide the 4-entry PC window and append the current PC
        System.arraycopy(lastPCs, 1, lastPCs, 0, 3);
        lastPCs[3] = getPC();
    }
}
/**
 * reports indexing into the result of toCharArray() with a constant index;
 * charAt() gives the same character without copying the whole array.
 */
private void checkImproperToCharArrayUse() {
    if (stack.getStackDepth() == 0) {
        return;
    }
    OpcodeStack.Item top = stack.getStackItem(0);
    String tag = (String) top.getUserValue();
    if ("iconst".equals(tag)) {
        bugReporter.reportBug(new BugInstance(this, BugType.SPP_USE_CHARAT.name(), NORMAL_PRIORITY)
                .addClass(this)
                .addMethod(this)
                .addSourceLine(this));
    }
}
/**
 * tags small integer constants pushed onto the result of toCharArray(), so
 * that a following CALOAD can be flagged as a charAt() candidate.
 *
 * @param userValue the user value computed so far for this instruction
 * @return "iconst" when the constant sits on a toCharArray() result, else the input
 */
private String sawIntConst(String userValue) {
    if (stack.getStackDepth() > 0) {
        String tag = (String) stack.getStackItem(0).getUserValue();
        if ("toCharArray".equals(tag)) {
            return "iconst";
        }
    }
    return userValue;
}
/**
 * records whether the reference just loaded is (per the local variable
 * table) a java.lang.String, for the empty-string/null-check pattern.
 *
 * @param seen the ALOAD-family opcode being processed
 */
private void sawLoad(int seen) {
    lastLoadWasString = false;
    LocalVariableTable varTable = getMethod().getLocalVariableTable();
    if (varTable == null) {
        return;
    }
    int loadReg = RegisterUtils.getALoadReg(this, seen);
    LocalVariable localVar = LVTHelper.getLocalVariableAtPC(varTable, loadReg, getPC());
    if (localVar != null) {
        lastLoadWasString = "Ljava/lang/String;".equals(localVar.getSignature());
    }
}
/**
 * reports two identical consecutive stores to the same register
 * ("a = a = value;" style stuttered assignments).
 *
 * @param seen the store opcode being processed
 * @param reg the register being stored to
 */
private void checkStutterdAssignment(int seen, int reg) {
    boolean sameOpcode = (seen == lastOpcode);
    boolean sameRegister = (reg == lastReg);
    if (sameOpcode && sameRegister) {
        bugReporter.reportBug(new BugInstance(this, BugType.SPP_STUTTERED_ASSIGNMENT.name(), NORMAL_PRIORITY)
                .addClass(this)
                .addMethod(this)
                .addSourceLine(this));
    }
}
/**
 * reports storing the result of sb.append(...) back into the same register
 * the builder came from — the builder is mutable, so the reassignment is
 * pointless and suggests the author thought it was immutable.
 *
 * @param reg the register receiving the store
 */
private void checkImmutableUsageOfStringBuilder(int reg) {
    if (stack.getStackDepth() == 0) {
        return;
    }
    OpcodeStack.Item top = stack.getStackItem(0);
    String tag = (String) top.getUserValue();
    if (tag == null) {
        return;
    }
    if ("trim".equals(tag)) {
        // a trim() result is a fresh string; the tag no longer applies
        top.setUserValue(null);
        return;
    }
    Matcher m = APPEND_PATTERN.matcher(tag);
    if (m.matches() && (reg == Integer.parseInt(m.group(1)))) {
        bugReporter.reportBug(new BugInstance(this, BugType.SPP_STRINGBUILDER_IS_MUTABLE.name(), NORMAL_PRIORITY)
                .addClass(this)
                .addMethod(this)
                .addSourceLine(this));
    }
}
/**
 * reports comparisons against a Float.NaN constant; NaN never compares
 * equal to anything, so Float.isNaN() must be used instead.
 */
private void checkCompareToNaNFloat() {
    if (stack.getStackDepth() <= 1) {
        return;
    }
    Float rhs = (Float) stack.getStackItem(0).getConstant();
    Float lhs = (Float) stack.getStackItem(1).getConstant();
    boolean comparesToNaN = ((rhs != null) && rhs.isNaN()) || ((lhs != null) && lhs.isNaN());
    if (comparesToNaN) {
        bugReporter.reportBug(new BugInstance(this, BugType.SPP_USE_ISNAN.name(), NORMAL_PRIORITY)
                .addClass(this)
                .addMethod(this)
                .addSourceLine(this)
                .addString("float")
                .addString("Float"));
    }
}
/**
 * reports comparisons against a Double.NaN constant; NaN never compares
 * equal to anything, so Double.isNaN() must be used instead.
 */
private void checkCompareToNaNDouble() {
    if (stack.getStackDepth() <= 1) {
        return;
    }
    Double rhs = (Double) stack.getStackItem(0).getConstant();
    Double lhs = (Double) stack.getStackItem(1).getConstant();
    boolean comparesToNaN = ((rhs != null) && rhs.isNaN()) || ((lhs != null) && lhs.isNaN());
    if (comparesToNaN) {
        bugReporter.reportBug(new BugInstance(this, BugType.SPP_USE_ISNAN.name(), NORMAL_PRIORITY)
                .addClass(this)
                .addMethod(this)
                .addSourceLine(this)
                .addString("double")
                .addString("Double"));
    }
}
/**
 * reports double literals that are close to — but not exactly — Math.PI or
 * Math.E; the built-in constants should be used instead of approximations.
 */
private void checkApproximationsOfMathConstants() {
    Object operand = getConstantRefOperand();
    if (!(operand instanceof ConstantDouble)) {
        return;
    }
    double value = ((ConstantDouble) operand).getBytes();
    double piDelta = Math.abs(value - Math.PI);
    double eDelta = Math.abs(value - Math.E);
    // a delta of exactly 0 means the real constant was used — that's fine
    boolean nearPi = (piDelta > 0.0) && (piDelta < 0.002);
    boolean nearE = (eDelta > 0.0) && (eDelta < 0.002);
    if (nearPi || nearE) {
        bugReporter.reportBug(new BugInstance(this, BugType.SPP_USE_MATH_CONSTANT.name(), NORMAL_PRIORITY)
                .addClass(this)
                .addMethod(this)
                .addSourceLine(this));
    }
}
/**
 * looks for the bytecode shape IFEQ / ICONST_1 / GOTO / ICONST_0 right
 * before an IRETURN in a boolean-returning method — i.e.
 * "return b ? true : false;", a useless ternary over a boolean expression.
 */
private void checkForUselessTernaryReturn() {
    byte[] bytes = getCode().getCode();
    // lastPCs holds the PCs of the four most recent opcodes, oldest first
    if ((lastPCs[0] != -1)
            && ((0x00FF & bytes[lastPCs[3]]) == ICONST_0)
            && ((0x00FF & bytes[lastPCs[2]]) == GOTO)
            && ((0x00FF & bytes[lastPCs[1]]) == ICONST_1)
            && ((0x00FF & bytes[lastPCs[0]]) == IFEQ)) {
        // only meaningful when the method actually returns boolean
        if (getMethod().getSignature().endsWith("Z")) {
            boolean bug = true;
            // if some branch jumps onto the ICONST_1, this is not a simple
            // ternary but a more complex flow — don't report
            BitSet branchInsSet = branchTargets.get(Integer.valueOf(lastPCs[1]));
            if (branchInsSet != null)
            {
                bug = false;
            }
            // the ICONST_0 should be the target of only the IFEQ; more than
            // one incoming branch again means a more complex flow
            branchInsSet = branchTargets.get(Integer.valueOf(lastPCs[3]));
            if ((branchInsSet != null) && (branchInsSet.cardinality() > 1))
            {
                bug = false;
            }
            if (bug) {
                bugReporter.reportBug(new BugInstance(this, BugType.SPP_USELESS_TERNARY.name(), NORMAL_PRIORITY)
                        .addClass(this)
                        .addMethod(this)
                        .addSourceLine(this));
            }
        }
    }
}
/**
 * looks for sb.toString().equals("") on a StringBuffer/StringBuilder, which
 * should be written as a length() == 0 test; also records whether the value
 * tested by this IFEQ is a boolean, for the useless-ternary-return check.
 */
private void checkEqualsStringBufferLength() {
    if (stack.getStackDepth() > 0) {
        OpcodeStack.Item itm = stack.getStackItem(0);
        lastIfEqWasBoolean = "Z".equals(itm.getSignature());
    }
    byte[] bytes = getCode().getCode();
    if (lastPCs[1] != -1) {
        if (CodeByteUtils.getbyte(bytes, lastPCs[3]) == INVOKEVIRTUAL) {
            int loadIns = CodeByteUtils.getbyte(bytes, lastPCs[2]);
            if (((loadIns == LDC) || (loadIns == LDC_W))
                    && (CodeByteUtils.getbyte(bytes, lastPCs[1]) == INVOKEVIRTUAL)) {
                ConstantPool pool = getConstantPool();
                int toStringIndex = CodeByteUtils.getshort(bytes, lastPCs[1]+1);
                Constant cmr = pool.getConstant(toStringIndex);
                if (cmr instanceof ConstantMethodref) {
                    ConstantMethodref toStringMR = (ConstantMethodref)cmr;
                    String toStringCls = toStringMR.getClass(pool);
                    // FIX: the literal was corrupted to "java.lang.&&StringBu",
                    // which can never prefix a class name, so this detector
                    // could never fire; match StringBuffer/StringBuilder as the
                    // sibling checkNotEqualsStringBuilderLength() does.
                    if (toStringCls.startsWith("java.lang.StringBu")) {
                        int consIndex = CodeByteUtils.getbyte(bytes, lastPCs[2]+1);
                        Constant c = pool.getConstant(consIndex);
                        if (c instanceof ConstantString) {
                            // only the comparison against "" is the silly pattern
                            if ("".equals(((ConstantString) c).getBytes(pool))) {
                                int nandtIndex = toStringMR.getNameAndTypeIndex();
                                ConstantNameAndType cnt = (ConstantNameAndType)pool.getConstant(nandtIndex);
                                if ("toString".equals(cnt.getName(pool))) {
                                    int lengthIndex = CodeByteUtils.getshort(bytes, lastPCs[3]+1);
                                    ConstantMethodref lengthMR = (ConstantMethodref)pool.getConstant(lengthIndex);
                                    nandtIndex = lengthMR.getNameAndTypeIndex();
                                    cnt = (ConstantNameAndType)pool.getConstant(nandtIndex);
                                    if ("equals".equals(cnt.getName(pool))) {
                                        bugReporter.reportBug(new BugInstance(this, BugType.SPP_USE_STRINGBUILDER_LENGTH.name(), NORMAL_PRIORITY)
                                                .addClass(this)
                                                .addMethod(this)
                                                .addSourceLine(this));
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}
/**
 * looks for sb.toString().length() used in an IFNE test on a
 * StringBuffer/StringBuilder; sb.length() avoids materializing the string.
 */
private void checkNotEqualsStringBuilderLength() {
    byte[] bytes = getCode().getCode();
    // lastPCs holds the PCs of the four most recent opcodes, oldest first;
    // the pattern needs the last two to both be INVOKEVIRTUALs
    if (lastPCs[2] != -1) {
        if ((CodeByteUtils.getbyte(bytes, lastPCs[3]) == INVOKEVIRTUAL)
                && (CodeByteUtils.getbyte(bytes, lastPCs[2]) == INVOKEVIRTUAL)) {
            ConstantPool pool = getConstantPool();
            // resolve the first call: must be StringBuffer/StringBuilder.toString()
            int toStringIndex = CodeByteUtils.getshort(bytes, lastPCs[2]+1);
            ConstantMethodref toStringMR = (ConstantMethodref)pool.getConstant(toStringIndex);
            String toStringCls = toStringMR.getClass(pool);
            if (toStringCls.startsWith("java.lang.StringBu")) {
                int nandtIndex = toStringMR.getNameAndTypeIndex();
                ConstantNameAndType cnt = (ConstantNameAndType)pool.getConstant(nandtIndex);
                if ("toString".equals(cnt.getName(pool))) {
                    // resolve the second call: must be String.length()
                    int lengthIndex = CodeByteUtils.getshort(bytes, lastPCs[3]+1);
                    ConstantMethodref lengthMR = (ConstantMethodref)pool.getConstant(lengthIndex);
                    nandtIndex = lengthMR.getNameAndTypeIndex();
                    cnt = (ConstantNameAndType)pool.getConstant(nandtIndex);
                    if ("length".equals(cnt.getName(pool))) {
                        bugReporter.reportBug(new BugInstance(this, BugType.SPP_USE_STRINGBUILDER_LENGTH.name(), NORMAL_PRIORITY)
                                .addClass(this)
                                .addMethod(this)
                                .addSourceLine(this));
                    }
                }
            }
        }
    }
}
/**
 * looks for a null check immediately followed by an instanceof test of the
 * same reference where both branch to the same target; instanceof already
 * yields false for null, so the null check is redundant.
 */
private void checkNullAndInstanceOf() {
    byte[] bytes = getCode().getCode();
    if ((lastPCs[0] != -1) && (CodeByteUtils.getbyte(bytes, lastPCs[1]) == IFNULL) && (CodeByteUtils.getbyte(bytes, lastPCs[3]) == INSTANCEOF)) {
        int ins0 = CodeByteUtils.getbyte(bytes, lastPCs[0]);
        if ((ins0 == ALOAD) || (ins0 == ALOAD_0) || (ins0 == ALOAD_1) || (ins0 == ALOAD_2) || (ins0 == ALOAD_3)) {
            // FIX: the second load is the one at lastPCs[2]; the original read
            // lastPCs[0] again, making (ins0 == ins2) trivially true and the
            // pattern match even when two DIFFERENT references were loaded.
            int ins2 = CodeByteUtils.getbyte(bytes, lastPCs[2]);
            if (ins0 == ins2) {
                // for the wide ALOAD form, also compare the register operand bytes
                if ((ins0 != ALOAD) || (CodeByteUtils.getbyte(bytes, lastPCs[0] + 1) == CodeByteUtils.getbyte(bytes, lastPCs[2] + 1))) {
                    int ifNullTarget = lastPCs[1] + CodeByteUtils.getshort(bytes, lastPCs[1]+1);
                    if (ifNullTarget == getBranchTarget()) {
                        bugReporter.reportBug(new BugInstance(this, BugType.SPP_NULL_BEFORE_INSTANCEOF.name(), NORMAL_PRIORITY)
                                .addClass(this)
                                .addMethod(this)
                                .addSourceLine(this));
                    }
                }
            }
        }
    }
}
/**
 * reports comparing a collection's size() result against 0 where the
 * clearer isEmpty() could be used.
 */
private void checkSizeEquals0() {
    if (stack.getStackDepth() != 1) {
        return;
    }
    OpcodeStack.Item top = stack.getStackItem(0);
    if ("size".equals(top.getUserValue())) {
        bugReporter.reportBug(new BugInstance(this, BugType.SPP_USE_ISEMPTY.name(), NORMAL_PRIORITY)
                .addClass(this)
                .addMethod(this)
                .addSourceLine(this));
    }
}
/**
 * looks for the suspicious string test idiom of null-checking a String and
 * then comparing its length() in a way whose branch distance doesn't match
 * the standard "s == null || s.length() == 0" shape.
 *
 * @param seen the branch opcode (IFEQ/IFLE/IFNE) being processed
 */
private void checkForEmptyStringAndNullChecks(int seen) {
    if (lastLoadWasString && (lastPCs[0] != -1)) {
        byte[] bytes = getCode().getCode();
        int loadIns = CodeByteUtils.getbyte(bytes, lastPCs[2]);
        // expected branch distance over the pattern; ALOAD carries an extra
        // operand byte, hence 11 vs 10 — NOTE(review): offsets derived from
        // the instruction widths of the matched sequence; confirm if edited
        int brOffset = (loadIns == ALOAD) ? 11 : 10;
        // pattern, oldest to newest: load ref / IFNULL / load ref again /
        // INVOKEVIRTUAL (expected to be String.length())
        if ((((loadIns >= ALOAD_0) && (loadIns <= ALOAD_3)) || (loadIns == ALOAD))
                && (CodeByteUtils.getbyte(bytes, lastPCs[3]) == INVOKEVIRTUAL)
                && (CodeByteUtils.getbyte(bytes, lastPCs[2]) == loadIns)
                && (CodeByteUtils.getbyte(bytes, lastPCs[1]) == IFNULL)
                && (CodeByteUtils.getbyte(bytes, lastPCs[0]) == loadIns)
                // for wide ALOAD, both loads must reference the same register
                && ((loadIns != ALOAD) || (CodeByteUtils.getbyte(bytes, lastPCs[2]+1) == CodeByteUtils.getbyte(bytes, lastPCs[0]+1)))
                // IFNE: null check jumps beyond the pattern; otherwise it must
                // land exactly at the expected offset
                && ((seen == IFNE) ? CodeByteUtils.getshort(bytes, lastPCs[1]+1) > brOffset : CodeByteUtils.getshort(bytes, lastPCs[1]+1) == brOffset)) {
            int nextOp = CodeByteUtils.getbyte(bytes, getNextPC());
            // a following GOTO means this is part of a larger expression
            if ((nextOp != GOTO) && (nextOp != GOTO_W)) {
                ConstantPool pool = getConstantPool();
                int mpoolIndex = CodeByteUtils.getshort(bytes, lastPCs[3]+1);
                ConstantMethodref cmr = (ConstantMethodref)pool.getConstant(mpoolIndex);
                int nandtIndex = cmr.getNameAndTypeIndex();
                ConstantNameAndType cnt = (ConstantNameAndType)pool.getConstant(nandtIndex);
                if ("length".equals(cnt.getName(pool))) {
                    bugReporter.reportBug(new BugInstance(this, BugType.SPP_SUSPECT_STRING_TEST.name(), NORMAL_PRIORITY)
                            .addClass(this)
                            .addMethod(this)
                            .addSourceLine(this));
                }
            }
        }
    }
}
/**
 * returns whether the opcode is a conditional or unconditional branch
 * instruction (including the null-compare and wide-goto forms).
 *
 * @param seen the opcode to classify
 * @return true when the opcode transfers control to a branch target
 */
private boolean isBranchByteCode(int seen) {
    if ((seen >= IFEQ) && (seen <= GOTO)) {
        return true;
    }
    return (seen == IFNULL) || (seen == IFNONNULL) || (seen == GOTO_W);
}
/**
 * processes INVOKESTATIC instructions: tags System.getProperties() results,
 * and checks System.arraycopy and java.lang.reflect.Array calls whose array
 * argument provably is not an array.
 *
 * @param userValue the user value computed so far for this instruction
 * @return the user value to tag the resulting stack item with, if any
 */
private String sawInvokeStatic(String userValue) {
    String className = getClassConstantOperand();
    String methodName = getNameConstantOperand();
    if ("java/lang/System".equals(className)) {
        if ("getProperties".equals(methodName)) {
            userValue = "getProperties";
        } else if ("arraycopy".equals(methodName) && (stack.getStackDepth() >= 5)) {
            // arraycopy(src, srcPos, dest, destPos, length):
            // dest sits at stack offset 2, src at offset 4
            checkArrayArgument(2);
            checkArrayArgument(4);
        }
    } else if ("java/lang/reflect/Array".equals(className)) {
        // stack offset of the array argument varies with the accessor kind
        int offset = -1;
        if ("getLength".equals(methodName)) {
            offset = 0;
        } else if (methodName.startsWith("get")) {
            offset = 1;
        } else if (methodName.startsWith("set")) {
            offset = 2;
        }
        if ((offset >= 0) && (stack.getStackDepth() > offset)) {
            checkArrayArgument(offset);
        }
    }
    return userValue;
}
/**
 * reports SPP_NON_ARRAY_PARM when the stack item at the given offset has a
 * signature that is neither an array type nor plain java.lang.Object
 * (Object may legitimately hold an array reference).
 *
 * @param stackOffset the stack depth of the argument to inspect
 */
private void checkArrayArgument(int stackOffset) {
    OpcodeStack.Item item = stack.getStackItem(stackOffset);
    String sig = item.getSignature();
    if ((sig.charAt(0) != '[') && !"Ljava/lang/Object;".equals(sig)) {
        bugReporter.reportBug(new BugInstance(this, BugType.SPP_NON_ARRAY_PARM.name(), HIGH_PRIORITY)
                .addClass(this)
                .addMethod(this)
                .addSourceLine(this));
    }
}
/**
 * dispatches INVOKEVIRTUAL instructions to the per-receiver-class
 * silliness checks. The chain order matters: specific receiver classes are
 * handled before the generic equals() check.
 *
 * @param userValue the user value computed so far for this instruction
 * @return the user value to tag the resulting stack item with, if any
 * @throws ClassNotFoundException if a referenced class cannot be loaded
 */
private String sawInvokeVirtual(String userValue) throws ClassNotFoundException {
    String clsName = getClassConstantOperand();
    String method = getNameConstantOperand();
    if ("java/util/BitSet".equals(clsName)) {
        bitSetSilliness(method);
    } else if ("java/lang/StringBuilder".equals(clsName) || "java/lang/StringBuffer".equals(clsName)) {
        userValue = stringBufferSilliness(userValue, method);
    } else if ("java/lang/String".equals(clsName)) {
        userValue = stringSilliness(userValue, method, getSigConstantOperand());
    } else if ("equals(Ljava/lang/Object;)Z".equals(method + getSigConstantOperand())) {
        equalsSilliness(clsName);
    } else if ("java/lang/Boolean".equals(clsName) && "booleanValue".equals(method)) {
        booleanSilliness();
    } else if (("java/util/GregorianCalendar".equals(clsName) || "java/util/Calendar".equals(clsName))
            && ("after".equals(method) || "before".equals(method))) {
        calendarBeforeAfterSilliness();
    } else if ("java/util/Properties".equals(clsName)) {
        propertiesSilliness(method);
    } else if ("toString".equals(method) && "java/lang/Object".equals(clsName)) {
        defaultToStringSilliness();
    }
    return userValue;
}
/**
 * reports passing a constant negative bit index to BitSet accessors, which
 * always throws at runtime.
 *
 * @param methodName the invoked BitSet method's name
 */
private void bitSetSilliness(String methodName) {
    boolean takesIndex = "clear".equals(methodName)
            || "flip".equals(methodName)
            || "get".equals(methodName)
            || "set".equals(methodName);
    if (takesIndex && (stack.getStackDepth() > 0)) {
        Object index = stack.getStackItem(0).getConstant();
        if ((index instanceof Integer) && (((Integer) index).intValue() < 0)) {
            bugReporter.reportBug(new BugInstance(this, BugType.SPP_NEGATIVE_BITSET_ITEM.name(), NORMAL_PRIORITY)
                    .addClass(this)
                    .addMethod(this)
                    .addSourceLine(this));
        }
    }
}
/**
 * examines StringBuilder/StringBuffer.append() calls, reporting two string
 * literals appended back-to-back (they could be one constant), and builds an
 * "append:&lt;register&gt;:&lt;literal-marker&gt;" user value so later stores can
 * detect mutable-builder misuse.
 *
 * @param userValue the user value computed so far for this instruction
 * @param methodName the invoked method's name
 * @return the user value to tag the append result with, if any
 */
private String stringBufferSilliness(String userValue, String methodName) {
    if ("append".equals(methodName)) {
        if (stack.getStackDepth() > 1) {
            OpcodeStack.Item valItem = stack.getStackItem(0);
            OpcodeStack.Item sbItem = stack.getStackItem(1);
            Object constant = valItem.getConstant();
            // only non-empty string literals are interesting
            boolean argIsLiteralString = (constant instanceof String) && (((String) constant).length() > 0);
            // literals that look like static field values are likely deliberate
            argIsLiteralString = argIsLiteralString && !looksLikeStaticFieldValue((String) constant);
            if (argIsLiteralString) {
                String existingAppend = (String) sbItem.getUserValue();
                if (existingAppend != null) {
                    Matcher m = APPEND_PATTERN.matcher(existingAppend);
                    // the previous append was also a literal: two literals in a
                    // row could have been a single concatenated constant
                    if (m.matches() && LITERAL.equals(m.group(2))) {
                        bugReporter.reportBug(new BugInstance(this, BugType.SPP_DOUBLE_APPENDED_LITERALS.name(), NORMAL_PRIORITY)
                                .addClass(this)
                                .addMethod(this)
                                .addSourceLine(this));
                        // don't chain a second report off this same literal
                        argIsLiteralString = false;
                    }
                }
            }
            String literal = argIsLiteralString ? LITERAL : "";
            if (sbItem.getRegisterNumber() > -1) {
                // the builder lives in a register: remember which one
                userValue = "append:" + sbItem.getRegisterNumber() + ':' + literal;
            } else {
                // chained append: carry the originating register forward
                userValue = (String) sbItem.getUserValue();
                if (userValue != null) {
                    Matcher m = APPEND_PATTERN.matcher(userValue);
                    if (m.matches()) {
                        userValue = "append:" + m.group(1) + ':' + literal;
                    }
                }
            }
        }
    }
    return userValue;
}
/**
 * examines String method calls for a grab bag of silly usages: conversions
 * of literals, intern() on constants, useless or empty casing comparisons,
 * temporary trim() results, and toString() on a String.
 *
 * @param userValue the user value computed so far for this instruction
 * @param methodName the invoked String method's name
 * @param signature the invoked method's signature
 * @return the user value to tag the result with, if any
 */
private String stringSilliness(String userValue, String methodName, String signature) {
    // conversions (toLowerCase, trim, isEmpty, ...) applied directly to a
    // string literal are computable at compile time
    Integer stackOffset = methodsThatAreSillyOnStringLiterals.get(methodName + signature);
    if (stackOffset != null) {
        if (stack.getStackDepth() > stackOffset) {
            OpcodeStack.Item itm = stack.getStackItem(stackOffset.intValue());
            Object constant = itm.getConstant();
            // only fire on genuine literals, not field-backed constants
            if ((constant != null) && constant.getClass().equals(String.class) && (itm.getXField() == null)) {
                int priority = NORMAL_PRIORITY;
                if (Type.getArgumentTypes(getSigConstantOperand()).length > 0) {
                    //if an argument is passed in, it may be locale-specific
                    priority = LOW_PRIORITY;
                }
                bugReporter.reportBug(new BugInstance(this, BugType.SPP_CONVERSION_OF_STRING_LITERAL.name(), priority)
                        .addClass(this)
                        .addMethod(this)
                        .addSourceLine(this)
                        .addCalledMethod(this));
            }
        }
    }
    //not an elseif because the below cases might be in the set methodsThatAreSillyOnStringLiterals
    if ("intern".equals(methodName)) {
        // interning a constant outside a static initializer is pointless:
        // constants are interned by the JVM already
        String owningMethod = getMethod().getName();
        if (!Values.STATIC_INITIALIZER.equals(owningMethod))
        {
            if (stack.getStackDepth() > 0) {
                OpcodeStack.Item item = stack.getStackItem(0);
                if (item.getConstant() != null) {
                    bugReporter.reportBug(new BugInstance(this, BugType.SPP_INTERN_ON_CONSTANT.name(), NORMAL_PRIORITY)
                            .addClass(this)
                            .addMethod(this)
                            .addSourceLine(this));
                }
            }
        }
    }
    else if ("toCharArray".equals(methodName)) {
        // tag the array so a following constant index + CALOAD can be flagged
        userValue = "toCharArray";
    } else if ("toLowerCase".equals(methodName) || "toUpperCase".equals(methodName)) {
        // tag for detecting a following equalsIgnoreCase on the same value
        userValue = "IgnoreCase";
    } else if ("equalsIgnoreCase".equals(methodName) || "compareToIgnoreCase".equals(methodName)) {
        if (stack.getStackDepth() > 1) {
            OpcodeStack.Item item = stack.getStackItem(1);
            // casing the receiver before an IgnoreCase comparison is useless
            if ("IgnoreCase".equals(item.getUserValue())) {
                bugReporter.reportBug(new BugInstance(this, BugType.SPP_USELESS_CASING.name(), NORMAL_PRIORITY)
                        .addClass(this)
                        .addMethod(this)
                        .addSourceLine(this));
            }
            item = stack.getStackItem(0);
            String parm = (String)item.getConstant();
            // comparing against "" ignoring case is the same as equals("")
            if ("".equals(parm)) {
                bugReporter.reportBug(new BugInstance(this, BugType.SPP_EMPTY_CASING.name(), NORMAL_PRIORITY)
                        .addClass(this)
                        .addMethod(this)
                        .addSourceLine(this));
            }
        }
    } else if ("trim".equals(methodName)) {
        // tag for detecting length()/equals() called on a throwaway trim result
        userValue = "trim";
    } else if ("length".equals(methodName)) {
        if (stack.getStackDepth() > 0) {
            OpcodeStack.Item item = stack.getStackItem(0);
            if ("trim".equals(item.getUserValue())) {
                bugReporter.reportBug(new BugInstance(this, BugType.SPP_TEMPORARY_TRIM.name(), NORMAL_PRIORITY)
                        .addClass(this)
                        .addMethod(this)
                        .addSourceLine(this));
            }
        }
    } else if ("equals".equals(methodName)) {
        if (stack.getStackDepth() > 1) {
            OpcodeStack.Item item = stack.getStackItem(1);
            if ("trim".equals(item.getUserValue())) {
                bugReporter.reportBug(new BugInstance(this, BugType.SPP_TEMPORARY_TRIM.name(), NORMAL_PRIORITY)
                        .addClass(this)
                        .addMethod(this)
                        .addSourceLine(this));
            }
        }
    }
    else if ("toString".equals(methodName)) {
        // String.toString() returns this — always useless
        bugReporter.reportBug(new BugInstance(this, BugType.SPP_TOSTRING_ON_STRING.name(), NORMAL_PRIORITY)
                .addClass(this)
                .addMethod(this)
                .addSourceLine(this));
    }
    return userValue;
}
/**
 * reports equals() calls that are silly for the receiver type: equals on an
 * enum (use ==), or on a receiver listed in oddMissingEqualsClasses
 * (StringBuffer/StringBuilder).
 *
 * @param className the slashed class name of the equals() receiver's declared type
 */
private void equalsSilliness(String className) {
    try {
        JavaClass cls = Repository.lookupClass(className);
        if (cls.isEnum()) {
            bugReporter.reportBug(new BugInstance(this, BugType.SPP_EQUALS_ON_ENUM.name(), NORMAL_PRIORITY)
                    .addClass(this)
                    .addMethod(this)
                    .addSourceLine(this));
            return;
        }
        if (stack.getStackDepth() < 2) {
            return;
        }
        JavaClass receiver = stack.getStackItem(1).getJavaClass();
        if ((receiver != null) && oddMissingEqualsClasses.contains(receiver.getClassName())) {
            bugReporter.reportBug(new BugInstance(this, BugType.SPP_EQUALS_ON_STRING_BUILDER.name(), NORMAL_PRIORITY)
                    .addClass(this)
                    .addMethod(this)
                    .addSourceLine(this));
        }
    } catch (ClassNotFoundException cnfe) {
        bugReporter.reportMissingClass(cnfe);
    }
}
/**
 * looks for the instruction sequence before an IFEQ being repeated verbatim
 * right after it — i.e. the same boolean expression evaluated on both sides
 * of the branch, which suggests an invalid null-check pattern around a
 * Boolean.booleanValue() call.
 */
private void booleanSilliness() {
    if (lastPCs[0] != -1) {
        // range1 = instructions before the IFEQ; the same number of bytes
        // must follow the IFEQ for the pattern to possibly match
        int range1Size = lastPCs[2] - lastPCs[0];
        if (range1Size == (getNextPC() - lastPCs[3])) {
            byte[] bytes = getCode().getCode();
            // mask to an unsigned byte before comparing with the opcode value
            int ifeq = 0x000000FF & bytes[lastPCs[2]];
            if (ifeq == IFEQ) {
                int start1 = lastPCs[0];
                int start2 = lastPCs[3];
                // byte-for-byte comparison of the two instruction ranges
                boolean found = true;
                for (int i = 0; i < range1Size; i++) {
                    if (bytes[start1+i] != bytes[start2+i]) {
                        found = false;
                        break;
                    }
                }
                if (found) {
                    bugReporter.reportBug(new BugInstance(this, BugType.SPP_INVALID_BOOLEAN_NULL_CHECK.name(), NORMAL_PRIORITY)
                            .addClass(this)
                            .addMethod(this)
                            .addSourceLine(this));
                }
            }
        }
    }
}
/**
 * reports Calendar.before()/after() being handed an argument whose static
 * type is not a Calendar subclass; such comparisons always return false.
 */
private void calendarBeforeAfterSilliness() {
    if (stack.getStackDepth() <= 1) {
        return;
    }
    String argSig = stack.getStackItem(0).getSignature();
    //Rule out java.lang.Object as mergeJumps can throw away type info (BUG)
    if ("Ljava/lang/Object;".equals(argSig)
            || "Ljava/util/Calendar;".equals(argSig)
            || "Ljava/util/GregorianCalendar;".equals(argSig)) {
        return;
    }
    try {
        // strip the leading 'L' and trailing ';' to get the class name
        JavaClass argClass = Repository.lookupClass(argSig.substring(1, argSig.length() - 1));
        if (!argClass.instanceOf(calendarClass)) {
            bugReporter.reportBug(new BugInstance(this, BugType.SPP_INVALID_CALENDAR_COMPARE.name(), NORMAL_PRIORITY)
                    .addClass(this)
                    .addMethod(this)
                    .addSourceLine(this));
        }
    } catch (ClassNotFoundException cnfe) {
        bugReporter.reportMissingClass(cnfe);
    }
}
/**
 * reports toString() invoked on a concrete class that inherits the default
 * Object.toString(); each offending class is reported only once per run.
 *
 * @throws ClassNotFoundException if the receiver's class cannot be loaded
 */
private void defaultToStringSilliness() throws ClassNotFoundException {
    if (stack.getStackDepth() < 1) {
        return;
    }
    JavaClass receiver = stack.getStackItem(0).getJavaClass();
    if (receiver == null) {
        return;
    }
    String receiverName = receiver.getClassName();
    boolean concrete = !receiver.isInterface() && !receiver.isAbstract();
    boolean interesting = !"java.lang.Object".equals(receiverName) && !"java.lang.String".equals(receiverName);
    // add() is false when this class has already been reported
    if (concrete && interesting && toStringClasses.add(receiverName)) {
        bugReporter.reportBug(new BugInstance(this, BugType.SPP_NON_USEFUL_TOSTRING.name(),
                receiver.isFinal() ? NORMAL_PRIORITY : LOW_PRIORITY)
                .addClass(this)
                .addMethod(this)
                .addSourceLine(this));
    }
}
/**
 * reports fetching a value from the result of System.getProperties()
 * instead of calling System.getProperty() directly.
 *
 * @param methodName the invoked Properties method's name
 */
private void propertiesSilliness(String methodName) {
    boolean isGet = "get".equals(methodName) || "getProperty".equals(methodName);
    if (isGet && (stack.getStackDepth() > 1)) {
        OpcodeStack.Item receiver = stack.getStackItem(1);
        if ("getProperties".equals(receiver.getUserValue())) {
            bugReporter.reportBug(new BugInstance(this, BugType.SPP_USE_GETPROPERTY.name(), NORMAL_PRIORITY)
                    .addClass(this)
                    .addMethod(this)
                    .addSourceLine(this));
        }
    }
}
/**
 * Handles INVOKEINTERFACE: tags stack items produced by Map.keySet() and
 * List.iterator(), reports SPP_USE_CONTAINSKEY / SPP_USE_GET0 when those
 * tagged items are misused, and tags Collection.size() results.
 *
 * @param userValue the user value to attach to the resulting stack item
 * @return the (possibly updated) user value
 */
private String sawInvokeInterface(String userValue) {
    String className = getClassConstantOperand();
    if ("java/util/Map".equals(className)) {
        String method = getNameConstantOperand();
        if ("keySet".equals(method)) {
            userValue = "keySet";
        }
    } else if ("java/util/Set".equals(className)) {
        String method = getNameConstantOperand();
        if ("contains".equals(method) && (stack.getStackDepth() >= 2)) {
            // receiver of contains() is one below the argument
            OpcodeStack.Item item = stack.getStackItem(1);
            if ("keySet".equals(item.getUserValue())) {
                bugReporter.reportBug(new BugInstance(this, BugType.SPP_USE_CONTAINSKEY.name(), NORMAL_PRIORITY)
                        .addClass(this)
                        .addMethod(this)
                        .addSourceLine(this));
            }
        }
    } else if ("java/util/List".equals(className)) {
        String method = getNameConstantOperand();
        if ("iterator".equals(method)) {
            userValue = "iterator";
        }
    } else if ("java/util/Iterator".equals(className)) {
        String method = getNameConstantOperand();
        if ("next".equals(method) && (stack.getStackDepth() >= 1)) {
            OpcodeStack.Item item = stack.getStackItem(0);
            if ("iterator".equals(item.getUserValue())) {
                bugReporter.reportBug(new BugInstance(this, BugType.SPP_USE_GET0.name(), NORMAL_PRIORITY)
                        .addClass(this)
                        .addMethod(this)
                        .addSourceLine(this));
            }
        }
    }
    // deliberately not an else: size() on any collection interface
    // overrides any user value set above
    if (collectionInterfaces.contains(className)) {
        String method = getNameConstantOperand();
        if ("size".equals(method)) {
            userValue = "size";
        }
    }
    return userValue;
}
/**
 * Handles INVOKESPECIAL: flags StringBuffer/StringBuilder constructors
 * called with a suspicious int (probably a char used by mistake) or an
 * empty string, and BigDecimal constructors called with a double literal.
 */
private void sawInvokeSpecial() {
    String className = getClassConstantOperand();
    boolean isStringBuilderish = "java/lang/StringBuffer".equals(className)
            || "java/lang/StringBuilder".equals(className);
    if (isStringBuilderish) {
        if (!Values.CONSTRUCTOR.equals(getNameConstantOperand())) {
            return;
        }
        String signature = getSigConstantOperand();
        if ("(I)V".equals(signature)) {
            if ((lastOpcode == BIPUSH) && (stack.getStackDepth() > 0)) {
                Object o = stack.getStackItem(0).getConstant();
                if (o instanceof Integer) {
                    int parm = ((Integer) o).intValue();
                    // a printable code point (not '@', not a "round"
                    // multiple of 5 or 10) looks like an accidental char
                    boolean looksLikeChar = (parm > 32)
                            && (parm < 127)
                            && (parm != 64)
                            && ((parm % 10) != 0)
                            && ((parm % 5) != 0);
                    if (looksLikeChar) {
                        bugReporter.reportBug(new BugInstance(this, BugType.SPP_NO_CHAR_SB_CTOR.name(), LOW_PRIORITY)
                                .addClass(this)
                                .addMethod(this)
                                .addSourceLine(this));
                    }
                }
            }
        } else if ("(Ljava/lang/String;)V".equals(signature)
                && (stack.getStackDepth() > 0)) {
            String con = (String) stack.getStackItem(0).getConstant();
            if ("".equals(con)) {
                bugReporter.reportBug(new BugInstance(this, BugType.SPP_STRINGBUFFER_WITH_EMPTY_STRING.name(), NORMAL_PRIORITY)
                        .addClass(this)
                        .addMethod(this)
                        .addSourceLine(this));
            }
        }
    } else if ("java/math/BigDecimal".equals(className)
            && (stack.getStackDepth() > 0)) {
        Object constant = stack.getStackItem(0).getConstant();
        if (constant instanceof Double) {
            Double v = (Double) constant;
            // 0.0 and 1.0 are exactly representable, so they are fine
            if ((v != 0.0) && (v != 1.0)) {
                bugReporter.reportBug(new BugInstance(this, BugType.SPP_USE_BIGDECIMAL_STRING_CTOR.name(), NORMAL_PRIORITY)
                        .addClass(this)
                        .addMethod(this)
                        .addSourceLine(this));
            }
        }
    }
}
/**
 * Is the given string constant the value of one of this class's
 * static final String fields? The set of such values is computed
 * lazily on first use and cached in {@code staticConstants}.
 *
 * @param constant the string constant to look up
 * @return true if some static final String field has this value
 */
private boolean looksLikeStaticFieldValue(String constant) {
    if (staticConstants == null) {
        staticConstants = new HashSet<String>();
        int staticFinal = Constants.ACC_FINAL | Constants.ACC_STATIC;
        for (Field f : getClassContext().getJavaClass().getFields()) {
            boolean staticFinalString =
                    ((f.getAccessFlags() & staticFinal) == staticFinal)
                    && "Ljava/lang/String;".equals(f.getSignature());
            if (staticFinalString) {
                ConstantValue cv = f.getConstantValue();
                if (cv != null) {
                    staticConstants.add(getConstantPool().getConstantString(
                            cv.getConstantValueIndex(), Constants.CONSTANT_String));
                }
            }
        }
    }
    return staticConstants.contains(constant);
}
} |
package com.redhat.ceylon.model.typechecker.model;
import static com.redhat.ceylon.model.typechecker.model.SiteVariance.IN;
import static com.redhat.ceylon.model.typechecker.model.SiteVariance.OUT;
import static java.lang.Character.charCount;
import static java.lang.Character.isLowerCase;
import static java.util.Arrays.asList;
import static java.util.Collections.emptyList;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonList;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.redhat.ceylon.common.Backend;
import com.redhat.ceylon.common.BackendSupport;
public class ModelUtil {
// Shared immutable empty collections, reused to avoid allocation
// in the very common case of no type arguments.
static final List<Type> NO_TYPE_ARGS =
        Collections.<Type>emptyList();
// Empty map from type parameter to type argument.
static final Map<TypeParameter, Type> EMPTY_TYPE_ARG_MAP =
        Collections.<TypeParameter,Type>emptyMap();
// Empty map of use-site variance overrides.
static final Map<TypeParameter, SiteVariance> EMPTY_VARIANCE_MAP =
        emptyMap();
/**
 * Is the second scope contained by the first scope?
 *
 * @param outer the candidate containing scope
 * @param inner the candidate contained scope
 * @return true if walking inner's scope chain reaches outer
 */
public static boolean contains(Scope outer, Scope inner) {
    if (outer == null) {
        return false;
    }
    for (Scope s = inner; s != null; s = s.getScope()) {
        if (s.equals(outer)) {
            return true;
        }
    }
    return false;
}
/**
 * Get the nearest containing scope that is not a
 * ConditionScope.
 *
 * @return the scope, or null if only Package remains
 */
public static Scope getRealScope(Scope scope) {
    for (Scope s = scope;
            !(s instanceof Package);
            s = s.getContainer()) {
        if (!(s instanceof ConditionScope)) {
            return s;
        }
    }
    return null;
}
/**
 * Get the class or interface that "this" and "super"
 * refer to.
 *
 * @return the nearest enclosing ClassOrInterface, or null
 */
public static ClassOrInterface getContainingClassOrInterface(Scope scope) {
    for (Scope s = scope;
            !(s instanceof Package);
            s = s.getContainer()) {
        if (s instanceof ClassOrInterface) {
            return (ClassOrInterface) s;
        }
    }
    return null;
}
/**
 * Get the declaration that contains the specified declaration, if any.
 *
 * @return the nearest enclosing Declaration, or null for toplevels
 */
public static Declaration getContainingDeclaration(Declaration d) {
    if (d.isToplevel()) {
        return null;
    }
    for (Scope scope = d.getContainer();
            !(scope instanceof Package);
            scope = scope.getContainer()) {
        if (scope instanceof Declaration) {
            return (Declaration) scope;
        }
    }
    return null;
}
/**
 * Get the declaration that contains the specified scope, if any.
 *
 * @return the nearest enclosing Declaration, or null
 */
public static Declaration getContainingDeclarationOfScope(Scope scope) {
    for (Scope s = scope;
            !(s instanceof Package);
            s = s.getContainer()) {
        if (s instanceof Declaration) {
            return (Declaration) s;
        }
    }
    return null;
}
/**
 * Get the class or interface that "outer" refers to: the
 * second enclosing ClassOrInterface (the first one is
 * what "this" refers to).
 *
 * @return the type of the second enclosing class or
 *         interface, or null if there is none
 */
public static Type getOuterClassOrInterface(Scope scope) {
    // use a primitive boolean: the original boxed Boolean caused
    // needless allocation and auto-(un)boxing on every call
    boolean foundInner = false;
    while (!(scope instanceof Package)) {
        if (scope instanceof ClassOrInterface) {
            if (foundInner) {
                ClassOrInterface ci =
                        (ClassOrInterface) scope;
                return ci.getType();
            }
            else {
                // skip the innermost class: that's "this", not "outer"
                foundInner = true;
            }
        }
        scope = scope.getContainer();
    }
    return null;
}
/**
 * Convenience method to bind a single type argument
 * to a toplevel type declaration.
 *
 * @return the applied type, or null for a null declaration
 */
public static Type appliedType(
        TypeDeclaration declaration,
        Type typeArgument) {
    return declaration == null ? null :
            declaration.appliedType(null,
                    singletonList(typeArgument));
}
/**
 * Convenience method to bind a list of type arguments
 * to a toplevel type declaration.
 *
 * @return the applied type, or null for a null declaration
 */
public static Type appliedType(
        TypeDeclaration declaration,
        Type... typeArguments) {
    return declaration == null ? null :
            declaration.appliedType(null,
                    asList(typeArguments));
}
/** Can this declaration be found by name lookup? */
public static boolean isResolvable(Declaration declaration) {
    if (declaration.getName() == null) {
        return false;
    }
    if (declaration.isSetter()) {
        //return getters, not setters
        return false;
    }
    //don't return the type associated with an object dec
    return !declaration.isAnonymous();
}
/** Null-safe check for an overload abstraction. */
public static boolean isAbstraction(Declaration d) {
    if (d == null) {
        return false;
    }
    return d.isAbstraction();
}
/** Is this declaration not an overloaded version (null-safe)? */
public static boolean notOverloaded(Declaration d) {
    if (d == null || !d.isFunctional()) {
        return true;
    }
    return !d.isOverloaded() || d.isAbstraction();
}
/** Is this a concrete (non-abstraction, non-native) overloaded version? */
public static boolean isOverloadedVersion(Declaration decl) {
    if (decl == null) {
        return false;
    }
    return decl.isOverloaded()
        && !decl.isAbstraction()
        && !decl.isNative();
}
/**
 * Does the given declaration accept an invocation with
 * arguments of the given types? Delegates to the 4-arg
 * overload with abstract classes excluded.
 *
 * @param dec the candidate declaration
 * @param signature the argument types
 * @param ellipsis whether the last argument is spread
 */
static boolean hasMatchingSignature(
        Declaration dec,
        List<Type> signature, boolean ellipsis) {
    return hasMatchingSignature(dec,
            signature, ellipsis, true);
}
static boolean hasMatchingSignature(
Declaration dec,
List<Type> signature, boolean spread,
boolean excludeAbstractClasses) {
if (excludeAbstractClasses &&
dec instanceof Class &&
((Class) dec).isAbstract()) {
return false;
}
if (dec instanceof Functional) {
if (dec.isAbstraction()) {
return false;
}
Functional f = (Functional) dec;
Unit unit = dec.getUnit();
List<ParameterList> pls = f.getParameterLists();
if (pls!=null && !pls.isEmpty()) {
ParameterList pl = pls.get(0);
List<Parameter> params = pl.getParameters();
int size = params.size();
boolean hasSeqParam =
pl.hasSequencedParameter();
int sigSize = signature.size();
if (hasSeqParam) {
size
if (sigSize<size) {
return false;
}
}
else if (sigSize!=size) {
return false;
}
for (int i=0; i<size; i++) {
FunctionOrValue pm =
params.get(i).getModel();
if (pm==null) {
return false;
}
Type pdt =
pm.appliedReference(null,
NO_TYPE_ARGS)
.getFullType();
if (pdt==null) {
return false;
}
Type sdt = signature.get(i);
if (!matches(sdt, pdt, unit)) {
return false;
}
}
if (hasSeqParam) {
FunctionOrValue model =
params.get(size).getModel();
Type pdt =
model.appliedReference(null,
NO_TYPE_ARGS)
.getFullType();
if (pdt==null ||
pdt.getTypeArgumentList()
.isEmpty()) {
return false;
}
//Note: don't use Unit.getIterableType()
// because this gets called from
// model loader out-of-phase
Type ipdt =
pdt.getTypeArgumentList()
.get(0);
for (int i=size; i<sigSize; i++) {
if (spread && i==sigSize-1) {
Type sdt = signature.get(i);
Type isdt =
unit.getIteratedType(sdt);
if (!matches(isdt, ipdt, unit)) {
return false;
}
}
else {
Type sdt = signature.get(i);
if (!matches(sdt, ipdt, unit)) {
return false;
}
}
}
}
else if (spread) {
// if the method doesn't take sequenced
// params and we have an ellipsis let's
// not use it since we expect a variadic
// method
// TODO: this is basically wrong now that
// we can spread tuples
return false;
}
return true;
}
else {
return false;
}
}
else {
return false;
}
}
/**
 * Is an argument of the given type acceptable for a
 * parameter of the given type, for the purpose of
 * resolving overloads? Optionality is ignored, except
 * that a possibly-null argument never matches a
 * primitive-typed (non-optional) parameter.
 */
public static boolean matches(
        Type argType,
        Type paramType,
        Unit unit) {
    if (paramType==null || argType==null) {
        return false;
    }
    //Ignore optionality for resolving overloads, since
    //all Java parameters are treated as optional,
    //except primitive-typed parameters
    Type nvt = unit.getNullType();
    if (nvt.isSubtypeOf(argType) &&
            !nvt.isSubtypeOf(paramType)) {
        return false; //only for primitives
    }
    Type defParamType = unit.getDefiniteType(paramType);
    Type defArgType = unit.getDefiniteType(argType);
    Type nt = unit.getNullType();
    if (defArgType.isSubtypeOf(nt)) {
        //a definitely-null argument matches anything
        return true;
    }
    if (isTypeUnknown(defArgType) ||
            isTypeUnknown(defParamType)) {
        return false;
    }
    //compare erased types, falling back to comparing
    //underlying native type names
    if (!erase(defArgType, unit)
                .inherits(erase(defParamType, unit)) &&
            notUnderlyingTypesEqual(defParamType,
                    defArgType)) {
        return false;
    }
    return true;
}
/**
 * Do the two types NOT have the same underlying native
 * type name? Missing underlying types count as not equal.
 */
private static boolean notUnderlyingTypesEqual(
        Type paramType,
        Type sigType) {
    String sut = sigType.getUnderlyingType();
    String put = paramType.getUnderlyingType();
    if (sut == null || put == null) {
        return true;
    }
    return !sut.equals(put);
}
static boolean betterMatch(Declaration d, Declaration r,
List<Type> signature) {
if (d instanceof Functional &&
r instanceof Functional) {
Functional df = (Functional) d;
Functional rf = (Functional) r;
List<ParameterList> dpls = df.getParameterLists();
List<ParameterList> rpls = rf.getParameterLists();
if (dpls!=null && !dpls.isEmpty() &&
rpls!=null && !rpls.isEmpty()) {
ParameterList dpls0 = dpls.get(0);
ParameterList rpls0 = rpls.get(0);
List<Parameter> dpl = dpls0.getParameters();
List<Parameter> rpl = rpls0.getParameters();
int dplSize = dpl.size();
int rplSize = rpl.size();
//ignore sequenced parameters
boolean dhsp = dpls0.hasSequencedParameter();
boolean rhsp = rpls0.hasSequencedParameter();
//always prefer a signature without varargs
//over one with a varargs parameter
if (!dhsp && rhsp) {
return true;
}
if (dhsp && !rhsp) {
return false;
}
//ignore sequenced parameters
if (dhsp) dplSize
if (rhsp) rplSize
if (dplSize==rplSize) {
//if all parameters are of more specific
//or equal type, prefer it
Unit unit = d.getUnit();
for (int i=0; i<dplSize; i++) {
Type dplt =
dpl.get(i).getModel()
.appliedReference(null,
NO_TYPE_ARGS)
.getFullType();
Type paramType =
unit.getDefiniteType(dplt);
Type rplt =
rpl.get(i).getModel()
.appliedReference(null,
NO_TYPE_ARGS)
.getFullType();
Type otherType =
unit.getDefiniteType(rplt);
Type argumentType =
signature != null &&
signature.size() >= i ?
signature.get(i) :
null;
if (isTypeUnknown(otherType) ||
isTypeUnknown(paramType)) {
return false;
}
TypeDeclaration ptd =
erase(paramType, unit);
TypeDeclaration otd =
erase(otherType, unit);
if(paramType.isExactly(otherType) &&
supportsCoercion(ptd) &&
// do we have different scores?
hasWorseScore(
getCoercionScore(
argumentType,
paramType),
getCoercionScore(
argumentType,
otherType))) {
return false;
}
if (!ptd.inherits(otd) &&
notUnderlyingTypesEqual(
paramType,
otherType)) {
return false;
}
}
// check sequenced parameters last
if (dhsp && rhsp){
Type dplt =
dpl.get(dplSize).getModel()
.appliedReference(null,
NO_TYPE_ARGS)
.getFullType();
Type paramType =
unit.getDefiniteType(dplt);
Type rplt =
rpl.get(dplSize).getModel()
.appliedReference(null,
NO_TYPE_ARGS)
.getFullType();
Type otherType =
unit.getDefiniteType(rplt);
if (isTypeUnknown(otherType) ||
isTypeUnknown(paramType)) {
return false;
}
paramType =
unit.getIteratedType(paramType);
otherType =
unit.getIteratedType(otherType);
if (isTypeUnknown(otherType) ||
isTypeUnknown(paramType)) {
return false;
}
TypeDeclaration ptd =
erase(paramType, unit);
TypeDeclaration otd =
erase(otherType, unit);
if (paramType.isExactly(otherType) &&
supportsCoercion(ptd)) {
Type widerArgumentType =
getWiderArgumentType(
paramType,
signature,
dplSize);
// do we have different scores?
int pscore =
getCoercionScore(
widerArgumentType,
paramType);
int oscore =
getCoercionScore(
widerArgumentType,
otherType);
if (hasWorseScore(pscore, oscore)) {
return false;
}
}
if (!ptd.inherits(otd) &&
notUnderlyingTypesEqual(
paramType,
otherType)) {
return false;
}
}
return true;
}
}
}
return false;
}
/**
 * Coercion scoring only applies to the numeric types that
 * have several possible underlying primitive widths.
 */
private static boolean supportsCoercion(
        TypeDeclaration decl) {
    if (decl.isInteger()) {
        return true;
    }
    return decl.isFloat();
}
/**
 * Is coercion score A worse than score B? Positive scores
 * are truncations (bigger is worse), zero is a perfect
 * match, negative scores are widenings (more negative is
 * worse); any truncation is worse than any widening.
 */
private static boolean hasWorseScore(
        int underlyingTypeCoercionScoreA,
        int underlyingTypeCoercionScoreB) {
    int a = underlyingTypeCoercionScoreA;
    int b = underlyingTypeCoercionScoreB;
    if (a == b) {
        // same score or we don't know
        return false;
    }
    if (a > 0 && b > 0) {
        // both truncations: the bigger truncation is worse
        return a > b;
    }
    if (a > 0) {
        // A truncates, B widens or matches: A is worse
        return true;
    }
    if (a == 0) {
        // A is a perfect match: never worse
        return false;
    }
    if (b == 0) {
        // B is perfect but A is not: A is worse
        return true;
    }
    if (b > 0) {
        // A widens, B truncates: A is not worse
        return false;
    }
    // both widenings: A is worse if it widens more
    return a < b;
}
/**
 * Among the arguments flowing into a variadic numeric
 * parameter, find the one with the widest underlying
 * primitive representation (null underlying type counts
 * as the widest: long/double).
 *
 * @param paramType the (Integer or Float) parameter type
 * @param signature the argument types
 * @param startAt index of the first variadic argument
 * @return the widest argument type, or null
 */
private static Type getWiderArgumentType(
        Type paramType,
        List<Type> signature,
        int startAt) {
    if (startAt >= signature.size()) {
        return null;
    }
    TypeDeclaration decl = paramType.getDeclaration();
    if (decl.isInteger()) {
        int bestScore = 0;
        Type ret = null;
        for (int i=startAt; i<signature.size(); i++) {
            Type argType = signature.get(i);
            String underlyingType =
                    argType.getUnderlyingType();
            int score = 0;
            if (underlyingType == null ||
                    underlyingType.equals("long")) {
                return argType; // found the wider sort
            }
            else if (underlyingType.equals("int")) {
                score = 2;
            }
            else if (underlyingType.equals("short")) {
                score = 1;
            }
            // keep the widest argument type
            if (score > bestScore) {
                bestScore = score;
                ret = argType;
            }
        }
        return ret;
    }
    //CONSISTENCY FIX: use isFloat(), as supportsCoercion()
    //does, instead of decl.equals(unit.getFloatDeclaration())
    //(the unused Unit local has been removed)
    else if (decl.isFloat()) {
        int bestScore = 0;
        Type ret = null;
        for (int i=startAt; i<signature.size(); i++) {
            Type argType = signature.get(i);
            String underlyingType =
                    argType.getUnderlyingType();
            int score = 0;
            if (underlyingType == null ||
                    underlyingType.equals("double")) {
                return argType; // found the wider sort
            }
            else if (underlyingType.equals("float")) {
                score = 1;
            }
            // keep the widest argument type
            if (score > bestScore) {
                bestScore = score;
                ret = argType;
            }
        }
        return ret;
    }
    // not a coercible numeric type: not relevant
    return null;
}
/**
 * Returns 0 if there's no coercion, > 0 if we have to
 * truncate the argument type to fit the param type,
 * the higher for the worse truncation, or < 0 if we
 * have to widen the argument type to fit the param type,
 * the lower for the worse widening.
 */
private static int getCoercionScore(
        Type argumentType,
        Type paramType) {
    if (argumentType == null) {
        return 0;
    }
    // only consider types of Integer or Float
    if (paramType.isExactly(argumentType)) {
        String aType = argumentType.getUnderlyingType();
        String pType = paramType.getUnderlyingType();
        if (aType == null && pType == null) {
            return 0;
        }
        TypeDeclaration decl = paramType.getDeclaration();
        if (decl.isInteger()) {
            // a missing underlying type means the full-width long
            if (aType == null) {
                aType = "long";
            }
            if (pType == null) {
                pType = "long";
            }
            int aScore = getPrimitiveScore(aType);
            int bScore = getPrimitiveScore(pType);
            /*
             * aType aTypeScore pType pTypeScore score
             * short 0          short 0          0
             * short 0          int   1          -1 (widening)
             * short 0          long  2          -2 (widening)
             * int   1          short 0          1 (truncation)
             * int   1          int   1          0
             * int   1          long  2          -1 (widening)
             * long  2          short 0          2 (truncation)
             * long  2          int   1          1 (truncation)
             * long  2          long  2          0
             */
            return aScore - bScore;
        }
        //CONSISTENCY FIX: use isFloat(), as
        //supportsCoercion() does, instead of
        //decl.equals(unit.getFloatDeclaration())
        else if (decl.isFloat()) {
            // a missing underlying type means the full-width double
            if (aType == null) {
                aType = "double";
            }
            if (pType == null) {
                pType = "double";
            }
            int aScore = getPrimitiveScore(aType);
            int bScore = getPrimitiveScore(pType);
            /*
             * aType  aTypeScore pType  pTypeScore score
             * float  0          float  0          0
             * float  0          double 1          -1 (widening)
             * double 1          float  0          1 (truncation)
             * double 1          double 1          0
             */
            return aScore - bScore;
        }
    }
    // no truncation for the rest
    return 0;
}
/**
 * Rank a primitive type name by width within its numeric
 * family: long=2, int/double=1, short/float (and anything
 * unrecognized)=0.
 */
private static int getPrimitiveScore(String underlyingType) {
    if (underlyingType.equals("long")) {
        return 2;
    }
    boolean midWidth =
            underlyingType.equals("int") ||
            underlyingType.equals("double");
    return midWidth ? 1 : 0;
}
static boolean strictlyBetterMatch(Declaration d, Declaration r) {
if (d instanceof Functional &&
r instanceof Functional) {
Functional fd = (Functional) d;
Functional fr = (Functional) r;
List<ParameterList> dpls = fd.getParameterLists();
List<ParameterList> rpls = fr.getParameterLists();
if (dpls!=null && !dpls.isEmpty() &&
rpls!=null && !rpls.isEmpty()) {
ParameterList dpls0 = dpls.get(0);
ParameterList rpls0 = rpls.get(0);
List<Parameter> dpl = dpls0.getParameters();
List<Parameter> rpl = rpls0.getParameters();
int dplSize = dpl.size();
int rplSize = rpl.size();
//ignore sequenced parameters
boolean dhsp = dpls0.hasSequencedParameter();
boolean rhsp = rpls0.hasSequencedParameter();
//always prefer a signature without varargs
//over one with a varargs parameter
if (!dhsp && rhsp) {
return true;
}
if (dhsp && !rhsp) {
return false;
}
//ignore sequenced parameters
if (dhsp) dplSize
if (rhsp) rplSize
if (dplSize==rplSize) {
//if all parameters are of more specific
//or equal type, prefer it
boolean atLeastOneBetter = false;
Unit unit = d.getUnit();
for (int i=0; i<dplSize; i++) {
Type dplt =
dpl.get(i).getModel()
.appliedReference(null,
NO_TYPE_ARGS)
.getFullType();
Type paramType =
unit.getDefiniteType(dplt);
Type rplt =
rpl.get(i).getModel()
.appliedReference(null,
NO_TYPE_ARGS)
.getFullType();
Type otherType =
unit.getDefiniteType(rplt);
if (isTypeUnknown(otherType) ||
isTypeUnknown(paramType)) {
return false;
}
TypeDeclaration ptd =
erase(paramType, unit);
TypeDeclaration otd =
erase(otherType, unit);
if (!ptd.inherits(otd) &&
notUnderlyingTypesEqual(
paramType, otherType)) {
return false;
}
if (ptd.inherits(otd) &&
!otd.inherits(ptd) &&
notUnderlyingTypesEqual(
paramType, otherType)) {
atLeastOneBetter = true;
}
}
// check sequenced parameters last
if (dhsp && rhsp) {
Type dplt =
dpl.get(dplSize).getModel()
.appliedReference(null,
NO_TYPE_ARGS)
.getFullType();
Type paramType =
unit.getDefiniteType(dplt);
Type rplt =
rpl.get(dplSize).getModel()
.appliedReference(null,
NO_TYPE_ARGS)
.getFullType();
Type otherType =
unit.getDefiniteType(rplt);
if (isTypeUnknown(otherType) ||
isTypeUnknown(paramType)) {
return false;
}
paramType =
unit.getIteratedType(paramType);
otherType =
unit.getIteratedType(otherType);
if (isTypeUnknown(otherType) ||
isTypeUnknown(paramType)) {
return false;
}
TypeDeclaration ptd =
erase(paramType, unit);
TypeDeclaration otd =
erase(otherType, unit);
if (!ptd.inherits(otd) &&
notUnderlyingTypesEqual(
paramType,
otherType)) {
return false;
}
if (ptd.inherits(otd) &&
!otd.inherits(ptd) &&
notUnderlyingTypesEqual(
paramType,
otherType)) {
atLeastOneBetter = true;
}
}
return atLeastOneBetter;
}
}
}
return false;
}
/** Does the declaration have exactly the given (non-null) name? */
public static boolean isNamed(String name, Declaration d) {
    String dname = d.getName();
    if (dname == null) {
        return false;
    }
    return dname.equals(name);
}
/**
 * The type declaration a parameter/argument type "erases"
 * to for Java-style overload resolution: type parameters
 * erase to their bound, unions and intersections to
 * Object, with a special case that drops Basic from a
 * binary intersection.
 */
static TypeDeclaration erase(Type paramType, Unit unit) {
    if (paramType.isTypeParameter()) {
        if (paramType.getSatisfiedTypes().isEmpty()) {
            //no upper bound: erase to the extended (class) bound
            Type et =
                    paramType.getExtendedType();
            return et==null ? null :
                et.getDeclaration();
        }
        else {
            //TODO: Is this actually correct?
            //      What is Java's rule here?
            Type st =
                    paramType.getSatisfiedTypes()
                        .get(0);
            return st==null ? null :
                st.getDeclaration();
        }
    }
    else if (paramType.isUnion()) {
        //TODO: this is very sucky, cos in theory a
        //      union might be assignable to the
        //      parameter type with a typecast
        return unit.getObjectDeclaration();
    }
    else if (paramType.isIntersection()) {
        List<Type> sts = paramType.getSatisfiedTypes();
        if (sts.size()==2) {
            //attempt to eliminate Basic from the
            //intersection - very useful for anonymous
            //classes, whose denotableType is often an
            //intersection with Basic
            Type first = sts.get(0);
            Type second = sts.get(1);
            if (first!=null && first.isBasic()) {
                return erase(second, unit);
            }
            else if (second!=null && second.isBasic()) {
                return erase(first, unit);
            }
        }
        //TODO: this is very sucky, cos in theory an
        //      intersection might be assignable to the
        //      parameter type with a typecast
        return unit.getObjectDeclaration();
    }
    else {
        return paramType.getDeclaration();
    }
}
/**
 * Match the name of the given declaration to the given
 * pattern. A name matches if:
 *
 * - it starts with the pattern, ignoring case, or
 * - the pattern consists of all uppercase after the
 *   first character, and its uppercase "humps" match
 *   the pattern.
 */
public static boolean isNameMatching(
        String startingWith, Declaration d) {
    //delegate to the String overload with the declaration's name
    return isNameMatching(startingWith, d.getName());
}
/**
 * Match the alias of the given import to the given
 * pattern, using the same rules as for declaration names.
 */
public static boolean isNameMatching(
        String startingWith, Import i) {
    return isNameMatching(startingWith, i.getAlias());
}
/**
 * Match a name to a pattern: either a case-insensitive
 * prefix match, or a camel-hump match where the pattern's
 * code points must align with the name's "humps".
 * Operates on code points, not chars, so supplementary
 * characters are handled.
 */
public static boolean isNameMatching(
        String startingWith, String name) {
    //an empty pattern matches anything
    if (startingWith==null || startingWith.isEmpty()) {
        return true;
    }
    //an empty name matches nothing
    if (name==null || name.isEmpty()) {
        return false;
    }
    int nameLength = name.length();
    int startingWithLength = startingWith.length();
    if (nameLength<startingWithLength) {
        return false;
    }
    //fast path: case-insensitive prefix match
    if (name.regionMatches(true,0,startingWith,0,startingWithLength)) {
        return true;
    }
    //the first code points must agree exactly
    int c = startingWith.codePointAt(0);
    int d = name.codePointAt(0);
    if (c!=d) {
        return false;
    }
    //camel hump matching, starting from second character:
    int i=1, j=1;
    while (i<startingWithLength) {
        if (j>=nameLength) {
            return false;
        }
        //consume matching lowercase code points of the
        //pattern against the name
        while (i<startingWithLength &&
                isLowerCase(c=startingWith.codePointAt(i))) {
            d = name.codePointAt(j);
            if (c==d) {
                i+=charCount(c);
                j+=charCount(d);
                if (i>=startingWithLength) {
                    return true;
                }
                if (j>=nameLength) {
                    return false;
                }
            }
            else {
                return false;
            }
        }
        //skip the rest of the name's current hump
        //(its remaining lowercase code points)
        while (j<nameLength &&
                isLowerCase(d=name.codePointAt(j))) {
            j+=charCount(d);
            if (j>=nameLength) {
                return false;
            }
        }
        //the next hump initials must agree exactly
        c = startingWith.codePointAt(i);
        d = name.codePointAt(j);
        i+=charCount(c);
        j+=charCount(d);
        if (d!=c) {
            return false;
        }
    }
    return true;
}
/**
* Given a declaration, a list of type arguments to the
* declaration, and a receiving type, collect together
* all interesting type arguments. The resulting map
* includes all type arguments from the receiving type
* and all its qualifying types. That's useful, because
* {@link Type.Substitution} works with a single
* aggregated map, and so for performance
* {@link Type#substitute(Type)} and
* {@link Type#substitute(TypedReference)}
* assume that the given type or reference holds such a
* single aggregated map.
*
* @return a map of type parameter to type argument
*
* @param declaration a declaration
* @param receivingType the receiving produced type
* of which the declaration is a member
* @param typeArguments all the explicit or inferred
* type arguments of the declaration, including
* those from qualifying types
*/
public static Map<TypeParameter,Type>
getTypeArgumentMap(Declaration declaration,
        Type receivingType,
        List<Type> typeArguments) {
    List<TypeParameter> typeParameters =
            getTypeParameters(declaration);
    int count = countTypeParameters(receivingType,
            typeParameters);
    //share the immutable empty map when there is
    //nothing to aggregate
    if (count==0) {
        return EMPTY_TYPE_ARG_MAP;
    }
    return aggregateTypeArguments(receivingType,
            typeArguments, typeParameters, count);
}
/**
 * Build the aggregated type-argument map: all arguments
 * from the receiving type and its qualifying types, plus
 * the declaration's own arguments zipped with its
 * type parameters.
 */
private static Map<TypeParameter, Type>
aggregateTypeArguments(Type receivingType,
        List<Type> typeArguments,
        List<TypeParameter> typeParameters,
        int count) {
    Map<TypeParameter,Type> map =
            new HashMap<TypeParameter,Type>
                (count);
    //make sure we collect all type arguments
    //from the whole qualified type!
    if (receivingType!=null) {
        if (receivingType.isIntersection()) {
            for (Type st:
                    receivingType.getSatisfiedTypes()) {
                for (Type dt = st; dt!=null;
                        dt = dt.getQualifyingType()) {
                    map.putAll(dt.getTypeArguments());
                }
            }
        }
        else {
            for (Type dt = receivingType; dt!=null;
                    dt = dt.getQualifyingType()) {
                map.putAll(dt.getTypeArguments());
            }
        }
    }
    //now turn the type argument tuple into a
    //map from type parameter to argument
    if (typeArguments!=null) {
        int limit = Math.min(typeParameters.size(),
                typeArguments.size());
        for (int i=0; i<limit; i++) {
            map.put(typeParameters.get(i),
                    typeArguments.get(i));
        }
    }
    return map;
}
public static Map<TypeParameter,SiteVariance>
getVarianceMap(Declaration declaration,
        Type receivingType,
        List<SiteVariance> variances) {
    //no explicit use-site variances at all
    if (variances==null) {
        return EMPTY_VARIANCE_MAP;
    }
    List<TypeParameter> typeParameters =
            getTypeParameters(declaration);
    int count = countTypeParameters(receivingType,
            typeParameters);
    //share the immutable empty map when there is
    //nothing to aggregate
    if (count==0) {
        return EMPTY_VARIANCE_MAP;
    }
    return aggregateVariances(receivingType,
            variances, typeParameters);
}
/**
 * Build the aggregated use-site variance map: all
 * variance overrides from the receiving type and its
 * qualifying types, plus the given variances zipped with
 * the declaration's type parameters (nulls skipped).
 */
private static Map<TypeParameter, SiteVariance>
aggregateVariances(Type receivingType,
        List<SiteVariance> variances,
        List<TypeParameter> typeParameters) {
    Map<TypeParameter,SiteVariance> map =
            new HashMap<TypeParameter,SiteVariance>();
    //make sure we collect all type arguments
    //from the whole qualified type!
    if (receivingType!=null) {
        if (receivingType.isIntersection()) {
            for (Type st:
                    receivingType.getSatisfiedTypes()) {
                for (Type dt = st; dt!=null;
                        dt = dt.getQualifyingType()) {
                    map.putAll(dt.getVarianceOverrides());
                }
            }
        }
        else {
            for (Type dt = receivingType; dt!=null;
                    dt = dt.getQualifyingType()) {
                map.putAll(dt.getVarianceOverrides());
            }
        }
    }
    int limit = Math.min(typeParameters.size(),
            variances.size());
    for (int i=0; i<limit; i++) {
        SiteVariance var = variances.get(i);
        if (var!=null) {
            map.put(typeParameters.get(i), var);
        }
    }
    return map;
}
/**
 * Estimate the size of the aggregated type-argument map:
 * the declaration's own type parameters plus all
 * arguments contributed by the qualified receiving type.
 */
private static int countTypeParameters(
        Type receivingType,
        List<TypeParameter> typeParameters) {
    int count = typeParameters.size();
    //make sure we count all type arguments
    //from the whole qualified type!
    if (receivingType!=null) {
        if (receivingType.isIntersection()) {
            for (Type st:
                    receivingType.getSatisfiedTypes()) {
                for (Type dt = st; dt!=null;
                        dt = dt.getQualifyingType()) {
                    count += dt.getTypeArguments().size();
                }
            }
        }
        else {
            for (Type dt = receivingType; dt!=null;
                    dt = dt.getQualifyingType()) {
                count += dt.getTypeArguments().size();
            }
        }
    }
    return count;
}
/** The declaration's type parameters, or an empty list for non-generics. */
public static List<TypeParameter> getTypeParameters(
        Declaration declaration) {
    if (!(declaration instanceof Generic)) {
        return emptyList();
    }
    Generic g = (Generic) declaration;
    return g.getTypeParameters();
}
/** A fresh list containing the given list's elements plus one trailing element. */
static <T> List<T> list(List<T> list, T element) {
    List<T> extended = new ArrayList<T>(list.size()+1);
    extended.addAll(list);
    extended.add(element);
    return extended;
}
/**
 * Helper method for eliminating duplicate types from
 * lists of types that form a union type, taking into
 * account that a subtype is a "duplicate" of its
 * supertype.
 */
public static void addToUnion(List<Type> list,
        Type pt) {
    if (pt==null ||
            !list.isEmpty() &&
            pt.isNothing()) {
        //Nothing contributes nothing to a non-empty union
        return;
    }
    else if (pt.isAnything()) {
        //Anything subsumes every other case type
        list.clear();
        list.add(pt);
    }
    else if (pt.isUnion()) {
        //flatten a nested union into this one
        List<Type> caseTypes =
                pt.getCaseTypes();
        for ( int i=0, size=caseTypes.size();
                i<size; i++ ) {
            Type t = caseTypes.get(i);
            addToUnion(list, t.substitute(pt));
        }
    }
    else if (pt.isWellDefined()) {
        boolean add=true;
        for (int i=0; i<list.size(); i++) {
            Type t = list.get(i);
            if (pt.isSubtypeOf(t)) {
                //already covered by an existing case
                add=false;
                break;
            }
            else if (pt.isSupertypeOf(t)) {
                //subsumes an existing case: drop it
                list.remove(i);
                i--; // redo this index
            }
        }
        if (add) {
            list.add(pt);
        }
    }
}
/**
 * Helper method for eliminating duplicate types from
 * lists of types that form an intersection type, taking
 * into account that a supertype is a "duplicate" of its
 * subtype.
 */
public static void addToIntersection(List<Type> list,
        Type type, Unit unit) {
    if (type==null ||
            !list.isEmpty() &&
            type.isAnything()) {
        //Anything contributes nothing to a non-empty
        //intersection
        return;
    }
    else if (type.isNothing()) {
        //Nothing subsumes all other satisfied types
        list.clear();
        list.add(type);
    }
    else if (type.isIntersection()) {
        //flatten a nested intersection into this one
        List<Type> satisfiedTypes =
                type.getSatisfiedTypes();
        for (int i=0,
                size=satisfiedTypes.size();
                i<size; i++) {
            Type t = satisfiedTypes.get(i);
            addToIntersection(list, t, unit);
        }
    }
    else {
        if (type.isWellDefined()) {
            TypeDeclaration dec = type.getDeclaration();
            for (int i=0; i<list.size(); i++) {
                Type t = list.get(i);
                if (t.isSubtypeOf(type)) {
                    //already covered by an existing type
                    return;
                }
                else if (type.isSubtypeOf(t)) {
                    //subsumes an existing type: drop it
                    list.remove(i);
                    i--; // redo this index
                }
                else if (disjoint(type, t, unit)) {
                    //disjoint types intersect to Nothing
                    list.clear();
                    list.add(unit.getNothingType());
                    return;
                }
                else {
                    if (type.isClassOrInterface() &&
                            t.isClassOrInterface() &&
                            t.getDeclaration().equals(dec) &&
                            !type.containsUnknowns() &&
                            !t.containsUnknowns()) {
                        //canonicalize a type of form
                        //T<InX,OutX>&T<InY,OutY> to
                        //T<InX|InY,OutX&OutY>
                        Type pi =
                                principalInstantiation(
                                        dec, type, t,
                                        unit);
                        if (!pi.containsUnknowns()) {
                            list.remove(i);
                            list.add(pi);
                            return;
                        }
                    }
                }
            }
            if (list.size()>1) {
                //it is possible to have a type that is
                //a supertype of the intersection, even
                //though it is not a supertype of any of
                //the intersected types!
                Type t = canonicalIntersection(list, unit);
                if (type.isSupertypeOf(t)) {
                    return;
                }
            }
            list.add(type);
        }
    }
}
/**
 * Are the given types provably disjoint, i.e. is their
 * intersection empty?
 *
 * @param p the first type
 * @param q the second type
 * @param unit the current unit
 *
 * @return true if the types are disjoint
 */
private static boolean disjoint(Type p, Type q,
        Unit unit) {
    if (q.getDeclaration()
            .isDisjoint(p.getDeclaration())) {
        return true;
    }
    //we have to resolve aliases here, or computing
    //supertype declarations gets incredibly slow
    //for the big stack of union type aliases in
    //ceylon.ast
    Type ps = p.resolveAliases();
    Type qs = q.resolveAliases();
    if (emptyMeet(ps, qs, unit)) {
        return true;
    }
    return hasEmptyIntersectionOfInvariantInstantiations(ps, qs);
}
/**
* implement the rule that Foo&Bar==Nothing if
* here exists some enumerated type Baz with
*
* Baz of Foo | Bar
*
* (the intersection of disjoint types is empty)
*
* @param type a type which might be disjoint from
* a list of other given types
* @param list the list of other types
* @param unit
*
* @return true of the given type was disjoint from
* the given list of types
*/
/*private static boolean reduceIfDisjoint(Type type,
List<Type> list, Unit unit) {
if (list.isEmpty()) {
return false;
}
TypeDeclaration typeDec = type.getDeclaration();
List<TypeDeclaration> supertypes =
typeDec.getSupertypeDeclarations();
for (int i=0, l=supertypes.size(); i<l; i++) {
TypeDeclaration supertype =
supertypes.get(i);
List<Type> cts = supertype.getCaseTypes();
if (cts!=null) {
TypeDeclaration ctd=null;
for (int cti=0,
ctl=cts.size();
cti<ctl;
cti++) {
TypeDeclaration ct =
cts.get(cti)
.getDeclaration();
if (typeDec.inherits(ct)) {
ctd = ct;
break;
}
}
if (ctd!=null) {
for (int cti=0, ctl=cts.size();
cti<ctl;
cti++) {
TypeDeclaration ct =
cts.get(cti)
.getDeclaration();
if (ct!=ctd) {
for (int ti=0,
tl=list.size();
ti<tl;
ti++) {
Type t = list.get(ti);
if (t.getDeclaration()
.inherits(ct)) {
return true;
}
}
}
}
}
}
}
return false;
}*/
/**
* The meet of two classes unrelated by inheritance,
* or of Null with an interface type is empty. The meet
* of an anonymous class with a type to which it is not
* assignable is empty.
*/
private static boolean emptyMeet(
Type p, Type q, Unit unit) {
if (p==null || q==null) {
return false;
}
if (p.isNothing() || q.isNothing()) {
return true;
}
TypeDeclaration pd = p.getDeclaration();
TypeDeclaration qd = q.getDeclaration();
if (p.isTypeParameter()) {
p = canonicalIntersection(
p.getSatisfiedTypes(),
unit);
pd = p.getDeclaration();
}
if (q.isTypeParameter()) {
q = canonicalIntersection(
q.getSatisfiedTypes(),
unit);
qd = q.getDeclaration();
}
if (q.isIntersection()) {
for (Type t: q.getSatisfiedTypes()) {
if (emptyMeet(p,t,unit)) {
return true;
}
}
return false;
}
if (p.isIntersection()) {
for (Type t: p.getSatisfiedTypes()) {
if (emptyMeet(q,t,unit)) {
return true;
}
}
return false;
}
if (q.isUnion()) {
for (Type t: q.getCaseTypes()) {
if (!emptyMeet(p,t,unit)) {
return false;
}
}
return true;
}
else if (qd.getCaseTypes()!=null) {
boolean all = true;
for (Type t: qd.getCaseTypes()) {
if (t.getDeclaration().isSelfType() ||
!emptyMeet(p,t,unit)) {
all = false;
break;
}
}
if (all) return true;
}
if (p.isUnion()) {
for (Type t: p.getCaseTypes()) {
if (!emptyMeet(q,t,unit)) {
return false;
}
}
return true;
}
else if (p.getCaseTypes()!=null) {
boolean all = true;
for (Type t: pd.getCaseTypes()) {
if (t.getDeclaration().isSelfType() ||
!emptyMeet(q,t,unit)) {
all = false;
break;
}
}
if (all) return true;
}
if (p.isClass() && q.isClass() ||
p.isInterface() && q.isNull() ||
q.isInterface() && p.isNull()) {
if (!qd.inherits(pd) &&
!pd.inherits(qd)) {
return true;
}
}
if (pd.isFinal()) {
if (pd.getTypeParameters().isEmpty() &&
!q.involvesTypeParameters() &&
!p.isSubtypeOf(q) &&
!(qd instanceof UnknownType)) {
return true;
}
if (q.isClassOrInterface() &&
!pd.inherits(qd)) {
return true;
}
}
if (qd.isFinal()) {
if (qd.getTypeParameters().isEmpty() &&
!p.involvesTypeParameters() &&
!q.isSubtypeOf(p) &&
!(p.isUnknown())) {
return true;
}
if (p.isClassOrInterface() &&
!qd.inherits(pd)) {
return true;
}
}
// Interface ed = unit.getEmptyDeclaration();
// Interface id = unit.getIterableDeclaration();
// if (pd.inherits(ed) && qd.inherits(id) &&
// unit.isNonemptyIterableType(q) ||
// pd.inherits(id) && qd.inherits(ed) &&
// unit.isNonemptyIterableType(p)) {
// return true;
Interface st = unit.getSequentialDeclaration();
if (q.isClassOrInterface() &&
pd.inherits(st) && !qd.inherits(st) &&
!st.inherits(qd) ||
p.isClassOrInterface() &&
qd.inherits(st) && !pd.inherits(st) &&
!st.inherits(pd) &&
!p.involvesTypeParameters()) {
return true;
}
Interface nst = unit.getSequenceDeclaration();
if (pd.inherits(nst) && qd.inherits(st) ||
qd.inherits(nst) && pd.inherits(st)) {
Type pet = unit.getSequentialElementType(p);
Type qet = unit.getSequentialElementType(q);
if (emptyMeet(pet, qet, unit)) {
return true;
}
}
Class td = unit.getTupleDeclaration();
if (pd.inherits(td) && qd.inherits(td)) {
List<Type> pal = p.getTypeArgumentList();
List<Type> qal = q.getTypeArgumentList();
if (pal.size()>=3 && qal.size()>=3) {
if (emptyMeet(pal.get(1), qal.get(1), unit) ||
emptyMeet(pal.get(2), qal.get(2), unit)) {
return true;
}
}
}
if (pd.inherits(td) && qd.inherits(st)) {
List<Type> pal = p.getTypeArgumentList();
Type qet = unit.getSequentialElementType(q);
if (pal.size()>=3) {
if (emptyMeet(pal.get(1), qet, unit) ||
emptyMeet(pal.get(2),
unit.getSequentialType(qet),
unit)) {
return true;
}
}
}
if (qd.inherits(td) && pd.inherits(st)) {
List<Type> qal = q.getTypeArgumentList();
Type pet = unit.getSequentialElementType(p);
if (qal.size()>=3) {
if (emptyMeet(qal.get(1), pet, unit) ||
emptyMeet(qal.get(2),
unit.getSequentialType(pet),
unit)) {
return true;
}
}
}
return false;
}
/**
* Given two instantiations of a qualified type
* constructor, determine the qualifying type of the
* principal instantiation of that type constructor for
* the intersection of the two types.
*
* @param p the first instantiation
* @param q the second instantiation
* @param td the type constructor
*/
static Type principalQualifyingType(
Type p, Type q, Declaration td, Unit unit) {
Type pqt = p.getQualifyingType();
Type qqt = q.getQualifyingType();
Scope tdc = td.getContainer();
if (pqt!=null && qqt!=null) {
if (tdc instanceof TypeDeclaration) {
TypeDeclaration qtd = (TypeDeclaration) tdc;
Type pst = pqt.getSupertype(qtd);
Type qst = qqt.getSupertype(qtd);
if (pst!=null && qst!=null) {
return principalInstantiation(qtd, pst, qst,
unit);
}
}
else {
if (pqt.isExactly(qqt)) {
return pqt;
}
}
}
return null;
}
/**
* Determine if a type of form X<P>&X<Q> is equivalent
* to Nothing where X<T> is invariant in T.
*
* @param p the argument type P
* @param q the argument type Q
*/
private static boolean hasEmptyIntersectionOfInvariantInstantiations(
Type p, Type q) {
List<TypeDeclaration> pstds =
p.getDeclaration()
.getSupertypeDeclarations();
List<TypeDeclaration> qstds =
q.getDeclaration()
.getSupertypeDeclarations();
Set<TypeDeclaration> set =
new HashSet<TypeDeclaration>
(pstds.size()+qstds.size());
set.addAll(pstds);
set.retainAll(qstds);
for (TypeDeclaration std: pstds) {
Type pst = null;
Type qst = null;
for (TypeParameter tp: std.getTypeParameters()) {
if (tp.isInvariant()) {
if (pst==null) {
pst = p.getSupertype(std);
}
if (qst==null) {
qst = q.getSupertype(std);
}
if (pst!=null && qst!=null) {
Type psta =
pst.getTypeArguments()
.get(tp);
Type qsta =
qst.getTypeArguments()
.get(tp);
//TODO: why isWellDefined() instead of isTypeUnknown() ?
if (psta!=null &&
psta.isWellDefined() &&
!pst.involvesTypeParameters() &&
qsta!=null &&
qsta.isWellDefined() &&
!qst.involvesTypeParameters()) {
boolean psti =
pst.isInvariant(tp);
boolean pstcov =
pst.isCovariant(tp);
boolean pstcontra =
pst.isContravariant(tp);
boolean qsti =
qst.isInvariant(tp);
boolean qstcov =
qst.isCovariant(tp);
boolean qstcontra =
qst.isContravariant(tp);
if (psti && qsti &&
!psta.isExactly(qsta) ||
pstcov && qsti &&
!qsta.isSubtypeOf(psta) ||
qstcov && psti &&
!psta.isSubtypeOf(qsta) ||
pstcontra && qsti &&
!psta.isSubtypeOf(qsta) ||
qstcontra && psti &&
!qsta.isSubtypeOf(psta)) {
return true;
}
}
}
}
}
}
return false;
}
public static String formatPath(List<String> path,
char separator) {
StringBuilder sb = new StringBuilder();
for (int i=0; i<path.size(); i++) {
String pathPart = path.get(i);
if (! pathPart.isEmpty()) {
sb.append(pathPart);
if (i<path.size()-1) sb.append(separator);
}
}
return sb.toString();
}
    public static String formatPath(List<String> path) {
        // default separator is the '.' of a qualified name
        return formatPath(path, '.');
    }
/**
* Form the union of the given types, eliminating
* duplicates.
*/
public static Type unionType(
Type lhst, Type rhst, Unit unit) {
List<Type> list = new ArrayList<Type>(2);
addToUnion(list, rhst);
addToUnion(list, lhst);
UnionType ut = new UnionType(unit);
ut.setCaseTypes(list);
return ut.getType();
}
/**
* Form the intersection of the given types,
* canonicalizing, and eliminating duplicates.
*/
public static Type intersectionType(
Type lhst, Type rhst, Unit unit) {
Type simpleIntersection =
getSimpleIntersection(lhst, rhst);
if (simpleIntersection != null) {
return simpleIntersection;
}
List<Type> list = new ArrayList<Type>(2);
addToIntersection(list, rhst, unit);
addToIntersection(list, lhst, unit);
IntersectionType it = new IntersectionType(unit);
it.setSatisfiedTypes(list);
return it.canonicalize().getType();
}
/**
* Form the union of the given types, without
* eliminating duplicates.
*/
public static Type union(
List<Type> types, Unit unit) {
if (types.size()==1) {
return types.get(0);
}
UnionType ut = new UnionType(unit);
ut.setCaseTypes(types);
return ut.getType();
}
/**
* Form the intersection of the given types, without
* eliminating duplicates nor canonicalizing.
*/
public static Type intersection(
List<Type> types, Unit unit) {
if (types.size()==1) {
return types.get(0);
}
IntersectionType it = new IntersectionType(unit);
it.setSatisfiedTypes(types);
return it.getType();
}
/**
* Form the canonical intersection of the given types,
* without eliminating duplicates.
*/
public static Type canonicalIntersection(
List<Type> types, Unit unit) {
if (types.size()==1) {
return types.get(0);
}
IntersectionType it = new IntersectionType(unit);
it.setSatisfiedTypes(types);
return it.canonicalize().getType();
}
    /**
     * Try to simplify the intersection of the two given types
     * using cheap special-case rules (Anything, Object, Null),
     * without constructing an IntersectionType.
     *
     * @return the simplified intersection, or null if the
     *         intersection is not simple
     */
    private static Type getSimpleIntersection(
            Type a, Type b) {
        if (a == null || b == null) {
            return null;
        }
        TypeDeclaration ad = a.getDeclaration();
        TypeDeclaration bd = b.getDeclaration();
        if (ad == null || bd == null) {
            return null;
        }
        if (!a.isClassOrInterface()) {
            // handle ClassOrInterface & Union in either order
            if (a.isUnion() && b.isClassOrInterface()) {
                return getSimpleIntersection(b,
                        (ClassOrInterface) bd, a);
            }
            return null;
        }
        else if (!b.isClassOrInterface()) {
            // here aDecl MUST BE a ClassOrInterface as per flow
            if (b.isUnion()) {
                return getSimpleIntersection(a,
                        (ClassOrInterface) ad, b);
            }
            return null;
        }
        // same non-generic declaration: X&X == X
        String an = ad.getQualifiedNameString();
        String bn = bd.getQualifiedNameString();
        if (an.equals(bn)
                && ad.getTypeParameters().isEmpty()
                && bd.getTypeParameters().isEmpty())
            return a;
        if (a.isAnything()) {
            // everything is an Anything
            return b;
        }
        if (b.isAnything()) {
            // everything is an Anything
            return a;
        }
        if (a.isObject()) {
            // every ClassOrInterface is an object except Null
            if (b.isNull() || b.isNullValue()) {
                return ad.getUnit().getNothingType();
            }
            else {
                return b;
            }
        }
        if (b.isObject()) {
            // every ClassOrInterface is an object except Null
            if (a.isNull() || a.isNullValue()) {
                return bd.getUnit().getNothingType();
            }
            else {
                return a;
            }
        }
        if (a.isNull()) {
            // only null is null
            if (b.isNull() || b.isNullValue()) {
                return b;
            }
            else {
                return ad.getUnit().getNothingType();
            }
        }
        if (b.isNull()) {
            // only null is null
            if (a.isNull() || a.isNullValue()) {
                return a;
            }
            else {
                return bd.getUnit().getNothingType();
            }
        }
        // not simple
        return null;
    }
private static Type getSimpleIntersection(
Type a, ClassOrInterface aDecl,
Type b) {
// we only handle Foo|Null
if (b.getCaseTypes().size() != 2) {
return null;
}
// we only handle Object and Null intersections
boolean aIsObject = a.isObject();
boolean aIsNull = a.isNull();
if (!aIsObject && !aIsNull) {
return null;
}
Type caseA = b.getCaseTypes().get(0);
Type caseB = b.getCaseTypes().get(1);
boolean isANull = caseA.isNull();
boolean isBNull = caseB.isNull();
if (aIsObject) {
if (isANull) {
return simpleObjectIntersection(aDecl, caseB);
}
if (isBNull) {
return simpleObjectIntersection(aDecl, caseA);
}
// too complex
return null;
}
if (aIsNull) {
if (isANull) {
return caseA;
}
if (isBNull) {
return caseB;
}
// too complex
return null;
}
// too complex
return null;
}
private static Type simpleObjectIntersection(
ClassOrInterface objectDecl, Type type) {
if (type.isClassOrInterface()) {
return type;
}
else if (type.isTypeParameter()) {
List<Type> satisfiedTypes =
type.getSatisfiedTypes();
if (satisfiedTypes.isEmpty()) {
// trivial intersection TP&Object
Unit unit = objectDecl.getUnit();
List<Type> types = new ArrayList<Type>(2);
types.add(type);
types.add(objectDecl.getType());
return canonicalIntersection(types, unit);
}
for (Type sat: satisfiedTypes) {
if (sat.isObject()) {
// it is already an Object
return type;
}
}
// too complex
return null;
}
// too complex
return null;
}
public static boolean isElementOfUnion(
Type unionType,
ClassOrInterface ci) {
for (Type ct: unionType.getCaseTypes()) {
if (ct.isClassOrInterface() &&
ct.getDeclaration().equals(ci)) {
return true;
}
}
return false;
}
/**
* Find the member which best matches the given signature
* among the given list of members. In the case that
* there are multiple matching declarations, attempt to
* return the "best" match, according to some ad-hoc
* rules that roughly follow how Java resolves
* overloaded methods.
*
* @param members a list of members to search
* @param name the name of the member to find
* @param signature the parameter types to match, or
* null if we're not matching on parameter types
* @param ellipsis true of we want to find a declaration
* which supports varags, or false otherwise
*
* @return the best matching declaration
*/
    public static Declaration lookupMember(
            List<Declaration> members, String name,
            List<Type> signature, boolean ellipsis) {
        // results: lazily-created list of ambiguous exact matches
        List<Declaration> results = null;
        // result: the single exact match found so far
        Declaration result = null;
        // inexactMatch: fallback non-overloaded form/abstraction
        Declaration inexactMatch = null;
        for (int i=0, size=members.size(); i<size ; i++) {
            Declaration d = members.get(i);
            if (isResolvable(d) && isNamed(name, d)) {
                if (signature==null) {
                    //no argument types: either a type
                    //declaration, an attribute, or a method
                    //reference - don't return overloaded
                    //forms of the declaration (instead
                    //return the "abstraction" of them)
                    if (notOverloaded(d)) {
                        return d;
                    }
                }
                else {
                    if (notOverloaded(d)) {
                        //we have found either a non-overloaded
                        //declaration, or the "abstraction"
                        //which of all the overloaded forms
                        //of the declaration
                        //Note: I could not do this optimization
                        //      because then it could not distinguish
                        //      between Java open() and isOpen()
                        /*if (!isAbstraction(d)) {
                            return d;
                        }*/
                        inexactMatch = d;
                    }
                    if (hasMatchingSignature(d, signature, ellipsis)) {
                        //we have found an exactly matching
                        //overloaded declaration
                        if (result == null) {
                            result = d; // first match
                        }
                        else {
                            // more than one match, move to array
                            if (results == null) {
                                results = new ArrayList<Declaration>(2);
                                results.add(result);
                            }
                            addIfBetterMatch(results, d, signature);
                        }
                    }
                }
            }
        }
        // if we never needed a results array
        if (results == null) {
            // single result
            if (result != null) {
                return result;
            }
            // no exact match
            return inexactMatch;
        }
        switch (results.size()) {
        case 0:
            //no exact match, so return the non-overloaded
            //declaration or the "abstraction" of the
            //overloaded declaration
            return inexactMatch;
        case 1:
            //exactly one exact match, so return it
            return results.get(0);
        default:
            //more than one matching overloaded declaration,
            //so return the "abstraction" of the overloaded
            //declaration
            return inexactMatch;
        }
    }
private static void addIfBetterMatch(
List<Declaration> results, Declaration d,
List<Type> signature) {
boolean add=true;
for (Iterator<Declaration> i = results.iterator();
i.hasNext();) {
Declaration o = i.next();
if (betterMatch(d, o, signature)) {
i.remove();
}
else if (betterMatch(o, d, signature)) { //TODO: note asymmetry here resulting in nondeterminate behavior!
add=false;
}
}
if (add) results.add(d);
}
public static Declaration findMatchingOverloadedClass(
Class abstractionClass,
List<Type> signature, boolean ellipsis) {
List<Declaration> results =
new ArrayList<Declaration>(1);
if (!abstractionClass.isAbstraction()) {
return abstractionClass;
}
for (Declaration overloaded:
abstractionClass.getOverloads()) {
if (hasMatchingSignature(overloaded,
signature, ellipsis, false)) {
addIfBetterMatch(results,
overloaded, signature);
}
}
if (results.size() == 1) {
return results.get(0);
}
return abstractionClass;
}
public static boolean isTypeUnknown(Type type) {
return type==null || type.getDeclaration()==null ||
type.containsUnknowns();
}
public static List<Type> getSignature(
Declaration dec) {
if (!(dec instanceof Functional)) {
return null;
}
Functional fun = (Functional) dec;
List<ParameterList> parameterLists =
fun.getParameterLists();
if (parameterLists == null ||
parameterLists.isEmpty()) {
return null;
}
ParameterList parameterList =
parameterLists.get(0);
if (parameterList == null) {
return null;
}
List<Parameter> parameters =
parameterList.getParameters();
if (parameters == null) {
return null;
}
List<Type> signature =
new ArrayList<Type>
(parameters.size());
Unit unit = dec.getUnit();
for (Parameter param: parameters) {
FunctionOrValue model = param.getModel();
Type t =
model==null ?
unit.getUnknownType() :
model.getType();
signature.add(t);
}
return signature;
}
public static boolean isCompletelyVisible(
Declaration member, Type pt) {
if (pt.isUnion()) {
for (Type ct: pt.getCaseTypes()) {
if (!isCompletelyVisible(member,
ct.substitute(pt))) {
return false;
}
}
return true;
}
else if (pt.isIntersection()) {
for (Type ct: pt.getSatisfiedTypes()) {
if (!isCompletelyVisible(member,
ct.substitute(pt))) {
return false;
}
}
return true;
}
else {
if (!isVisible(member, pt.getDeclaration())) {
return false;
}
for (Type at: pt.getTypeArgumentList()) {
if (at!=null &&
!isCompletelyVisible(member, at)) {
return false;
}
}
return true;
}
}
    /**
     * Is the given type visible wherever the given member is
     * visible? A type parameter always is; otherwise the type
     * must be visible from the member's visible scope, and a
     * member visible module-wide (null visible scope) in a
     * shared package must not reference a type from an
     * unshared package.
     * (Note: && binds tighter than ||, so the TypeParameter
     * test short-circuits the whole check.)
     */
    static boolean isVisible(Declaration member,
            TypeDeclaration type) {
        return type instanceof TypeParameter ||
                type.isVisible(member.getVisibleScope()) &&
                (member.getVisibleScope()!=null ||
                !member.getUnit().getPackage().isShared() ||
                type.getUnit().getPackage().isShared());
    }
/**
* Given two instantiations of the same type constructor,
* construct a principal instantiation that is a supertype
* of both. This is impossible in the following special
* cases:
*
* - an abstract class which does not obey the principal
* instantiation inheritance rule
* - an intersection between two instantiations of the
* same type where one argument is a type parameter
*
* Nevertheless, we give it our best shot!
*
* @param dec the type constructor
* @param first the first instantiation
* @param second the second instantiation
*
*/
    public static Type principalInstantiation(
            TypeDeclaration dec,
            Type first, Type second,
            Unit unit) {
        List<TypeParameter> tps = dec.getTypeParameters();
        // the computed argument for each type parameter
        List<Type> args = new ArrayList<Type>(tps.size());
        // use-site variances required by the combined arguments
        Map<TypeParameter,SiteVariance> varianceOverrides =
                new HashMap<TypeParameter,SiteVariance>(1);
        for (TypeParameter tp: tps) {
            Type firstArg =
                    first.getTypeArguments().get(tp);
            Type secondArg =
                    second.getTypeArguments().get(tp);
            Type arg;
            if (firstArg==null || secondArg==null) {
                // missing argument: cannot reconcile
                arg = unit.getUnknownType();
            }
            else {
                // effective use-site variance of each argument
                boolean firstCo = first.isCovariant(tp);
                boolean secondCo = second.isCovariant(tp);
                boolean firstContra = first.isContravariant(tp);
                boolean secondContra = second.isContravariant(tp);
                boolean firstInv = !firstCo && !firstContra;
                boolean secondInv = !secondCo && !secondContra;
                boolean parameterized =
                        firstArg.involvesTypeParameters() ||
                        secondArg.involvesTypeParameters();
                if (firstContra && secondContra) {
                    // in T<in X> & T<in Y> == T<in X|Y>
                    arg = unionType(
                            firstArg, secondArg, unit);
                    if (!tp.isContravariant()) {
                        varianceOverrides.put(tp, IN);
                    }
                }
                else if (firstCo && secondCo) {
                    // T<out X> & T<out Y> == T<out X&Y>
                    arg = intersectionType(
                            firstArg, secondArg, unit);
                    if (!tp.isCovariant()) {
                        varianceOverrides.put(tp, OUT);
                    }
                }
                else if (firstContra && secondInv) {
                    // T<in X> & T<Y>: Y only if X <: Y
                    if (firstArg.isSubtypeOf(secondArg)) {
                        arg = secondArg;
                    }
                    else if (parameterized) {
                        //irreconcilable instantiations
                        arg = unit.getUnknownType();
                    }
                    else {
                        return unit.getNothingType();
                    }
                }
                else if (firstCo && secondInv) {
                    // T<out X> & T<Y>: Y only if Y <: X
                    if (secondArg.isSubtypeOf(firstArg)) {
                        arg = secondArg;
                    }
                    else if (parameterized) {
                        //irreconcilable instantiations
                        arg = unit.getUnknownType();
                    }
                    else {
                        return unit.getNothingType();
                    }
                }
                else if (secondCo && firstInv) {
                    // T<X> & T<out Y>: X only if X <: Y
                    if (firstArg.isSubtypeOf(secondArg)) {
                        arg = firstArg;
                    }
                    else if (parameterized) {
                        //irreconcilable instantiations
                        arg = unit.getUnknownType();
                    }
                    else {
                        return unit.getNothingType();
                    }
                }
                else if (secondContra && firstInv) {
                    // T<X> & T<in Y>: X only if Y <: X
                    if (secondArg.isSubtypeOf(firstArg)) {
                        arg = firstArg;
                    }
                    else if (parameterized) {
                        //irreconcilable instantiations
                        arg = unit.getUnknownType();
                    }
                    else {
                        return unit.getNothingType();
                    }
                }
                else if (firstInv && secondInv) {
                    // T<X> & T<Y>: arguments must agree exactly
                    if (firstArg.isExactly(secondArg)) {
                        arg = firstArg;
                    }
                    else if (parameterized) {
                        //type parameters that might represent
                        //equivalent types at runtime,
                        //irreconcilable instantiations
                        //TODO: detect cases where we know for
                        //      sure that the types are disjoint
                        //      because the type parameters only
                        //      occur as type args
                        arg = unit.getUnknownType();
                    }
                    else {
                        //the type arguments are distinct, and the
                        //intersection is Nothing, so there is
                        //no reasonable principal instantiation
                        return unit.getNothingType();
                    }
                }
                else {
                    //opposite variances
                    //irreconcilable instantiations
                    arg = unit.getUnknownType();
                }
            }
            args.add(arg);
        }
        // apply the combined arguments, carrying over the
        // principal qualifying type and variance overrides
        Type result =
                dec.appliedType(principalQualifyingType(
                        first, second, dec, unit),
                        args);
        result.setVarianceOverrides(varianceOverrides);
        return result;
    }
public static boolean areConsistentSupertypes(
Type st1, Type st2, Unit unit) {
//can't inherit two instantiations of an invariant type
//Note: I don't think we need to check type parameters of
// the qualifying type, since you're not allowed to
// subtype an arbitrary instantiation of a nested
// type - only supertypes of the outer type
// Nor do we need to check variance overrides since
// supertypes can't have them.
List<TypeParameter> typeParameters =
st1.getDeclaration().getTypeParameters();
for (TypeParameter tp: typeParameters) {
if (!tp.isCovariant() && !tp.isContravariant()) {
Type ta1 = st1.getTypeArguments().get(tp);
Type ta2 = st2.getTypeArguments().get(tp);
if (ta1!=null && ta2!=null &&
!ta1.isExactly(ta2)) {
return false;
}
}
}
return !intersectionType(st1, st2, unit).isNothing();
}
/**
* The intersection of the types inherited of the given
* declaration. No need to worry about canonicalization
* because:
*
* 1. an inherited type can't be a union, and
* 2. they are prevented from being disjoint types.
*/
public static Type intersectionOfSupertypes(
TypeDeclaration td) {
Type extendedType = td.getExtendedType();
List<Type> satisfiedTypes = td.getSatisfiedTypes();
List<Type> list =
new ArrayList<Type>
(satisfiedTypes.size()+1);
if (extendedType!=null) {
list.add(extendedType);
}
list.addAll(satisfiedTypes);
Unit unit = td.getUnit();
IntersectionType it = new IntersectionType(unit);
it.setSatisfiedTypes(list);
return it.getType();
}
/**
* The union of the case types of the given declaration.
*/
public static Type unionOfCaseTypes(
TypeDeclaration td) {
List<Type> caseTypes = td.getCaseTypes();
Unit unit = td.getUnit();
if (caseTypes==null) {
return unit.getAnythingType();
}
List<Type> list =
new ArrayList<Type>
(caseTypes.size()+1);
list.addAll(caseTypes);
UnionType it = new UnionType(unit);
it.setCaseTypes(list);
return it.getType();
}
public static int addHashForModule(int ret, Declaration decl) {
Module module = getModule(decl);
return (37 * ret) +
(module != null ? module.hashCode() : 0);
}
public static boolean sameModule(Declaration a, Declaration b) {
Module aMod = getModule(a);
Module bMod = getModule(b);
return aMod.equals(bMod);
}
public static void clearProducedTypeCache(TypeDeclaration decl) {
Module module = getModule(decl);
if(module != null){
module.clearCache(decl);
}
}
public static List<Declaration> getInterveningRefinements(
String name, List<Type> signature,
Declaration root,
TypeDeclaration bottom, TypeDeclaration top) {
List<Declaration> result =
new ArrayList<Declaration>(2);
for (TypeDeclaration std:
bottom.getSupertypeDeclarations()) {
if (std.inherits(top) && !std.equals(bottom)) {
Declaration member =
std.getDirectMember(name,
signature, false);
if (member!=null &&
member.isShared() &&
!isAbstraction(member)) {
TypeDeclaration td =
(TypeDeclaration)
member.getContainer();
Declaration refined =
td.getRefinedMember(name,
signature, false);
if (refined!=null &&
refined.equals(root)) {
result.add(member);
}
}
}
}
return result;
}
public static List<Declaration> getInheritedDeclarations(
String name, TypeDeclaration bottom) {
List<Declaration> result =
new ArrayList<Declaration>(2);
for (TypeDeclaration std:
bottom.getSupertypeDeclarations()) {
if (!std.equals(bottom)) {
Declaration member =
std.getDirectMember(name,
null, false);
if (member!=null &&
member.isShared() &&
!isAbstraction(member)) {
result.add(member);
}
}
}
return result;
}
/**
* Is the given declaration a constructor or singleton
* constructor of a toplevel class?
*
* Constructors of toplevel classes can be directly
* imported into the toplevel namespace of a compilation
* unit.
*/
public static boolean isToplevelClassConstructor(
TypeDeclaration td, Declaration dec) {
return td.isToplevel() &&
(dec instanceof Constructor ||
dec instanceof FunctionOrValue &&
((FunctionOrValue) dec).getTypeDeclaration()
instanceof Constructor);
}
/**
* Is the given declaration a toplevel anonymous class?
*
* Members of toplevel anonymous classes can be directly
* imported into the toplevel namespace of a compilation
* unit.
*/
public static boolean isToplevelAnonymousClass(Scope s) {
if (s instanceof Class) {
Class td = (Class) s;
return td.isAnonymous() && td.isToplevel();
}
else {
return false;
}
}
public static boolean isNative(Declaration dec) {
return dec != null && dec.isNative();
}
public static boolean isNativeHeader(Declaration dec) {
return dec != null && dec.isNativeHeader();
}
public static boolean isNativeImplementation(Declaration dec) {
return dec != null && dec.isNative() && !dec.isNativeHeader();
}
public static boolean isInNativeContainer(Declaration dec) {
Scope container = dec.getContainer();
if (container instanceof Declaration) {
Declaration d = (Declaration) container;
return d.isNative();
}
return false;
}
public static Declaration getNativeDeclaration(
Declaration decl, Backend backend) {
return getNativeDeclaration(decl,
backend == null ? null :
backend.backendSupport);
}
public static Declaration getNativeDeclaration(
Declaration dec, BackendSupport backendSupport) {
if (dec.isNative() &&
backendSupport != null) {
Declaration abstraction = null;
if (backendSupport.supportsBackend(
Backend.fromAnnotation(
dec.getNativeBackend()))) {
abstraction = dec;
}
else {
List<Declaration> overloads =
dec.getOverloads();
if (overloads != null) {
for (Declaration d: overloads) {
if (backendSupport.supportsBackend(
Backend.fromAnnotation(
d.getNativeBackend()))) {
abstraction = d;
break;
}
}
}
}
return abstraction;
}
else {
return dec;
}
}
/**
* The list of type parameters of the given generic
* declaration as types. (As viewed within the body of
* the generic declaration.)
*
* @param dec a generic declaration or type constructor
* @return a list of types of its type parameters
*
* @see Declaration#getTypeParametersAsArguments
*/
public static List<Type> typeParametersAsArgList(Generic dec) {
List<TypeParameter> params =
dec.getTypeParameters();
if (params.isEmpty()) {
return NO_TYPE_ARGS;
}
int size = params.size();
List<Type> paramsAsArgs =
new ArrayList<Type>(size);
for (int i=0; i<size; i++) {
TypeParameter param = params.get(i);
paramsAsArgs.add(param.getType());
}
return paramsAsArgs;
}
/**
* Find the declaration with the given name that occurs
* directly in the given scope, taking into account the
* given backend, if any. Does not take into account
* Java overloading
*
     * @param members the members of the scope to search
     * @param name the name of a declaration occurring
     *        directly in the scope, and not overloaded
     * @param backend the native backend name
*
* @return the matching declaration
*/
public static Declaration lookupMemberForBackend(
List<Declaration> members, String name, String backend) {
for (Declaration dec: members) {
if (isResolvable(dec) && isNamed(name, dec)) {
String nat = dec.getNativeBackend();
if (nat==null) {
return dec;
}
else {
if (nat.equals(backend)) {
return dec;
}
}
}
}
return null;
}
    public static Declaration getNativeHeader(Declaration dec) {
        // look the header up by name in the declaration's container
        return getNativeHeader(dec.getContainer(), dec.getName());
    }
/**
* Find the header with the given name that occurs
* directly in the given scope or if that scope is
* itself a native implementation first look up
* the scope's native header and find the requested
* header there.
*
     * @param container any scope
* @param name the name of a declaration
*
* @return the matching declaration
*/
public static Declaration getNativeHeader(Scope container, String name) {
if (container instanceof Declaration) {
Declaration cd = (Declaration)container;
if (cd.isNative() && !cd.isNativeHeader()) {
// The container is a native implementation so
// we first need to find _its_ header
Scope c =
(Scope)cd.getContainer().getDirectMemberForBackend(
cd.getName(),
Backend.None.nativeAnnotation);
if (c != null) {
// Is this the Value part of an object?
if (c instanceof Value && isObject((Value)c)) {
// Then use the Class part as the container
c = ((Value)c).getType().getDeclaration();
}
container = c;
}
}
}
// Find the header
Declaration header =
container.getDirectMemberForBackend(
name,
Backend.None.nativeAnnotation);
return header;
}
// Check if the Value is part of an object
public static boolean isObject(Value v) {
Type type = v.getType();
// Check type because in case of compile errors it can be null
if (type != null) {
TypeDeclaration typeDecl = type.getDeclaration();
return typeDecl.isAnonymous();
}
return false;
}
public static boolean isImplemented(Declaration decl) {
if (decl instanceof FunctionOrValue) {
FunctionOrValue fov = (FunctionOrValue) decl;
return fov.isImplemented();
}
return false;
}
public static boolean eq(Object decl, Object other) {
if (decl == null) {
return other == null;
} else {
return decl.equals(other);
}
}
public static boolean equal(Declaration decl, Declaration other) {
if (decl instanceof UnionType ||
decl instanceof IntersectionType ||
other instanceof UnionType ||
other instanceof IntersectionType) {
return false;
}
return ModelUtil.eq(decl, other);
}
    public static boolean equalModules(Module scope, Module other) {
        // null-safe module comparison
        return eq(scope, other);
    }
public static Module getModule(Declaration decl) {
return decl.getUnit().getPackage().getModule();
}
public static Package getPackage(Declaration decl) {
return decl.getUnit().getPackage();
}
public static Package getPackageContainer(Scope scope) {
// stop when null or when it's a Package
while(scope != null
&& !(scope instanceof Package)){
// stop if the container is not a Scope
if(!(scope.getContainer() instanceof Scope))
return null;
scope = (Scope) scope.getContainer();
}
return (Package) scope;
}
public static Module getModuleContainer(Scope scope) {
Package pkg = getPackageContainer(scope);
return pkg != null ? pkg.getModule() : null;
}
    public static ClassOrInterface getClassOrInterfaceContainer(
            Element decl) {
        // by default, include the element itself in the search
        return getClassOrInterfaceContainer(decl, true);
    }
public static ClassOrInterface getClassOrInterfaceContainer(
Element decl, boolean includingDecl) {
if (!includingDecl) {
decl = (Element) decl.getContainer();
}
// stop when null or when it's a ClassOrInterface
while(decl != null
&& !(decl instanceof ClassOrInterface)){
// stop if the container is not an Element
if(!(decl.getContainer() instanceof Element))
return null;
decl = (Element) decl.getContainer();
}
return (ClassOrInterface) decl;
}
    /**
     * Compute and set the visible scope of the given
     * declaration by walking outward through its containers.
     * A null visible scope means visible everywhere.
     */
    public static void setVisibleScope(Declaration model) {
        Scope s=model.getContainer();
        while (s!=null) {
            if (s instanceof Declaration) {
                if (model.isShared()) {
                    if (!((Declaration) s).isShared()) {
                        // shared member of an unshared container:
                        // visible from that container's container
                        model.setVisibleScope(s.getContainer());
                        break;
                    }
                }
                else {
                    // unshared member: visible only within its
                    // immediate container
                    model.setVisibleScope(s);
                    break;
                }
            }
            else if (s instanceof Package) {
                //TODO: unshared packages!
                /*if (!((Package) s).isShared()) {
                    model.setVisibleScope(s);
                }*/
                if (!model.isShared()) {
                    model.setVisibleScope(s);
                }
                //null visible scope means visible everywhere
                break;
            }
            else {
                // any other kind of scope bounds visibility
                model.setVisibleScope(s);
                break;
            }
            s = s.getContainer();
        }
    }
/**
 * Determines whether the declaration's container is a class or interface.
 *
 * @param decl The declaration
 * @return true if the declaration is within a class or interface
 */
public static boolean withinClassOrInterface(Declaration decl) {
    return decl.getContainer() instanceof ClassOrInterface;
}
/**
 * Determines whether the declaration's container is a class.
 *
 * @param decl The declaration
 * @return true if the declaration is within a class
 */
public static boolean withinClass(Declaration decl) {
    return decl.getContainer() instanceof Class;
}
/**
 * Determines whether the declaration lives in a class initializer only:
 * it is directly contained in a class and is never captured.
 *
 * @param decl The declaration
 * @return true if local to the initializer
 */
public static boolean isLocalToInitializer(Declaration decl) {
    return withinClass(decl) && !isCaptured(decl);
}
/**
 * Determines whether the declaration is captured.
 *
 * @param decl The declaration
 * @return true if explicitly captured or shared
 */
public static boolean isCaptured(Declaration decl) {
    // Shared elements are implicitly captured although the typechecker doesn't mark them that way
    return decl.isCaptured() || decl.isShared();
}
/**
 * Determines whether the declaration is a {@code Value} whose transient
 * flag is off.
 *
 * @param decl The declaration
 * @return true for a non-transient value, false otherwise
 */
public static boolean isNonTransientValue(Declaration decl) {
    if (decl instanceof Value) {
        return !((Value) decl).isTransient();
    }
    return false;
}
/**
 * Is the given scope a local scope but not an initializer scope?
 * Matches method/value bodies, constructors, control blocks, named
 * argument lists and specifications.
 */
public static boolean isLocalNotInitializerScope(Scope scope) {
    return scope instanceof FunctionOrValue
            || scope instanceof Constructor
            || scope instanceof ControlBlock
            || scope instanceof NamedArgumentList
            || scope instanceof Specification;
}
/**
 * Determines whether the declaration is local to a method,
 * getter or setter, but <strong>returns {@code false} for a declaration
 * local to a Class initializer.</strong>
 *
 * @param decl The declaration
 * @return true if the declaration is local
 */
public static boolean isLocalNotInitializer(Declaration decl) {
    return isLocalNotInitializerScope(decl.getContainer());
}
/**
 * Checks whether a type argument satisfies the enumerated ("of")
 * constraint of the given type parameter.
 *
 * @param receiver the qualifying type the member is invoked on
 * @param member the declaration whose type parameter is constrained
 * @param typeArguments the full list of type arguments in play
 * @param argType the type argument being checked
 * @param param the constrained type parameter
 * @return true if the constraint is satisfied (or absent)
 */
public static boolean argumentSatisfiesEnumeratedConstraint(
        Type receiver, Declaration member,
        List<Type> typeArguments,
        Type argType,
        TypeParameter param) {
    List<Type> caseTypes = param.getCaseTypes();
    if (caseTypes==null || caseTypes.isEmpty()) {
        //no enumerated constraint
        return true;
    }
    //if the type argument is a subtype of one of the cases
    //of the type parameter then the constraint is satisfied
    for (Type ct: caseTypes) {
        // Instantiate the case type in the context of the invocation.
        Type cts =
                ct.appliedType(receiver, member,
                        typeArguments, null);
        if (argType.isSubtypeOf(cts)) {
            return true;
        }
    }
    //if the type argument is itself a type parameter with
    //an enumerated constraint, and every enumerated case
    //is a subtype of one of the cases of the type parameter,
    //then the constraint is satisfied
    TypeDeclaration atd = argType.getDeclaration();
    if (argType.isTypeParameter()) {
        List<Type> argCaseTypes =
                atd.getCaseTypes();
        if (argCaseTypes!=null &&
                !argCaseTypes.isEmpty()) {
            // Every case of the argument must be covered by some case
            // of the constrained parameter.
            for (Type act: argCaseTypes) {
                boolean foundCase = false;
                for (Type ct: caseTypes) {
                    Type cts =
                            ct.appliedType(receiver,
                                    member,
                                    typeArguments, null);
                    if (act.isSubtypeOf(cts)) {
                        foundCase = true;
                        break;
                    }
                }
                if (!foundCase) {
                    return false;
                }
            }
            return true;
        }
    }
    return false;
}
/**
 * Determines whether the declaration is the language module's
 * {@code true} value.
 *
 * @param d a declaration, may be null
 * @return true if it is {@code ceylon.language::true}
 */
public static boolean isBooleanTrue(Declaration d) {
    if (d == null) {
        return false;
    }
    return d.getQualifiedNameString()
            .equals("ceylon.language::true");
}
/**
 * Determines whether the declaration is the language module's
 * {@code false} value.
 *
 * @param d a declaration, may be null
 * @return true if it is {@code ceylon.language::false}
 */
public static boolean isBooleanFalse(Declaration d) {
    if (d == null) {
        return false;
    }
    return d.getQualifiedNameString()
            .equals("ceylon.language::false");
}
/**
 * Wraps the full type of a generic declaration's reference in a fresh
 * anonymous type alias and returns that alias's type, marked as a type
 * constructor.
 *
 * @param generic the generic declaration supplying the type parameters
 * @param scope the scope the synthetic alias is placed in
 * @param member the member the alias is named after
 * @param reference the applied reference whose full type is aliased
 * @param unit the unit the alias belongs to
 * @return the alias's type, flagged as a type constructor
 */
public static Type genericFunctionType(
        Generic generic, Scope scope,
        Declaration member, Reference reference,
        Unit unit) {
    List<TypeParameter> typeParameters =
            generic.getTypeParameters();
    // Build the synthetic alias incrementally; container/scope/unit must
    // be wired before its type is materialized below.
    TypeAlias ta = new TypeAlias();
    ta.setContainer(scope);
    ta.setName("Anonymous#" + member.getName());
    ta.setAnonymous(true);
    ta.setScope(scope);
    ta.setUnit(unit);
    ta.setExtendedType(reference.getFullType());
    // The alias shares the generic declaration's type parameters.
    ta.setTypeParameters(typeParameters);
    Type type = ta.getType();
    type.setTypeConstructor(true);
    return type;
}
/**
 * Determines whether the declaration is a constructor, either directly or
 * as a function/value whose type declaration is a constructor.
 *
 * @param member the declaration to test
 * @return true for constructors and constructor functions/values
 */
public static boolean isConstructor(Declaration member) {
    if (member instanceof Constructor) {
        return true;
    }
    if (member instanceof FunctionOrValue) {
        return ((FunctionOrValue) member)
                .getTypeDeclaration()
                instanceof Constructor;
    }
    return false;
}
} |
package org.ow2.proactive.utils.console;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.io.Writer;
import java.util.ArrayList;
import java.util.Map;
import javax.script.Bindings;
import javax.script.ScriptContext;
import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;
import javax.script.ScriptException;
import org.apache.log4j.Logger;
import org.objectweb.proactive.core.util.log.ProActiveLogger;
//import org.ow2.proactive.scheduler.common.util.SchedulerLoggers;
/**
* UserSchedulerModel is the class to extend to drive consoles.
*
* @author The ProActive Team
* @since ProActive Scheduling 1.0
*/
public abstract class ConsoleModel {
protected static String newline = System.getProperty("line.separator");
protected static int cmdHelpMaxCharLength = 28;
protected boolean initialized = false;
protected boolean terminated = false;
protected boolean displayStack = true;
protected boolean displayOnDemand = true;
protected ScriptEngine engine;
protected Console console;
protected static ConsoleModel model;
protected boolean allowExitCommand;
protected String initEnvFileName = null;
protected ArrayList<Command> commands;
//protected Logger logger = ProActiveLogger.getLogger(SchedulerLoggers.CONSOLE);
protected ConsoleModel() {
commands = new ArrayList<Command>();
commands.add(new Command("filterspush(regexp)", "Add a new regexp to the list of filters"));
commands.add(new Command("filterspop()", "Remove the last inserted regexp from the list of filters"));
commands.add(new Command("filtersclear()", "Clear the list of filters"));
commands.add(new Command("setpagination(state)", "Enable or disable the pagination of the console "
+ "(state is a boolean, true to enable pagination, false to disable it)"));
commands.add(new Command("addcandidate(str)",
"Add a completion candidate to the current completion list "
+ "(str is a string representing the candidate to add)"));
commands
.add(new Command(
"exmode(display,onDemand)",
"Change the way exceptions are displayed (if display is true, stacks are displayed - if onDemand is true, prompt before displaying stacks)"));
}
/**
* Retrieve a completion list from the list of commands
*
* @return a completion list as a string array
*/
protected String[] getCompletionList() {
String[] ret = new String[commands.size()];
for (int i = 0; i < commands.size(); i++) {
String name = commands.get(i).getName();
int lb = name.indexOf('(');
if (lb > 0) {
ret[i] = name.substring(0, lb + 1);
if (name.indexOf(')') - lb == 1) {
ret[i] += ");";
}
} else {
ret[i] = name;
}
}
return ret;
}
public void setInitEnv(String fileName) {
this.initEnvFileName = fileName;
}
/**
* Start this model
*
* @throws Exception
*/
public abstract void startModel() throws Exception;
/**
* @param msg the message to display
* @param the exception to manage
*/
protected void logUserException(String msg, Throwable t) {
//log the exception independently on the configuration
final Writer result = new StringWriter();
final PrintWriter printWriter = new PrintWriter(result);
t.printStackTrace(printWriter);
//logger.info("User exception occured. Msg: " + msg + " stacktrace: " + result);
}
/**
* Display the given message with the given exception according to the options set.
* This will display the exception on demand or not at all.
*
* @param msg the message to display
* @param t the exception to manage
*/
public void handleExceptionDisplay(String msg, Throwable t) {
if (!displayStack) {
console.error(msg + " : " + (t.getMessage() == null ? t : t.getMessage()));
} else {
if (displayOnDemand) {
console.handleExceptionDisplay(msg, t);
} else {
console.printStackTrace(t);
}
}
}
/**
* print the message to the selected output
*
* @param msg the message to print
*/
public void print(String msg) {
console.print(msg);
}
/**
* print the message to the selected error output
*
* @param msg the message to print
*/
public void error(String msg) {
console.error(msg);
}
/**
* Add a candidate for completion
*
* @param candidate
*/
public void addCandidate_(String candidate) {
if (candidate == null) {
error("Candidate string cannot be null or empty");
} else {
console.addCompletion(candidate);
}
}
/**
* Set the exception mode
*
* @param displayStack true if the stack must be displayed, false otherwise. If false, second parameter is ignored.
* @param displayOnDemand true if the console ask if user want to display the stack or not.
*/
public void setExceptionMode_(boolean displayStack, boolean displayOnDemand) {
this.displayStack = displayStack;
this.displayOnDemand = displayOnDemand;
String msg = "Exception display mode changed : ";
if (!displayStack) {
msg += "stack trace not displayed";
} else {
if (displayOnDemand) {
msg += "stack trace displayed on demand";
} else {
msg += "stack trace displayed everytime";
}
}
print(msg);
}
public void help_() {
print(newline + helpScreen());
}
public void cnslhelp_() {
print(newline + helpScreenCnsl());
}
/**
* Check if the model is ready. First check if the console is set and the display is not set on standard output.
*/
protected void checkIsReady() {
if (console == null) {
throw new RuntimeException("Console is not set, it must be set before starting the model");
}
}
/**
* Initialize the console model with the given script file if set.
* Set it with the {@link #setJS_INIT_FILE(String)} if needed.
*
* @throws IOException if something wrong occurs
*/
protected void initialize() throws IOException {
if (!initialized) {
ScriptEngineManager manager = new ScriptEngineManager();
// Engine selection
engine = manager.getEngineByExtension("js");
engine.getContext().setWriter(console.writer());
initialized = true;
}
}
/**
* Convenience method use to evaluate a new script command.
*
* @param cmd the command to evaluate
*/
protected void eval(String cmd) {
eval(cmd, null);
}
/**
* Method use to evaluate a new script command.
*
* @param cmd the command to evaluate
* @param bindings will be added to the JS context if not null
*/
protected void eval(String cmd, Map<String, String> bindings) {
try {
if (!initialized) {
initialize();
}
//Evaluate the command
if (cmd == null) {
error("*ERROR* - Standard input stream has been terminated !");
terminated = true;
} else {
checkIsReady();
if (bindings != null && bindings.size() > 0) {
Bindings bdgs = engine.getBindings(ScriptContext.ENGINE_SCOPE);
if (bdgs != null) {
bdgs.putAll(bindings);
}
}
engine.eval(cmd);
}
} catch (ScriptException e) {
error("*SYNTAX ERROR* - " + format(e.getMessage()));
e.printStackTrace();
} catch (Exception e) {
handleExceptionDisplay("Error while evaluating command", e);
}
}
/**
* Read the given file and return its content as a string.
*
* @param reader the reader on an opened file.
* @return the content of the file as a string.
* @throws IOException
*/
protected static String readFileContent(BufferedReader reader) throws IOException {
StringBuilder sb = new StringBuilder();
String tmp;
while ((tmp = reader.readLine()) != null) {
sb.append(tmp);
}
return sb.toString();
}
/**
* Format the given string and return the result.
* Use to remove some useless characters in the exception message returned by the script engine.
*
* @param msg the message to format.
* @return the formatted message.
*/
private static String format(String msg) {
msg = msg.replaceFirst("[^:]+:", "");
return msg.replaceFirst("[(]<.*", "").trim();
}
/**
* Get the console
*
* @return the console
*/
public Console getConsole() {
return console;
}
/**
* Connect the console value to the given console value
*
* @param console the console to connect
*/
public void connectConsole(Console console) {
if (console == null) {
throw new NullPointerException("Given console is null");
}
this.console = console;
}
} |
package cz.cuni.lf1.lge.ThunderSTORM.results;
import cz.cuni.lf1.lge.ThunderSTORM.UI.RenderingOverlay;
import cz.cuni.lf1.lge.ThunderSTORM.rendering.RenderingQueue;
import ij.ImagePlus;
import java.awt.Color;
import java.lang.reflect.InvocationTargetException;
import java.math.BigDecimal;
import java.math.MathContext;
import java.util.Locale;
import javax.swing.SwingUtilities;
/**
* Class similar to ImageJ's ResultsTable class containing some of the most
* frequently used methods.
*
* Note that all the deprecated methods were omitted. Also the methods load/save
* are not present here - use IImportExport instead.
*
* Also methods incrementCounter and getCounter are not used since it is
* useless. In the ImageJ they are used for reallocation of memory, but here ve
* use collections so wee don't need this functionality.
*
* We also do not need to use row labels for anything, hence the related methods
* are not implemented in this class.
*/
public class IJResultsTable extends GenericTable<ResultsTableWindow> {
public static final String TITLE = "ThunderSTORM: results";
public static final String IDENTIFIER = "results";
private static IJResultsTable resultsTable = null;
public synchronized static IJResultsTable getResultsTable() {
if(resultsTable == null) {
if(SwingUtilities.isEventDispatchThread()) {
setResultsTable(new IJResultsTable());
} else {
try {
SwingUtilities.invokeAndWait(new Runnable() {
@Override
public void run() {
setResultsTable(new IJResultsTable());
}
});
} catch(InterruptedException ex) {
throw new RuntimeException(ex);
} catch(InvocationTargetException ex) {
throw new RuntimeException(ex.getCause());
}
}
}
return resultsTable;
}
public static void setResultsTable(IJResultsTable rt) {
resultsTable = rt;
}
public static boolean isResultsWindow() {
if(resultsTable == null) {
return false;
}
return resultsTable.tableWindow.isVisible();
}
private ImagePlus analyzedImage;
private MeasurementProtocol measurementProtocol = null;
/**
* Constructs an empty ResultsTable with the counter=0 and no columns.
*/
public IJResultsTable() {
super(new ResultsTableWindow(IJResultsTable.TITLE));
}
public void setMeasurementProtocol(MeasurementProtocol protocol) {
measurementProtocol = protocol;
}
public MeasurementProtocol getMeasurementProtocol() {
return measurementProtocol;
}
public void setAnalyzedImage(ImagePlus imp) {
analyzedImage = imp;
}
public ImagePlus getAnalyzedImage() {
return analyzedImage;
}
public void repaintAnalyzedImageOverlay() {
if(analyzedImage != null) {
analyzedImage.setOverlay(null);
RenderingOverlay.showPointsInImage(this, analyzedImage, null, Color.red, RenderingOverlay.MARKER_CROSS);
}
}
@Override
public void reset() {
super.reset();
tableWindow.setPreviewRenderer(null);
tableWindow.getOperationHistoryPanel().removeAllOperations();
tableWindow.setStatus(null);
}
//delegated methods from window
public void showPreview() {
tableWindow.showPreview();
}
public void setLivePreview(boolean enabled) {
tableWindow.setLivePreview(enabled);
}
public OperationsHistoryPanel getOperationHistoryPanel() {
return tableWindow.getOperationHistoryPanel();
}
public void setPreviewRenderer(RenderingQueue renderer) {
tableWindow.setPreviewRenderer(renderer);
}
public void setStatus(String text) {
tableWindow.setStatus(text);
}
ResultsFilter getFilter() {
return tableWindow.getFilter();
}
DuplicatesFilter getDuplicatesFilter() {
return tableWindow.getDuplicatesFilter();
}
ResultsGrouping getGrouping() {
return tableWindow.getGrouping();
}
ResultsDriftCorrection getDriftCorrection() {
return tableWindow.getDriftCorrection();
}
ResultsStageOffset getStageOffset() {
return tableWindow.getStageOffset();
}
void addNewFilter(String paramName, double greaterThan, double lessThan) {
String formula = tableWindow.getFilterFormula().trim();
StringBuilder sb = new StringBuilder(formula);
if(!formula.isEmpty()) {
sb.append(" & ");
}
sb.append("(");
sb.append(paramName).append(" > ").append(BigDecimal.valueOf(greaterThan).round(new MathContext(6)).toString());
sb.append(" & ");
sb.append(paramName).append(" < ").append(BigDecimal.valueOf(lessThan).round(new MathContext(6)).toString());
sb.append(")");
tableWindow.setFilterFormula(sb.toString());
}
@Override
public String getFrameTitle() {
return IJResultsTable.TITLE;
}
@Override
public String getTableIdentifier() {
return IJResultsTable.IDENTIFIER;
}
} |
package de.lmu.ifi.dbs.algorithm.clustering;
import de.lmu.ifi.dbs.algorithm.AbstractAlgorithm;
import de.lmu.ifi.dbs.algorithm.Algorithm;
import de.lmu.ifi.dbs.algorithm.result.clustering.ClustersPlusNoise;
import de.lmu.ifi.dbs.data.RealVector;
import de.lmu.ifi.dbs.database.AssociationID;
import de.lmu.ifi.dbs.database.Database;
import de.lmu.ifi.dbs.distance.DoubleDistance;
import de.lmu.ifi.dbs.distance.LocallyWeightedDistanceFunction;
import de.lmu.ifi.dbs.logging.LogLevel;
import de.lmu.ifi.dbs.logging.ProgressLogRecord;
import de.lmu.ifi.dbs.preprocessing.ProjectedDBSCANPreprocessor;
import de.lmu.ifi.dbs.utilities.Progress;
import de.lmu.ifi.dbs.utilities.QueryResult;
import de.lmu.ifi.dbs.utilities.Util;
import de.lmu.ifi.dbs.utilities.optionhandling.*;
import java.util.*;
/**
 * Provides an abstract algorithm requiring a VarianceAnalysisPreprocessor.
 *
 * A projected variant of DBSCAN: an object may only join a cluster when its
 * locally estimated dimensionality does not exceed the parameter lambda.
 *
 * @author Arthur Zimek (<a
 *         href="mailto:zimek@dbs.ifi.lmu.de">zimek@dbs.ifi.lmu.de</a>)
 */
public abstract class ProjectedDBSCAN<P extends ProjectedDBSCANPreprocessor>
        extends AbstractAlgorithm<RealVector> implements Clustering<RealVector> {
    /**
     * Parameter for epsilon.
     */
    public static final String EPSILON_P = DBSCAN.EPSILON_P;

    /**
     * Description for parameter epsilon.
     */
    public static final String EPSILON_D = "the maximum radius of the neighborhood to be considered, must be suitable to "
        + LocallyWeightedDistanceFunction.class.getName();

    /**
     * Parameter minimum points.
     */
    public static final String MINPTS_P = DBSCAN.MINPTS_P;

    /**
     * Description for parameter minimum points.
     */
    public static final String MINPTS_D = DBSCAN.MINPTS_D;

    /**
     * Epsilon (kept as a string pattern; validated against the distance
     * function in {@link #setParameters(String[])}).
     */
    protected String epsilon;

    /**
     * Minimum points.
     */
    protected int minpts;

    /**
     * Parameter lambda.
     */
    public static final String LAMBDA_P = "lambda";

    /**
     * Description for parameter lambda.
     */
    public static final String LAMBDA_D = "a positive integer specifiying the intrinsic dimensionality of clusters to be found.";

    /**
     * Keeps lambda.
     */
    private int lambda;

    /**
     * Holds a list of clusters found.
     */
    private List<List<Integer>> resultList;

    /**
     * Provides the result of the algorithm.
     */
    private ClustersPlusNoise<RealVector> result;

    /**
     * Holds a set of noise.
     */
    private Set<Integer> noise;

    /**
     * Holds a set of processed ids.
     */
    private Set<Integer> processedIDs;

    /**
     * The distance function.
     */
    private LocallyWeightedDistanceFunction distanceFunction = new LocallyWeightedDistanceFunction();

    /**
     * Provides the abstract algorithm for variance analysis based DBSCAN.
     * Registers the epsilon, minpts and lambda options.
     */
    protected ProjectedDBSCAN() {
        super();
        optionHandler.put(EPSILON_P, new Parameter(EPSILON_P, EPSILON_D, Parameter.Types.DISTANCE_PATTERN));
        optionHandler.put(MINPTS_P, new Parameter(MINPTS_P, MINPTS_D, Parameter.Types.INT));
        optionHandler.put(LAMBDA_P, new Parameter(LAMBDA_P, LAMBDA_D, Parameter.Types.INT));
    }

    /**
     * Runs the projected DBSCAN clustering over the whole database and stores
     * the outcome in {@link #result}.
     *
     * @see AbstractAlgorithm#runInTime(Database)
     */
    protected void runInTime(Database<RealVector> database) throws IllegalStateException {
        if (isVerbose()) {
            verbose("");
        }
        try {
            Progress progress = new Progress("Clustering", database.size());
            resultList = new ArrayList<List<Integer>>();
            noise = new HashSet<Integer>();
            processedIDs = new HashSet<Integer>(database.size());
            // Bind the distance function to the database (this is where its
            // configured preprocessor runs — see setParameters wiring below).
            distanceFunction.setDatabase(database, isVerbose(), isTime());
            if (isVerbose()) {
                verbose("\nClustering:");
            }
            if (database.size() >= minpts) {
                // Standard DBSCAN driver loop: expand a cluster from every
                // not-yet-processed object.
                for (Iterator<Integer> iter = database.iterator(); iter.hasNext();) {
                    Integer id = iter.next();
                    if (!processedIDs.contains(id)) {
                        expandCluster(database, id, progress);
                        // Early exit once everything is assigned.
                        if (processedIDs.size() == database.size() && noise.size() == 0) {
                            break;
                        }
                    }
                    if (isVerbose()) {
                        progress.setProcessed(processedIDs.size());
                        progress(new ProgressLogRecord(LogLevel.PROGRESS,
                            Util.status(progress, resultList.size()), progress.getTask(), progress.status()));
                    }
                }
            }
            else {
                // Fewer objects than minpts: no cluster can form, so
                // everything is noise.
                for (Iterator<Integer> iter = database.iterator(); iter.hasNext();) {
                    Integer id = iter.next();
                    noise.add(id);
                    if (isVerbose()) {
                        progress.setProcessed(processedIDs.size());
                        progress(new ProgressLogRecord(LogLevel.PROGRESS,
                            Util.status(progress, resultList.size()), progress.getTask(), progress.status()));
                    }
                }
            }
            if (isVerbose()) {
                progress.setProcessed(processedIDs.size());
                progress(new ProgressLogRecord(LogLevel.PROGRESS, Util.status(
                    progress, resultList.size()), progress.getTask(),
                    progress.status()));
            }
            // Materialize the result: one array per cluster, plus the noise
            // set as the final entry.
            Integer[][] resultArray = new Integer[resultList.size() + 1][];
            int i = 0;
            for (Iterator<List<Integer>> resultListIter = resultList.iterator(); resultListIter.hasNext(); i++) {
                resultArray[i] = resultListIter.next().toArray(new Integer[0]);
            }
            resultArray[resultArray.length - 1] = noise.toArray(new Integer[0]);
            result = new ClustersPlusNoise<RealVector>(resultArray, database);
            if (isVerbose()) {
                progress.setProcessed(processedIDs.size());
                progress(new ProgressLogRecord(LogLevel.PROGRESS,
                    Util.status(progress, resultList.size()), progress.getTask(), progress.status()));
            }
        }
        catch (Exception e) {
            throw new IllegalStateException(e);
        }
    }

    /**
     * ExpandCluster function of DBSCAN: grows a cluster from the given start
     * object, or marks it as noise when it cannot be a core object.
     */
    protected void expandCluster(Database<RealVector> database,
                                 Integer startObjectID, Progress progress) {
        String label = (String) database.getAssociation(AssociationID.LABEL,
            startObjectID);
        Integer corrDim = (Integer) database.getAssociation(
            AssociationID.LOCAL_DIMENSIONALITY, startObjectID);
        if (this.debug) {
            debugFine("\nEXPAND CLUSTER id = " + startObjectID + " " + label
                + " " + corrDim + "\n#clusters: " + resultList.size());
        }
        // euclidean epsilon neighborhood < minpts OR local dimensionality >
        // lambda -> noise
        if (corrDim == null || corrDim > lambda) {
            noise.add(startObjectID);
            processedIDs.add(startObjectID);
            if (isVerbose()) {
                progress.setProcessed(processedIDs.size());
                progress(new ProgressLogRecord(LogLevel.PROGRESS,
                    Util.status(progress, resultList.size()), progress.getTask(), progress.status()));
            }
            return;
        }
        // compute weighted epsilon neighborhood
        List<QueryResult<DoubleDistance>> seeds = database.rangeQuery(
            startObjectID, epsilon, distanceFunction);
        // neighbors < minPts -> noise
        if (seeds.size() < minpts) {
            noise.add(startObjectID);
            processedIDs.add(startObjectID);
            if (isVerbose()) {
                progress.setProcessed(processedIDs.size());
                progress(new ProgressLogRecord(LogLevel.PROGRESS,
                    Util.status(progress, resultList.size()), progress.getTask(), progress.status()));
            }
            return;
        }
        // try to expand the cluster
        List<Integer> currentCluster = new ArrayList<Integer>();
        for (QueryResult seed : seeds) {
            Integer nextID = seed.getID();
            // NOTE(review): unlike the start object, the neighbor's local
            // dimensionality is not null-checked; a missing association
            // would NPE on the unboxing comparison below — confirm the
            // preprocessor guarantees a value for every object.
            Integer nextID_corrDim = (Integer) database.getAssociation(
                AssociationID.LOCAL_DIMENSIONALITY, nextID);
            // nextID is not reachable from start object
            if (nextID_corrDim > lambda)
                continue;
            if (!processedIDs.contains(nextID)) {
                currentCluster.add(nextID);
                processedIDs.add(nextID);
            }
            else if (noise.contains(nextID)) {
                // Reclaim former noise points into the growing cluster.
                currentCluster.add(nextID);
                noise.remove(nextID);
            }
        }
        // Drop the first seed (presumably the query object itself) —
        // TODO confirm rangeQuery result ordering.
        seeds.remove(0);
        while (seeds.size() > 0) {
            Integer q = seeds.remove(0).getID();
            Integer corrDim_q = (Integer) database.getAssociation(
                AssociationID.LOCAL_DIMENSIONALITY, q);
            // q forms no lambda-dim hyperplane
            if (corrDim_q > lambda)
                continue;
            List<QueryResult<DoubleDistance>> reachables = database.rangeQuery(
                q, epsilon, distanceFunction);
            // NOTE(review): strict '>' here vs. '>= minpts' (via '< minpts')
            // for the start object above — confirm the asymmetry is intended.
            if (reachables.size() > minpts) {
                for (QueryResult<DoubleDistance> r : reachables) {
                    Integer corrDim_r = (Integer) database.getAssociation(
                        AssociationID.LOCAL_DIMENSIONALITY, r.getID());
                    // r is not reachable from q
                    if (corrDim_r > lambda)
                        continue;
                    boolean inNoise = noise.contains(r.getID());
                    boolean unclassified = !processedIDs.contains(r.getID());
                    if (inNoise || unclassified) {
                        if (unclassified) {
                            // Newly discovered object: also expand from it.
                            seeds.add(r);
                        }
                        currentCluster.add(r.getID());
                        processedIDs.add(r.getID());
                        if (inNoise) {
                            noise.remove(r.getID());
                        }
                        if (isVerbose()) {
                            progress.setProcessed(processedIDs.size());
                            int numClusters = currentCluster.size() > minpts ? resultList
                                .size() + 1
                                : resultList.size();
                            progress(new ProgressLogRecord(LogLevel.PROGRESS,
                                Util.status(progress, numClusters), progress.getTask(), progress.status()));
                        }
                    }
                }
            }
            if (processedIDs.size() == database.size() && noise.size() == 0) {
                break;
            }
        }
        if (currentCluster.size() >= minpts) {
            resultList.add(currentCluster);
        }
        else {
            // The expansion stayed below minpts: demote the collected
            // objects (and the start object) to noise.
            for (Integer id : currentCluster) {
                noise.add(id);
            }
            noise.add(startObjectID);
            processedIDs.add(startObjectID);
        }
        if (isVerbose()) {
            progress.setProcessed(processedIDs.size());
            progress(new ProgressLogRecord(LogLevel.PROGRESS, Util.status(progress,
                resultList.size()), progress.getTask(), progress.status()));
        }
    }

    /**
     * Parses epsilon, minpts and lambda, then forwards a synthesized
     * parameter array to the locally weighted distance function so its
     * preprocessor is configured consistently.
     *
     * @see de.lmu.ifi.dbs.utilities.optionhandling.Parameterizable#setParameters(String[])
     */
    public String[] setParameters(String[] args) throws ParameterException {
        String[] remainingParameters = super.setParameters(args);
        epsilon = optionHandler.getOptionValue(EPSILON_P);
        try {
            // test whether epsilon is compatible with distance function
            distanceFunction.valueOf(epsilon);
        }
        catch (IllegalArgumentException e) {
            throw new WrongParameterValueException(EPSILON_P, epsilon, EPSILON_D);
        }
        // minpts
        String minptsString = optionHandler.getOptionValue(MINPTS_P);
        try {
            minpts = Integer.parseInt(minptsString);
            if (minpts <= 0) {
                throw new WrongParameterValueException(MINPTS_P, minptsString, MINPTS_D);
            }
        }
        catch (NumberFormatException e) {
            throw new WrongParameterValueException(MINPTS_P, minptsString, MINPTS_D, e);
        }
        // lambda
        String lambdaString = optionHandler.getOptionValue(LAMBDA_P);
        try {
            lambda = Integer.parseInt(lambdaString);
            if (lambda <= 0) {
                throw new WrongParameterValueException(LAMBDA_P, lambdaString, LAMBDA_D);
            }
        }
        catch (NumberFormatException e) {
            throw new WrongParameterValueException(LAMBDA_P, lambdaString, LAMBDA_D, e);
        }
        // parameters for the distance function: 7 synthesized slots followed
        // by whatever remained unparsed above.
        String[] distanceFunctionParameters = new String[remainingParameters.length + 7];
        System.arraycopy(remainingParameters, 0, distanceFunctionParameters, 7,
            remainingParameters.length);
        // omit preprocessing flag
        distanceFunctionParameters[0] = OptionHandler.OPTION_PREFIX + LocallyWeightedDistanceFunction.OMIT_PREPROCESSING_F;
        // preprocessor
        distanceFunctionParameters[1] = OptionHandler.OPTION_PREFIX + LocallyWeightedDistanceFunction.PREPROCESSOR_CLASS_P;
        distanceFunctionParameters[2] = preprocessorClass().getName();
        // preprocessor epsilon
        distanceFunctionParameters[3] = OptionHandler.OPTION_PREFIX + ProjectedDBSCANPreprocessor.EPSILON_P;
        distanceFunctionParameters[4] = epsilon;
        // preprocessor minpts
        distanceFunctionParameters[5] = OptionHandler.OPTION_PREFIX + ProjectedDBSCANPreprocessor.MINPTS_P;
        distanceFunctionParameters[6] = Integer.toString(minpts);
        distanceFunction.setParameters(distanceFunctionParameters);
        setParameters(args, remainingParameters);
        return remainingParameters;
    }

    /**
     * @see Algorithm#getAttributeSettings()
     */
    @Override
    public List<AttributeSettings> getAttributeSettings() {
        List<AttributeSettings> attributeSettings = super
            .getAttributeSettings();
        AttributeSettings mySettings = attributeSettings.get(0);
        mySettings.addSetting(LAMBDA_P, Integer.toString(lambda));
        mySettings.addSetting(EPSILON_P, epsilon);
        mySettings.addSetting(MINPTS_P, Integer.toString(minpts));
        attributeSettings.addAll(distanceFunction.getAttributeSettings());
        return attributeSettings;
    }

    /**
     * Returns the class actually used as
     * {@link ProjectedDBSCANPreprocessor VarianceAnalysisPreprocessor}.
     *
     * @return the class actually used as
     *         {@link ProjectedDBSCANPreprocessor VarianceAnalysisPreprocessor}
     */
    public abstract Class<P> preprocessorClass();

    /**
     * @see de.lmu.ifi.dbs.algorithm.Algorithm#getResult()
     */
    public ClustersPlusNoise<RealVector> getResult() {
        return result;
    }
}
package de.lmu.ifi.dbs.elki.algorithm.itemsetmining;
import gnu.trove.iterator.TLongIntIterator;
import gnu.trove.map.hash.TLongIntHashMap;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import de.lmu.ifi.dbs.elki.algorithm.AbstractAlgorithm;
import de.lmu.ifi.dbs.elki.data.BitVector;
import de.lmu.ifi.dbs.elki.data.SparseFeatureVector;
import de.lmu.ifi.dbs.elki.data.type.TypeInformation;
import de.lmu.ifi.dbs.elki.data.type.TypeUtil;
import de.lmu.ifi.dbs.elki.data.type.VectorFieldTypeInformation;
import de.lmu.ifi.dbs.elki.database.ids.ArrayModifiableDBIDs;
import de.lmu.ifi.dbs.elki.database.ids.DBIDIter;
import de.lmu.ifi.dbs.elki.database.ids.DBIDUtil;
import de.lmu.ifi.dbs.elki.database.ids.DBIDs;
import de.lmu.ifi.dbs.elki.database.relation.Relation;
import de.lmu.ifi.dbs.elki.database.relation.RelationUtil;
import de.lmu.ifi.dbs.elki.logging.Logging;
import de.lmu.ifi.dbs.elki.logging.statistics.Duration;
import de.lmu.ifi.dbs.elki.logging.statistics.LongStatistic;
import de.lmu.ifi.dbs.elki.result.AprioriResult;
import de.lmu.ifi.dbs.elki.utilities.BitsUtil;
import de.lmu.ifi.dbs.elki.utilities.documentation.Description;
import de.lmu.ifi.dbs.elki.utilities.documentation.Reference;
import de.lmu.ifi.dbs.elki.utilities.documentation.Title;
import de.lmu.ifi.dbs.elki.utilities.exceptions.AbortException;
import de.lmu.ifi.dbs.elki.utilities.optionhandling.AbstractParameterizer;
import de.lmu.ifi.dbs.elki.utilities.optionhandling.OptionID;
import de.lmu.ifi.dbs.elki.utilities.optionhandling.constraints.CommonConstraints;
import de.lmu.ifi.dbs.elki.utilities.optionhandling.parameterization.Parameterization;
import de.lmu.ifi.dbs.elki.utilities.optionhandling.parameters.DoubleParameter;
/**
* Provides the APRIORI algorithm for Mining Association Rules.
* <p>
* Reference: <br>
* R. Agrawal, R. Srikant: Fast Algorithms for Mining Association Rules in Large
* Databases. <br>
* In Proc. 20th Int. Conf. on Very Large Data Bases (VLDB '94), Santiago de
* Chile, Chile 1994.
* </p>
*
* This implementation uses some simple optimizations for 1- and 2-itemsets.
*
* @author Arthur Zimek
* @author Erich Schubert
*
* @apiviz.has Itemset
* @apiviz.uses BitVector
*/
@Title("APRIORI: Algorithm for Mining Association Rules")
@Description("Searches for frequent itemsets")
@Reference(authors = "R. Agrawal, R. Srikant",
title = "Fast Algorithms for Mining Association Rules in Large Databases",
booktitle = "Proc. 20th Int. Conf. on Very Large Data Bases (VLDB '94), Santiago de Chile, Chile 1994",
url = "http:
public class APRIORI extends AbstractAlgorithm<AprioriResult> {
/**
 * The logger for this class.
 */
private static final Logging LOG = Logging.getLogger(APRIORI.class);

/**
 * Statistics logging prefix (fully qualified class name plus a dot).
 */
private final String STAT = this.getClass().getName() + ".";

/**
 * Minimum support. If less than 1, considered to be a relative frequency,
 * otherwise an absolute count.
 */
private double minfreq;

/**
 * Constructor with minimum frequency.
 *
 * @param minfreq Minimum frequency
 */
public APRIORI(double minfreq) {
    super();
    this.minfreq = minfreq;
}
/**
 * Performs the APRIORI algorithm on the given database.
 *
 * @param relation the Relation to process
 * @return the AprioriResult learned by this APRIORI
 */
public AprioriResult run(Relation<BitVector> relation) {
    DBIDs ids = relation.getDBIDs();
    List<Itemset> solution = new ArrayList<>();
    final int size = ids.size();
    // Translate a relative minimum frequency into an absolute support count.
    final int needed = (int) ((minfreq < 1.) ? Math.ceil(minfreq * size) : minfreq);
    // TODO: we don't strictly require a vector field.
    // We could work with knowing just the maximum dimensionality beforehand.
    VectorFieldTypeInformation<BitVector> meta = RelationUtil.assumeVectorField(relation);
    if(size > 0) {
        final int dim = meta.getDimensionality();
        // Level 1: frequent single items (specialized counting).
        Duration timeone = LOG.newDuration(STAT + "1-items.time").begin();
        List<OneItemset> oneitems = buildFrequentOneItemsets(relation, dim, needed);
        LOG.statistics(timeone.end());
        if(LOG.isStatistics()) {
            LOG.statistics(new LongStatistic(STAT + "1-items.frequent", oneitems.size()));
            LOG.statistics(new LongStatistic(STAT + "1-items.transactions", ids.size()));
        }
        if(LOG.isDebuggingFine()) {
            LOG.debugFine(debugDumpCandidates(new StringBuilder(), oneitems, meta));
        }
        solution.addAll(oneitems);
        if(oneitems.size() >= 2) {
            // Level 2: frequent pairs (specialized counting); transactions
            // that cannot support any pair are dropped via 'survivors'.
            Duration timetwo = LOG.newDuration(STAT + "2-items.time").begin();
            ArrayModifiableDBIDs survivors = DBIDUtil.newArray(ids.size());
            List<? extends Itemset> candidates = buildFrequentTwoItemsets(oneitems, relation, dim, needed, ids, survivors);
            ids = survivors; // Continue with reduced set of transactions.
            LOG.statistics(timetwo.end());
            if(LOG.isStatistics()) {
                LOG.statistics(new LongStatistic(STAT + "2-items.frequent", candidates.size()));
                LOG.statistics(new LongStatistic(STAT + "2-items.transactions", ids.size()));
            }
            if(LOG.isDebuggingFine()) {
                LOG.debugFine(debugDumpCandidates(new StringBuilder(), candidates, meta));
            }
            solution.addAll(candidates);
            // Levels 3+: generic generate-and-test until too few frequent
            // itemsets remain to join into longer candidates.
            for(int length = 3; candidates.size() >= length; length++) {
                Duration timel = LOG.newDuration(STAT + length + "-items.time").begin();
                // Join to get the new candidates
                candidates = aprioriGenerate(candidates, length, dim);
                if(LOG.isDebuggingFinest()) {
                    LOG.debugFinest(debugDumpCandidates(new StringBuilder().append("Before pruning: "), candidates, meta));
                }
                survivors = DBIDUtil.newArray(ids.size());
                candidates = frequentItemsets(candidates, relation, needed, ids, survivors);
                ids = survivors; // Continue with reduced set of transactions.
                LOG.statistics(timel.end());
                if(LOG.isStatistics()) {
                    LOG.statistics(new LongStatistic(STAT + length + "-items.frequent", candidates.size()));
                    LOG.statistics(new LongStatistic(STAT + length + "-items.transactions", ids.size()));
                }
                if(LOG.isDebuggingFine()) {
                    LOG.debugFine(debugDumpCandidates(new StringBuilder(), candidates, meta));
                }
                solution.addAll(candidates);
            }
        }
    }
    return new AprioriResult("APRIORI", "apriori", solution, meta);
}
/**
 * Build the frequent 1-itemsets by counting, per dimension, how many
 * transactions contain that item.
 *
 * @param relation Data relation
 * @param dim Maximum dimensionality
 * @param needed Minimum support needed
 * @return 1-itemsets meeting the support threshold, in item order
 */
protected List<OneItemset> buildFrequentOneItemsets(final Relation<? extends SparseFeatureVector<?>> relation, final int dim, final int needed) {
  // TODO: use TIntList and prefill appropriately to avoid knowing "dim"
  // beforehand?
  final int[] occurrences = new int[dim];
  // One pass over all transactions, incrementing the counter of every
  // item present in the sparse vector.
  for(DBIDIter iter = relation.iterDBIDs(); iter.valid(); iter.advance()) {
    final SparseFeatureVector<?> vec = relation.get(iter);
    for(int pos = vec.iter(); vec.iterValid(pos); pos = vec.iterAdvance(pos)) {
      ++occurrences[vec.iterDim(pos)];
    }
  }
  if(LOG.isStatistics()) {
    LOG.statistics(new LongStatistic(STAT + "1-items.candidates", dim));
  }
  // Emit a OneItemset for every sufficiently frequent dimension.
  final List<OneItemset> result = new ArrayList<>(dim);
  for(int d = 0; d < dim; d++) {
    if(occurrences[d] >= needed) {
      result.add(new OneItemset(d, occurrences[d]));
    }
  }
  return result;
}
/**
 * Build the 2-itemsets with a specialized pair-counting pass: frequent items
 * are collected into a bit mask, each transaction is intersected with that
 * mask, and every surviving item pair is counted in a hash map keyed by the
 * two item indices packed into one long.
 *
 * @param oneitems Frequent 1-itemsets
 * @param relation Data relation
 * @param dim Maximum dimensionality
 * @param needed Minimum support needed
 * @param ids Objects to process
 * @param survivors Output: objects that had at least two 1-frequent items.
 * @return Frequent 2-itemsets, sorted
 */
protected List<SparseItemset> buildFrequentTwoItemsets(List<OneItemset> oneitems, final Relation<BitVector> relation, final int dim, final int needed, DBIDs ids, ArrayModifiableDBIDs survivors) {
  // Bit mask of all frequent items; f1 = number of frequent items.
  int f1 = 0;
  long[] mask = BitsUtil.zero(dim);
  for(OneItemset supported : oneitems) {
    BitsUtil.setI(mask, supported.item);
    f1++;
  }
  if(LOG.isStatistics()) {
    LOG.statistics(new LongStatistic(STAT + "2-items.candidates", f1 * (long) (f1 - 1)));
  }
  // We quite aggressively size the map, assuming that almost each combination
  // is present somewhere. If this won't fit into memory, we're likely running
  // OOM somewhere later anyway!
  TLongIntHashMap map = new TLongIntHashMap((f1 * (f1 - 1)) >> 1);
  final long[] scratch = BitsUtil.zero(dim);
  for(DBIDIter iditer = ids.iter(); iditer.valid(); iditer.advance()) {
    // scratch = transaction AND mask: only the frequent items remain.
    BitsUtil.setI(scratch, mask);
    relation.get(iditer).andOnto(scratch);
    boolean lives = false;
    // Count every ordered pair (i < j) of frequent items in this transaction.
    for(int i = BitsUtil.nextSetBit(scratch, 0); i >= 0; i = BitsUtil.nextSetBit(scratch, i + 1)) {
      for(int j = BitsUtil.nextSetBit(scratch, i + 1); j >= 0; j = BitsUtil.nextSetBit(scratch, j + 1)) {
        // Pack (i, j) into one long: i in the high 32 bits, j in the low.
        long key = (((long) i) << 32) | j;
        map.put(key, 1 + map.get(key));
        lives = true;
      }
    }
    if(lives) {
      survivors.add(iditer);
    }
  }
  // Generate candidates of length 2.
  List<SparseItemset> frequent = new ArrayList<>(f1 * (int) Math.sqrt(f1));
  for(TLongIntIterator iter = map.iterator(); iter.hasNext();) {
    iter.advance(); // Trove style iterator - advance first.
    if(iter.value() >= needed) {
      // Unpack the two item indices from the long key.
      int ii = (int) (iter.key() >>> 32);
      // NOTE(review): "& -1L" masks nothing (all bits set); the int cast
      // alone truncates to the low 32 bits.
      int ij = (int) (iter.key() & -1L);
      frequent.add(new SparseItemset(new int[] { ii, ij }, iter.value()));
    }
  }
  // The hashmap may produce them out of order.
  Collections.sort(frequent);
  if(LOG.isStatistics()) {
    LOG.statistics(new LongStatistic(STAT + "2-items.frequent", frequent.size()));
  }
  return frequent;
}
/**
* Prunes a given set of candidates to keep only those BitSets where all
* subsets of bits flipping one bit are frequent already.
*
* @param supported Support map
* @param length Itemset length
* @param dim Dimensionality
* @return itemsets that cannot be pruned by apriori
*/
protected List<Itemset> aprioriGenerate(List<? extends Itemset> supported, int length, int dim) {
List<Itemset> candidateList = new ArrayList<>();
if(supported.size() < length) {
return candidateList;
}
Itemset ref = supported.get(0);
if(ref instanceof SparseItemset) {
// TODO: we currently never switch to DenseItemSet. This may however be
// beneficial when we have few dimensions and many candidates.
// E.g. when length > 32 and dim < 100. But this needs benchmarking!
// For length < 5 and dim > 3000, SparseItemset unsurprisingly was faster
// Scratch item to use for searching.
SparseItemset scratch = new SparseItemset(new int[length - 1]);
long joined = 0L;
final int ssize = supported.size();
for(int i = 0; i < ssize; i++) {
SparseItemset ii = (SparseItemset) supported.get(i);
prefix: for(int j = i + 1; j < ssize; j++) {
SparseItemset ij = (SparseItemset) supported.get(j);
if(!ii.prefixTest(ij)) {
break prefix; // Prefix doesn't match
}
joined++;
// Test subsets (re-) using scratch object
System.arraycopy(ii.indices, 1, scratch.indices, 0, length - 2);
scratch.indices[length - 2] = ij.indices[length - 2];
for(int k = length - 3; k >= 0; k
scratch.indices[k] = ii.indices[k + 1];
int pos = Collections.binarySearch(supported, scratch);
if(pos < 0) {
// Prefix was okay, but one other subset was not frequent
continue prefix;
}
}
int[] items = new int[length];
System.arraycopy(ii.indices, 0, items, 0, length - 1);
items[length - 1] = ij.indices[length - 2];
candidateList.add(new SparseItemset(items));
}
}
if(LOG.isStatistics()) {
// Naive pairwise approach
LOG.statistics(new LongStatistic(STAT + length + "-items.pairwise", (ssize * ((long) ssize - 1))));
LOG.statistics(new LongStatistic(STAT + length + "-items.joined", joined));
LOG.statistics(new LongStatistic(STAT + length + "-items.candidates", candidateList.size()));
}
return candidateList;
}
if(ref instanceof DenseItemset) {
// Scratch item to use for searching.
DenseItemset scratch = new DenseItemset(BitsUtil.zero(dim), length - 1);
long joined = 0L;
final int ssize = supported.size();
for(int i = 0; i < ssize; i++) {
DenseItemset ii = (DenseItemset) supported.get(i);
prefix: for(int j = i + 1; j < ssize; j++) {
DenseItemset ij = (DenseItemset) supported.get(j);
// Prefix test via "|i1 ^ i2| = 2"
System.arraycopy(ii.items, 0, scratch.items, 0, ii.items.length);
BitsUtil.xorI(scratch.items, ij.items);
if(BitsUtil.cardinality(scratch.items) != 2) {
break prefix; // No prefix match; since sorted, no more can follow!
}
++joined;
// Ensure that the first difference is the last item in ii:
int first = BitsUtil.nextSetBit(scratch.items, 0);
if(BitsUtil.nextSetBit(ii.items, first + 1) > -1) {
break prefix; // Different overlap by chance?
}
BitsUtil.orI(scratch.items, ij.items);
// Test subsets.
for(int l = length, b = BitsUtil.nextSetBit(scratch.items, 0); l > 2; l--, b = BitsUtil.nextSetBit(scratch.items, b + 1)) {
BitsUtil.clearI(scratch.items, b);
int pos = Collections.binarySearch(supported, scratch);
if(pos < 0) {
continue prefix;
}
BitsUtil.setI(scratch.items, b);
}
candidateList.add(new DenseItemset(scratch.items.clone(), length));
}
}
if(LOG.isStatistics()) {
// Naive pairwise approach
LOG.statistics(new LongStatistic(STAT + length + "-items.pairwise", (ssize * ((long) ssize - 1))));
LOG.statistics(new LongStatistic(STAT + length + "-items.joined", joined));
LOG.statistics(new LongStatistic(STAT + length + "-items.candidates", candidateList.size()));
}
return candidateList;
}
throw new AbortException("Unexpected itemset type " + ref.getClass());
}
/**
 * Counts the support of the given candidate itemsets over the given
 * transactions, and retains only those meeting the minimum support.
 *
 * @param candidates the candidates to be evaluated
 * @param relation the database to evaluate the candidates on
 * @param needed Minimum support needed
 * @param ids Objects to process
 * @param survivors Output: objects that contained at least one candidate.
 * @return Itemsets with sufficient support
 */
protected List<? extends Itemset> frequentItemsets(List<? extends Itemset> candidates, Relation<BitVector> relation, int needed, DBIDs ids, ArrayModifiableDBIDs survivors) {
  // First pass: accumulate support counts, remembering which transactions
  // still matched anything (only those matter for the next level).
  for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
    final BitVector transaction = relation.get(iter);
    // TODO: exploit that the candidate set is sorted?
    boolean matchedAny = false;
    for(Itemset candidate : candidates) {
      if(candidate.containedIn(transaction)) {
        candidate.increaseSupport();
        matchedAny = true;
      }
    }
    if(matchedAny) {
      survivors.add(iter);
    }
  }
  // Second pass: keep only the candidates with sufficient support.
  final List<Itemset> accepted = new ArrayList<>(candidates.size());
  for(Itemset candidate : candidates) {
    if(candidate.getSupport() >= needed) {
      accepted.add(candidate);
    }
  }
  return accepted;
}
/**
 * Debug method: append a bracketed rendering of every itemset to the buffer.
 *
 * @param msg Output buffer
 * @param candidates Itemsets to dump
 * @param meta Metadata for item labels
 * @return Output buffer (same instance, for chaining)
 */
private StringBuilder debugDumpCandidates(StringBuilder msg, List<? extends Itemset> candidates, VectorFieldTypeInformation<BitVector> meta) {
  msg.append(':');
  for(Itemset candidate : candidates) {
    candidate.appendTo(msg.append(" ["), meta);
    msg.append(']');
  }
  return msg;
}
@Override
public TypeInformation[] getInputTypeRestriction() {
  // APRIORI operates on fixed-width bit-vector transactions only.
  return TypeUtil.array(TypeUtil.BIT_VECTOR_FIELD);
}
@Override
protected Logging getLogger() {
  // Class-level logger used by the framework for progress/statistics output.
  return LOG;
}
/**
 * Parameterization class.
 *
 * @author Erich Schubert
 *
 * @apiviz.exclude
 */
public static class Parameterizer extends AbstractParameterizer {
  /**
   * Parameter to specify the minimum support, in absolute or relative terms.
   */
  public static final OptionID MINSUPP_ID = new OptionID("apriori.minsupp",
      "Threshold for minimum support as minimally required number of transactions (if > 1) "
      + "or the minimum frequency (if <= 1).");

  /**
   * Parameter for minimum support.
   */
  protected double minsupp;

  @Override
  protected void makeOptions(Parameterization config) {
    super.makeOptions(config);
    // Minimum support must be strictly positive.
    final DoubleParameter minSupportParam = new DoubleParameter(MINSUPP_ID);
    minSupportParam.addConstraint(CommonConstraints.GREATER_THAN_ZERO_DOUBLE);
    if(config.grab(minSupportParam)) {
      minsupp = minSupportParam.getValue();
    }
  }

  @Override
  protected APRIORI makeInstance() {
    return new APRIORI(minsupp);
  }
}
} |
package dr.evomodel.coalescent;
import dr.evomodel.tree.TreeModel;
import dr.inference.model.Likelihood;
/**
* Calculates a product of exponential densities and exponential tail probabilities.
*
* @author Guy Baele
*/
public class ExponentialProductLikelihood extends Likelihood.Abstract {
private TreeModel treeModel;
private double logPopSize;
//make sure to provide a log(popSize)
public ExponentialProductLikelihood(TreeModel treeModel, double logPopSize) {
super(treeModel);
this.treeModel = treeModel;
this.logPopSize = logPopSize;
}
public double calculateLogLikelihood() {
//System.err.println(treeModel);
double logPDF = 0.0;
//System.err.println("log(popSize) = " + this.popSize);
CoalescentTreeIntervalStatistic ctis = new CoalescentTreeIntervalStatistic(treeModel);
for (int i = 0; i < ctis.getDimension(); i++) {
int combinations = (int)ctis.getLineageCount(i)*((int)ctis.getLineageCount(i)-1)/2;
double branchLength = ctis.getStatisticValue(i);
//System.err.println("combinations = " + combinations);
//System.err.println("branchLength = " + branchLength);
//System.err.println(ctis.getLineageCount(i));
//single-lineage intervals are not counted
if (ctis.getLineageCount(i) != 1) {
//System.err.println(i + " -> lineage count: " + ctis.getLineageCount(i));
if (i == ctis.getDimension()-1) {
//coalescent event at root: exponential density
//System.err.print("coalescent event at root: ");
double logContribution = -logPopSize - combinations*branchLength*Math.exp(-logPopSize);
logPDF += logContribution;
//System.err.println(logContribution);
} else if (ctis.getLineageCount(i) > ctis.getLineageCount(i+1)) {
//coalescent event: exponential density
//System.err.print("coalescent event (not at root): ");
double logContribution = -logPopSize - combinations*branchLength*Math.exp(-logPopSize);
logPDF += logContribution;
//System.err.println(logContribution);
} else {
//sampling event: exponential tail probability
//System.err.print("sampling event: ");
double logContribution = -combinations*branchLength*Math.exp(-logPopSize);
logPDF += logContribution;
//System.err.println(logContribution);
}
}
}
//System.err.println("expoLike = " + logPDF + "\n");
return logPDF;
}
/**
* Overridden to always return false.
*/
protected boolean getLikelihoodKnown() {
return false;
}
} |
package dr.math.distributions;
/**
* @author Marc Suchard
* @author Guy Baele
*/
public class MultivariateGammaDistribution implements MultivariateDistribution {
//TODO: Currently this implements a product of independent Gammas, need to re-code as true multivariate distribution
public static final String TYPE = "multivariateGamma";
public MultivariateGammaDistribution(double[] shape, double[] scale) {
if (shape.length != scale.length)
throw new RuntimeException("Creation error in MultivariateGammaDistribution");
dim = shape.length;
this.shape = shape;
this.scale = scale;
this.flags = new boolean[dim];
for (int i = 0; i < dim; i++) {
flags[i] = true;
}
}
public MultivariateGammaDistribution(double[] shape, double[] scale, boolean[] flags) {
if (shape.length != scale.length)
throw new RuntimeException("Creation error in MultivariateGammaDistribution");
dim = shape.length;
this.shape = shape;
this.scale = scale;
this.flags = flags;
}
public double logPdf(double[] x) {
double logPdf = 0;
if (x.length != dim) {
throw new IllegalArgumentException("data array is of the wrong dimension");
}
for (int i = 0; i < dim; i++) {
if (flags[i]) {
logPdf += GammaDistribution.logPdf(x[i], shape[i], scale[i]);
}
}
return logPdf;
}
public double[][] getScaleMatrix() {
throw new RuntimeException("Not yet implemented");
}
public double[] getMean() {
throw new RuntimeException("Not yet implemented");
}
public String getType() {
return TYPE;
}
private double[] shape;
private double[] scale;
private int dim;
//for each flag that is true, add the logPdf of that gamma distribution to the overall logPdf
private boolean[] flags;
} |
package edu.mit.streamjit.impl.compiler;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import edu.mit.streamjit.impl.compiler.insts.Instruction;
import edu.mit.streamjit.impl.compiler.insts.PhiInst;
import edu.mit.streamjit.impl.compiler.types.VoidType;
import java.util.ArrayDeque;
import java.util.Collections;
import java.util.IdentityHashMap;
import java.util.Queue;
import java.util.Set;
/**
 * Eliminates dead code from methods or blocks. Currently only useless-phi
 * elimination is active; trivially-dead-instruction elimination is disabled
 * below pending a side-effect analysis.
 * @author Jeffrey Bosboom <jeffreybosboom@gmail.com>
 * @since 4/25/2013
 */
public final class DeadCodeElimination {
	// Utility class: not instantiable.
	private DeadCodeElimination() {}

	/**
	 * Runs all enabled dead-code passes over the method until a fixed point
	 * is reached.
	 * @param method the method to clean up
	 * @return true if anything was changed
	 */
	public static boolean eliminateDeadCode(Method method) {
		boolean changed = false, makingProgress;
		do {
			makingProgress = false;
//			changed |= makingProgress |= eliminateTriviallyDeadInsts(method);
			changed |= makingProgress |= eliminateUselessPhis(method);
		} while (makingProgress);
		return changed;
	}

	/**
	 * Runs all enabled dead-code passes over a single block until a fixed
	 * point is reached.
	 * @param block the block to clean up
	 * @return true if anything was changed
	 */
	public static boolean eliminateDeadCode(BasicBlock block) {
		boolean changed = false, makingProgress;
		do {
			makingProgress = false;
//			changed |= makingProgress |= eliminateTriviallyDeadInsts(block);
			changed |= makingProgress |= eliminateUselessPhis(block);
		} while (makingProgress);
		return changed;
	}

	//TODO: these need a much more nuanced understanding of side effects to be safe.
//	public static boolean eliminateTriviallyDeadInsts(Method method) {
//		boolean changed = false, makingProgress;
//			makingProgress = false;
//			for (BasicBlock block : method.basicBlocks())
//				changed |= makingProgress |= eliminateTriviallyDeadInsts(block);
//		} while (makingProgress);
//		return changed;

//	public static boolean eliminateTriviallyDeadInsts(BasicBlock block) {
//		boolean changed = false, makingProgress;
//			makingProgress = false;
//			for (Instruction i : ImmutableList.copyOf(block.instructions()))
//				if (!(i.getType() instanceof VoidType) && i.uses().isEmpty()) {
//					i.eraseFromParent();
//					changed = makingProgress = true;
//				} while (makingProgress);
//		return changed;

	/**
	 * Replaces phi instructions that can only take one value across the whole
	 * method, iterating blocks until a fixed point.
	 * @param method the method to process
	 * @return true if anything was changed
	 */
	public static boolean eliminateUselessPhis(Method method) {
		boolean changed = false, makingProgress;
		do {
			makingProgress = false;
			for (BasicBlock block : method.basicBlocks())
				changed |= makingProgress |= eliminateUselessPhis(block);
		} while (makingProgress);
		return changed;
	}

	/**
	 * Replaces phi instructions in the block that can only take one value:
	 * either a phi with a single incoming value, or a phi whose transitive
	 * sources reduce to one non-phi value.
	 * @param block the block to process
	 * @return true if anything was changed
	 */
	public static boolean eliminateUselessPhis(BasicBlock block) {
		boolean changed = false, makingProgress;
		do {
			makingProgress = false;
			// Copy the instruction list: replacements mutate the block.
			for (Instruction i : ImmutableList.copyOf(block.instructions())) {
				if (!(i instanceof PhiInst))
					continue;
				PhiInst pi = (PhiInst)i;
				// Case 1: only one incoming value — the phi is a plain copy.
				if (Iterables.size(pi.incomingValues()) == 1) {
					pi.replaceInstWithValue(Iterables.getOnlyElement(pi.incomingValues()));
					makingProgress = true;
					continue;
				}
				// Case 2: all transitive sources collapse to one value.
				ImmutableSet<Value> phiSources = phiSources(pi);
				if (phiSources.size() == 1) {
					pi.replaceInstWithValue(phiSources.iterator().next());
					makingProgress = true;
					continue;
				}
			}
			changed |= makingProgress;
		} while (makingProgress);
		return changed;
	}

	/**
	 * Finds all the non-phi values that might be the result of the given
	 * PhiInst. This will look through intermediate PhiInsts in the hope that
	 * they all can only select one value.
	 * @param inst the phi instruction to find sources of
	 * @return a list of the non-phi values that might be the result
	 */
	private static ImmutableSet<Value> phiSources(PhiInst inst) {
		// Breadth-first walk over the phi graph; identity-based visited set
		// guards against cycles between mutually-referencing phis.
		Queue<PhiInst> worklist = new ArrayDeque<>();
		Set<PhiInst> visited = Collections.newSetFromMap(new IdentityHashMap<PhiInst, Boolean>());
		ImmutableSet.Builder<Value> builder = ImmutableSet.builder();
		worklist.add(inst);
		visited.add(inst);
		while (!worklist.isEmpty()) {
			PhiInst pi = worklist.remove();
			for (Value v : pi.incomingValues())
				if (v instanceof PhiInst && !visited.contains((PhiInst)v)) {
					visited.add((PhiInst)v);
					worklist.add((PhiInst)v);
				} else if (!(v instanceof PhiInst))
					builder.add(v);
		}
		return builder.build();
	}
}
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package edu.wpi.first.wpilibj.templates.commands;
import edu.wpi.first.wpilibj.Joystick;
import edu.wpi.first.wpilibj.templates.OI;
/**
*
* @author Robotics
*/
public class RunClimber extends CommandBase {
public RunClimber() {
requires(climber);
}
// Called just before this Command runs the first time
protected void initialize() {
climber.stop();
}
// Called repeatedly when this Command is scheduled to run
protected void execute() {
double climbSpeed = OI.getDriveJoystick().getAxisChannel(Joystick.AxisType.kThrottle);
System.out.println(climbSpeed);
}
// Make this return true when this Command no longer needs to run execute()
protected boolean isFinished() {
return false;
}
// Called once after isFinished returns true
protected void end() {
}
// Called when another command which requires one or more of the same
// subsystems is scheduled to run
protected void interrupted() {
}
} |
@Override
public void onChannelInvited(final Channel channel) {
    // Automatically accept channel invitations by joining the channel.
    channel.join(new StatusListener() {
        @Override
        public void onSuccess() {
            Log.d(TAG, "Joined Channel: " + channel.getFriendlyName());
        }
        // NOTE(review): no error callback is overridden here, so a failed
        // join is silently dropped — confirm whether the listener interface
        // provides an onError hook that should log/report the failure.
    });
}
package com.timogroup.tomcat;
import com.timogroup.tomcat.config.FilterConfig;
import com.timogroup.tomcat.config.InitParameter;
import com.timogroup.tomcat.config.ListenerConfig;
import com.timogroup.tomcat.config.ServletConfig;
import org.apache.catalina.Context;
import org.apache.catalina.LifecycleException;
import org.apache.catalina.Wrapper;
import org.apache.catalina.connector.Connector;
import org.apache.catalina.startup.Tomcat;
import org.apache.coyote.http11.Http11NioProtocol;
import javax.servlet.*;
import java.util.*;
/**
 * Minimal embedded Tomcat bootstrap: configures a single NIO connector and a
 * root context, registering listeners, filters and servlets programmatically.
 */
public class EmbedTomcat {
    // Fully-qualified class names of the built-in Tomcat servlets/protocol.
    private static final String DefaultServlet = "org.apache.catalina.servlets.DefaultServlet";
    private static final String JspServlet = "org.apache.jasper.servlet.JspServlet";
    private static final String Protocol = "org.apache.coyote.http11.Http11NioProtocol";

    // Registrations collected before startup and applied in initTomcatContext.
    private List<InitParameter> parameterList = new ArrayList<>();
    private List<ListenerConfig> listenerList = new ArrayList<>();
    private List<FilterConfig> filterList = new ArrayList<>();
    private List<ServletConfig> servletList = new ArrayList<>();

    private Tomcat tomcat;
    private String displayName = "tomcat";
    private int port = 8080;
    private int maxThreads = 200;
    private int maxConnections = 10000;
    // Connection timeout in milliseconds.
    private int connectionTimeout = 60 * 1000;
    private String encoding = "utf-8";
    // NOTE(review): this field is never assigned or read — appears unused.
    private Context defaultServlet;

    public Tomcat getTomcat() {
        return tomcat;
    }

    public String getDisplayName() {
        return displayName;
    }

    public void setDisplayName(String displayName) {
        this.displayName = displayName;
    }

    public int getMaxThreads() {
        return maxThreads;
    }

    public void setMaxThreads(int maxThreads) {
        this.maxThreads = maxThreads;
    }

    public int getMaxConnections() {
        return maxConnections;
    }

    public void setMaxConnections(int maxConnections) {
        this.maxConnections = maxConnections;
    }

    public int getPort() {
        return port;
    }

    public void setPort(int port) {
        this.port = port;
    }

    public String getEncoding() {
        return encoding;
    }

    public void setEncoding(String encoding) {
        this.encoding = encoding;
    }

    /** Registers a context-wide init parameter (applied at startup). */
    public void addContextParameter(InitParameter parameter) {
        parameterList.add(parameter);
    }

    /** Registers a ServletContext listener (applied at startup). */
    public void addListener(ListenerConfig listener) {
        listenerList.add(listener);
    }

    /** Registers a filter (applied at startup). */
    public void addFilter(FilterConfig filter) {
        filterList.add(filter);
    }

    /** Registers a servlet (applied at startup). */
    public void addServlet(ServletConfig servletConfig) {
        servletList.add(servletConfig);
    }

    /**
     * Convenience setup for Spring MVC: context loader listener, character
     * encoding filter, and an async-capable dispatcher servlet.
     *
     * @param contextConfig Spring root context configuration location
     * @param servletConfig dispatcher servlet configuration location
     * @param encoding request/response character encoding for the filter
     */
    public void enableSpringMVC(String contextConfig, String servletConfig, String encoding) {
        ListenerConfig contextLoaderListener = DefaultFactory.getDefaultContextLoaderListener(contextConfig);
        addListener(contextLoaderListener);
        FilterConfig filter = DefaultFactory.getDefaultCharacterEncodingFilter(encoding);
        addFilter(filter);
        ServletConfig dispatcherServlet = DefaultFactory.getDefaultDispatcherServlet(servletConfig);
        dispatcherServlet.setAsyncSupported(true);
        addServlet(dispatcherServlet);
    }

    public EmbedTomcat() {
        this.tomcat = new Tomcat();
    }

    /**
     * Configures the connector and root context, starts Tomcat, and blocks
     * until the server shuts down.
     *
     * @throws LifecycleException if Tomcat fails to start
     */
    public synchronized void startAwait() throws LifecycleException {
        tomcat.setPort(port);
        tomcat.getHost().setAutoDeploy(false);
        Connector connector = new Connector(Protocol);
        Http11NioProtocol protocol = (Http11NioProtocol) connector.getProtocolHandler();
        protocol.setMaxThreads(maxThreads);
        protocol.setMaxConnections(maxConnections);
        protocol.setConnectionTimeout(connectionTimeout);
        connector.setPort(port);
        connector.setURIEncoding(encoding);
        // NOTE(review): setConnector may already register the connector with
        // the service; confirm addConnector does not register it twice.
        tomcat.setConnector(connector);
        tomcat.getService().addConnector(connector);
        Context context = tomcat.addContext("/", null);
        initTomcatContext(context);
        tomcat.start();
        showLog();
        tomcat.getServer().await();
    }

    /**
     * Applies all collected registrations to the context via a
     * ServletContainerInitializer, then wires the default/JSP servlets and
     * MIME mappings.
     */
    private void initTomcatContext(Context context) {
        ServletContainerInitializer initializer = new ServletContainerInitializer() {
            @Override
            public void onStartup(Set<Class<?>> c, ServletContext ctx) throws ServletException {
                // Context-wide init parameters.
                for (InitParameter initParameter : parameterList) {
                    ctx.setInitParameter(initParameter.getName(), initParameter.getValue());
                }
                // Listeners; a listener's init parameter is promoted to a
                // context-wide parameter.
                for (ListenerConfig listenerConfig : listenerList) {
                    InitParameter initParameter = listenerConfig.getInitParameter();
                    if (null != initParameter) {
                        ctx.setInitParameter(initParameter.getName(), initParameter.getValue());
                    }
                    ctx.addListener(listenerConfig.getListenerClass());
                }
                // Filters.
                for (FilterConfig filterConfig : filterList) {
                    FilterRegistration.Dynamic filter = ctx.addFilter(filterConfig.getFilterName(), filterConfig.getFilterClass());
                    InitParameter initParameter = filterConfig.getInitParameter();
                    if (null != initParameter) {
                        filter.setInitParameter(initParameter.getName(), initParameter.getValue());
                    }
                    filter.setAsyncSupported(filterConfig.isAsyncSupported());
                }
                // Servlets.
                for (ServletConfig servletConfig : servletList) {
                    ServletRegistration.Dynamic servlet = ctx.addServlet(servletConfig.getServletName(), servletConfig.getServletClass());
                    InitParameter initParameter = servletConfig.getInitParameter();
                    if (null != initParameter) {
                        servlet.setInitParameter(initParameter.getName(), initParameter.getValue());
                    }
                    servlet.addMapping(servletConfig.getUrlPatterns());
                    servlet.setLoadOnStartup(servletConfig.getLoadOnStartup());
                    servlet.setAsyncSupported(servletConfig.isAsyncSupported());
                }
            }
        };
        context.addServletContainerInitializer(initializer, Collections.emptySet());
        setDefaultServlet(context);
        setJspServlet(context);
        // Register the default MIME type mappings.
        Map<String, String> map = DefaultFactory.getDefaultMimeMapping();
        for (String key : map.keySet()) {
            String value = map.get(key);
            context.addMimeMapping(key, value);
        }
    }

    /** Wires Tomcat's DefaultServlet (static resources) at "/". */
    private void setDefaultServlet(Context context) {
        String name = "default";
        Wrapper defaultServlet = context.createWrapper();
        defaultServlet.setName(name);
        defaultServlet.setServletClass(DefaultServlet);
        defaultServlet.addInitParameter("debug", "0");
        defaultServlet.addInitParameter("listings", "false");
        defaultServlet.setLoadOnStartup(1);
        defaultServlet.setOverridable(true);
        context.addChild(defaultServlet);
        context.addServletMapping("/", name);
    }

    /** Wires the JSP servlet for *.jsp and *.jspx. */
    private void setJspServlet(Context context) {
        String name = "jsp";
        Wrapper jspServlet = context.createWrapper();
        jspServlet.setName("jsp");
        jspServlet.setServletClass(JspServlet);
        jspServlet.addInitParameter("fork", "false");
        jspServlet.addInitParameter("xpoweredBy", "false");
        jspServlet.setLoadOnStartup(3);
        context.addChild(jspServlet);
        context.addServletMapping("*.jsp", name);
        context.addServletMapping("*.jspx", name);
    }

    /** Prints a startup banner with the display name and port. */
    private void showLog() {
        StringBuffer buffer = new StringBuffer();
        buffer.append("**********************************" + System.lineSeparator());
        buffer.append("* *" + System.lineSeparator());
        buffer.append("* EmbedTomcat Application *" + System.lineSeparator());
        buffer.append("* *" + System.lineSeparator());
        buffer.append("**********************************" + System.lineSeparator());
        buffer.append(String.format("DisplayName: %s", displayName) + System.lineSeparator());
        buffer.append(String.format("Port: %d", port) + System.lineSeparator());
        System.out.println(buffer.toString());
    }
}
package org.rstudio.core.client.patch;
import org.rstudio.core.client.Debug;
import org.rstudio.core.client.js.JsObject;
public class SubstringDiff
{
public SubstringDiff(String origVal, String newVal)
{
try
{
JsObject diff = diffImpl(origVal, newVal);
replacement_ = diff.getString("replacement");
offset_ = diff.getInteger("offset");
length_ = diff.getInteger("length");
valid_ = true;
}
catch (Exception e)
{
Debug.logException(e);
replacement_ = "";
offset_ = 0;
length_ = 0;
valid_ = false;
}
}
private static final native JsObject diffImpl(String origVal, String newVal)
public String getReplacement()
{
return replacement_;
}
public int getOffset()
{
return offset_;
}
public int getLength()
{
return length_;
}
public boolean isEmpty()
{
return length_ == 0 && replacement_.length() == 0;
}
public boolean isValid()
{
return valid_;
}
private int offset_;
private int length_;
private String replacement_;
private boolean valid_;
} |
package ie.gmit.sw.ai.traversers;
/*
Taken from AI-MAZE_ALGOS project from moodle
*/
import ie.gmit.sw.ai.node.*;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.Set;
public class DepthLimitedDFSTraversator implements Traversator{
    private Node[][] maze;
    // Maximum recursion depth for the depth-limited search.
    private int limit;
    // NOTE(review): set to false permanently once the goal is found, so a
    // traversator instance is effectively single-use — confirm intended.
    private boolean keepRunning = true;
    private long time = System.currentTimeMillis();
    private int visitCount = 0;
    private Node goal;
    private Node start;
    private Set<Node> isVisited = null;
    // Path from start to goal, built front-to-back as recursion unwinds.
    private LinkedList<Node> pathToGoal = null;

    /**
     * @param limit maximum search depth
     * @param goal node to search for
     */
    public DepthLimitedDFSTraversator(int limit,Node goal){
        this.limit = limit;
        this.goal = goal;
    }

    /**
     * Runs a depth-limited DFS from the given node, recording the path to the
     * goal (if found within the limit) in pathToGoal.
     */
    public void traverse(Node[][] maze, Node node) {
        pathToGoal = new LinkedList<>();
        this.maze = maze;
        start = node;
        // create new hashset to keep track of visited nodes
        isVisited = new HashSet<>();
        // System.out.println("Search with limit " + limit);
        // NOTE(review): dfs() already prepends the start node on success, so
        // this prepends it a second time — the path begins with a duplicate
        // start node. Harmless for getNextNode(), but confirm intended.
        if(dfs(node, 1) == true){
            pathToGoal.addFirst(node);
        }
        //System.out.println("Finished Search: " + isVisited.size() + " Visit count: " + visitCount);
        // clear visited nodes
        isVisited = null;
        //if(pathToGoal.size() > 0)
        //System.out.println("Path size: " + pathToGoal.size());
    }

    // gets the next node in the path
    // to the goal node if one was found
    // otherwise returns null
    // NOTE(review): throws NullPointerException if called before traverse()
    // (pathToGoal is still null); the node is peeked, not removed.
    public Node getNextNode(){
        if(pathToGoal.size() > 0){
            return pathToGoal.getFirst();
        }
        else
        {
            return null;
        }
    } // getNextNode()

    /**
     * Recursive depth-limited DFS. Returns true if the goal was reached from
     * this node within the depth limit; on success, each unwinding frame
     * prepends its node so pathToGoal holds the goal-to-start chain.
     */
    private boolean dfs(Node node, int depth){
        // Stop when past the depth limit or after the goal has been found.
        if (!keepRunning || depth > limit) return false;
        //node.setVisited(true);
        isVisited.add(node);
        visitCount++;
        if (node.equals(goal)){
            pathToGoal.addFirst(node);
            //System.out.println("Goal Found by: " + start.hashCode());
            time = System.currentTimeMillis() - time; //Stop the clock
            //TraversatorStats.printStats(node, time, visitCount);
            keepRunning = false;
            return true;
        }
        Node[] children = node.adjacentNodes(maze);
        for (int i = 0; i < children.length; i++) {
            if (children[i] != null && !isVisited.contains(children[i])){
                children[i].setParent(node);
                if(dfs(children[i], depth + 1) == true) {
                    pathToGoal.addFirst(node);
                    return true;
                }
            }
        }
        return false;
    }
}
package fredboat.command.fun;
import com.mashape.unirest.http.HttpResponse;
import com.mashape.unirest.http.Unirest;
import com.mashape.unirest.http.async.Callback;
import com.mashape.unirest.http.exceptions.UnirestException;
import fredboat.commandmeta.abs.Command;
import fredboat.commandmeta.abs.ICommandOwnerRestricted;
import fredboat.util.CacheUtil;
import net.dv8tion.jda.core.MessageBuilder;
import net.dv8tion.jda.core.entities.Guild;
import net.dv8tion.jda.core.entities.Member;
import net.dv8tion.jda.core.entities.Message;
import net.dv8tion.jda.core.entities.TextChannel;
import fredboat.commandmeta.abs.ICommandDisabled;
import fredboat.util.DiscordUtil;
import java.io.File;
import java.io.IOException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import fredboat.FredBoat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.commons.io.FileUtils;
import fredboat.util.TextUtils;
public class SergalsCommand extends Command implements ICommandOwnerRestricted, ICommandDisabled {
private static final Logger log = LoggerFactory.getLogger(SergalCommand.class);
private static final Pattern IMAGE_PATTERN = Pattern.compile("\"file_url\":\"([^\"]+)");
private static final String BASE_URL = "https:
@Override
public void onInvoke(Guild guild, TextChannel channel, Member invoker, Message message, String[] args) {
channel.sendTyping().queue();
try {
String str = Unirest.get(BASE_URL).asString().getBody();
Matcher m = IMAGE_PATTERN.matcher(str);
if(!m.find()){
channel.sendMessage("Failed to extract image from " + BASE_URL).queue();
channel.sendMessage("INFO:" + str + m).queue();
return;
}
File tmp = CacheUtil.getImageFromURL(m.group(1));
channel.sendFile(tmp, null).queue();
log.info("IMG URL:" + m.group(1));
} catch (UnirestException e) {
channel.sendMessage("Failed to connect to " + BASE_URL).queue();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
} |
package com.splunk.shep.archiver.archive;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.impl.client.DefaultHttpClient;
import com.splunk.shep.archiver.model.Bucket;
import com.splunk.shep.archiver.model.FileNotDirectoryException;
public class BucketFreezer {
    // CONFIG get this value from the config.
    // Default directory where buckets are parked before archiving.
    public static final String DEFAULT_SAFE_LOCATION = System
            .getProperty("user.home") + "/" + BucketFreezer.class.getName();

    // Directory buckets are moved to before the archive REST call is made.
    private final String safeLocationForBuckets;

    // Package-private so tests can substitute a stub client.
    /* package-private */HttpClient httpClient;
protected BucketFreezer(String safeLocationForBuckets, HttpClient httpClient) {
this.safeLocationForBuckets = safeLocationForBuckets;
this.httpClient = httpClient;
}
public int freezeBucket(String path) {
try {
moveAndArchiveBucket(path);
return 0;
} catch (FileNotDirectoryException e) {
return 3;
} catch (FileNotFoundException e) {
return 4;
}
}
private void moveAndArchiveBucket(String path)
throws FileNotFoundException, FileNotDirectoryException {
Bucket bucket = Bucket.createWithAbsolutePath(path);
Bucket safeBucket = bucket.moveBucketToDir(getSafeLocation());
doRestCall(safeBucket);
}
private File getSafeLocation() {
File safeLocation = new File(safeLocationForBuckets);
safeLocation.mkdirs();
return safeLocation;
}
private void doRestCall(Bucket bucket) {
HttpUriRequest archiveBucketRequest = createBucketArchiveRequest(bucket);
try {
HttpResponse response = httpClient.execute(archiveBucketRequest); // LOG
handleResponseCodeFromDoingArchiveBucketRequest(response
.getStatusLine().getStatusCode());
} catch (ClientProtocolException e) {
hadleIOExceptionGenereratedByDoingArchiveBucketRequest(e);
} catch (IOException e) {
hadleIOExceptionGenereratedByDoingArchiveBucketRequest(e);
}
}
private void handleResponseCodeFromDoingArchiveBucketRequest(int statusCode) {
// TODO handle the different status codes
switch (statusCode) {
case HttpStatus.SC_OK:
// LOG
break;
case HttpStatus.SC_NO_CONTENT:
break;
default:
// LOG
throw new RuntimeException("Got the response code " + statusCode
+ " from making the archiveBucketRequest.");
}
}
private void hadleIOExceptionGenereratedByDoingArchiveBucketRequest(
IOException e) {
// LOG
// TODO this method should handle the errors in case the bucket transfer
// fails. In this state there is no way of telling if the bucket was
// actually trasfered or not.
throw new RuntimeException("Got IOException" + e);
}
private HttpUriRequest createBucketArchiveRequest(Bucket bucket) {
// CONFIG configure the host, port, request URL with a general
// solution.
String requestString = "http://localhost:9090/shep/rest/archiver/bucket/archive?path="
+ bucket.getDirectory().getAbsolutePath();
HttpGet request = new HttpGet(requestString);
return request;
}
public static BucketFreezer createWithDeafultSafeLocationAndHTTPClient() {
return new BucketFreezer(DEFAULT_SAFE_LOCATION, new DefaultHttpClient());
}
/* package-private */static void runMainWithDepentencies(Runtime runtime,
BucketFreezer bucketFreezer, String... args) {
if (args.length == 0) {
runtime.exit(1);
} else if (args.length >= 2) {
runtime.exit(2);
} else {
runtime.exit(bucketFreezer.freezeBucket(args[0]));
}
}
public static void main(String... args) {
runMainWithDepentencies(Runtime.getRuntime(),
BucketFreezer.createWithDeafultSafeLocationAndHTTPClient(),
args);
}
} |
package com.twitter.mesos.scheduler.log.mesos;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicLong;
import java.util.logging.Logger;
import javax.inject.Provider;
import com.google.common.base.Preconditions;
import com.google.common.collect.Iterables;
import com.google.common.collect.UnmodifiableIterator;
import com.google.common.primitives.Longs;
import com.google.inject.BindingAnnotation;
import com.google.inject.Inject;
import org.apache.mesos.Log;
import com.twitter.common.base.Function;
import com.twitter.common.base.MorePreconditions;
import com.twitter.common.inject.TimedInterceptor.Timed;
import com.twitter.common.quantity.Amount;
import com.twitter.common.quantity.Time;
import com.twitter.common.stats.Stats;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.ElementType.PARAMETER;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
* A {@code Log} implementation backed by a true distributed log in mesos core.
*
* @author John Sirois
*/
public class MesosLog implements com.twitter.mesos.scheduler.log.Log {
private static final Logger LOG = Logger.getLogger(MesosLog.class.getName());
/**
* Binding annotation for the opaque value of a log noop entry.
*/
@BindingAnnotation
@Retention(RUNTIME)
@Target({ PARAMETER, METHOD })
public @interface NoopEntry { }
/**
* Binding annotation for log read timeouts.
*/
@BindingAnnotation
@Retention(RUNTIME)
@Target({ PARAMETER, METHOD })
public @interface ReadTimeout { }
/**
* Binding annotation for log write timeouts - used for truncates and appends.
*/
@BindingAnnotation
@Retention(RUNTIME)
@Target({ PARAMETER, METHOD })
public @interface WriteTimeout { }
private final Provider<Log> logFactory;
private final Provider<Log.Reader> readerFactory;
private final Amount<Long, Time> readTimeout;
private final Provider<Log.Writer> writerFactory;
private final Amount<Long, Time> writeTimeout;
private final byte[] noopEntry;
/**
* Creates a new mesos log.
*
* @param logFactory Factory to provide access to log.
* @param readerFactory Factory to provide access to log readers.
* @param readTimeout Log read timeout.
* @param writerFactory Factory to provide access to log writers.
* @param writeTimeout Log write timeout.
* @param noopEntry A no-op log entry blob.
*/
@Inject
public MesosLog(
Provider<Log> logFactory,
Provider<Log.Reader> readerFactory,
@ReadTimeout Amount<Long, Time> readTimeout,
Provider<Log.Writer> writerFactory,
@WriteTimeout Amount<Long, Time> writeTimeout,
@NoopEntry byte[] noopEntry) {
this.logFactory = Preconditions.checkNotNull(logFactory);
this.readerFactory = Preconditions.checkNotNull(readerFactory);
this.readTimeout = readTimeout;
this.writerFactory = Preconditions.checkNotNull(writerFactory);
this.writeTimeout = writeTimeout;
this.noopEntry = Preconditions.checkNotNull(noopEntry);
}
@Override
public Stream open() {
return new LogStream(
logFactory.get(), readerFactory.get(), readTimeout, writerFactory, writeTimeout, noopEntry);
}
private static class LogStream implements com.twitter.mesos.scheduler.log.Log.Stream {
private static final class OpStats {
final String opName;
final AtomicLong total;
final AtomicLong timeouts;
final AtomicLong failures;
private OpStats(String opName) {
this.opName = MorePreconditions.checkNotBlank(opName);
total = exportLongStat("scheduler_log_native_%s_total", opName);
timeouts = exportLongStat("scheduler_log_native_%s_timeouts", opName);
failures = exportLongStat("scheduler_log_native_%s_failures", opName);
}
private static AtomicLong exportLongStat(String template, Object... args) {
return Stats.exportLong(String.format(template, args));
}
}
private static final Function<Log.Entry, LogEntry> MESOS_ENTRY_TO_ENTRY =
new Function<Log.Entry, LogEntry>() {
@Override public LogEntry apply(Log.Entry entry) {
return new LogEntry(entry);
}
};
private final OpStats read = new OpStats("read");
private final OpStats append = new OpStats("append");
private final OpStats truncate = new OpStats("truncate");
private final AtomicLong entriesSkipped =
Stats.exportLong("scheduler_log_native_native_entries_skipped");
private final Log log;
private final Log.Reader reader;
private final long readTimeout;
private final TimeUnit readTimeUnit;
private final Provider<Log.Writer> writerFactory;
private final long writeTimeout;
private final TimeUnit writeTimeUnit;
private final byte[] noopEntry;
private Log.Writer writer;
LogStream(Log log, Log.Reader reader, Amount<Long, Time> readTimeout,
Provider<Log.Writer> writerFactory, Amount<Long, Time> writeTimeout,
byte[] noopEntry) {
this.log = log;
this.reader = reader;
this.readTimeout = readTimeout.getValue();
this.readTimeUnit = readTimeout.getUnit().getTimeUnit();
this.writerFactory = writerFactory;
this.writeTimeout = writeTimeout.getValue();
this.writeTimeUnit = writeTimeout.getUnit().getTimeUnit();
this.noopEntry = noopEntry;
}
@Timed("scheduler_log_native_read_from")
@Override
public Iterator<Entry> readAll() throws StreamAccessException {
// TODO(John Sirois): Currently we must be the coordinator to ensure we get the 'full read'
// of log entries expected by the users of the com.twitter.mesos.scheduler.log.Log interface.
// Switch to another method of ensuring this when it becomes available in mesos' log
// interface.
try {
append(noopEntry);
} catch (StreamAccessException e) {
throw new StreamAccessException("Error writing noop prior to a read", e);
}
final Log.Position from = reader.beginning();
final Log.Position to = end().unwrap();
// Reading all the entries at once may cause large garbage collections. Instead, we
// lazily read the entries one by one as they are requested.
// TODO(Benjamin Hindman): Eventually replace this functionality with functionality
// from the Mesos Log.
return new UnmodifiableIterator<Entry>() {
private long position = Longs.fromByteArray(from.identity());
private final long endPosition = Longs.fromByteArray(to.identity());
private Entry entry = null;
@Override
public boolean hasNext() {
if (entry != null) {
return true;
}
while (position <= endPosition) {
try {
Log.Position p = log.position(Longs.toByteArray(position));
LOG.info("Reading position " + position + " from the log");
List<Log.Entry> entries = reader.read(p, p, readTimeout, readTimeUnit);
// N.B. HACK! There is currently no way to "increment" a position. Until the Mesos
// Log actually provides a way to "stream" the log, we approximate as much by
// using longs via Log.Position.identity and Log.position.
position++;
// Reading positions in this way means it's possible that we get an "invalid" entry
// (e.g., in the underlying log terminology this would be anything but an append)
// which will be removed from the returned entries resulting in an empty list.
// We skip these.
if (entries.isEmpty()) {
entriesSkipped.getAndIncrement();
continue;
} else {
entry = MESOS_ENTRY_TO_ENTRY.apply(Iterables.getOnlyElement(entries));
return true;
}
} catch (TimeoutException e) {
read.timeouts.getAndIncrement();
throw new StreamAccessException("Timeout reading from log.", e);
} catch (Log.OperationFailedException e) {
read.failures.getAndIncrement();
throw new StreamAccessException("Problem reading from log", e);
} finally {
read.total.getAndIncrement();
}
}
return false;
}
@Override
public Entry next() {
if (entry == null && !hasNext()) {
throw new NoSuchElementException();
}
Entry result = Preconditions.checkNotNull(entry);
entry = null;
return result;
}
};
}
@Timed("scheduler_log_native_append")
@Override
public LogPosition append(final byte[] contents) throws StreamAccessException {
Preconditions.checkNotNull(contents);
Log.Position position = mutate(append, new Mutation<Log.Position>() {
@Override public Log.Position apply(Log.Writer logWriter)
throws TimeoutException, Log.WriterFailedException {
return logWriter.append(contents, writeTimeout, writeTimeUnit);
}
});
return LogPosition.wrap(position);
}
@Timed("scheduler_log_native_truncate_before")
@Override
public void truncateBefore(com.twitter.mesos.scheduler.log.Log.Position position)
throws StreamAccessException {
Preconditions.checkArgument(position instanceof LogPosition);
final Log.Position before = ((LogPosition) position).unwrap();
mutate(truncate, new Mutation<Void>() {
@Override public Void apply(Log.Writer logWriter)
throws TimeoutException, Log.WriterFailedException {
logWriter.truncate(before, writeTimeout, writeTimeUnit);
return null;
}
});
}
private interface Mutation<T> {
T apply(Log.Writer writer) throws TimeoutException, Log.WriterFailedException;
}
private synchronized <T> T mutate(OpStats stats, Mutation<T> mutation) {
if (writer == null) {
writer = writerFactory.get();
}
try {
return mutation.apply(writer);
} catch (TimeoutException e) {
stats.timeouts.getAndIncrement();
throw new StreamAccessException("Timeout performing log " + stats.opName, e);
} catch (Log.WriterFailedException e) {
stats.failures.getAndIncrement();
// We must throw away a writer on any write failure - this could be because of a coordinator
// election in which case we must trigger a new election.
writer = null;
throw new StreamAccessException("Problem performing log" + stats.opName, e);
} finally {
stats.total.getAndIncrement();
}
}
private LogPosition end() {
return LogPosition.wrap(reader.ending());
}
@Override
public void close() {
// noop
}
private static class LogPosition implements com.twitter.mesos.scheduler.log.Log.Position {
private final Log.Position underlying;
LogPosition(Log.Position underlying) {
this.underlying = underlying;
}
static LogPosition wrap(Log.Position position) {
return new LogPosition(position);
}
Log.Position unwrap() {
return underlying;
}
@Override public int compareTo(Position o) {
Preconditions.checkArgument(o instanceof LogPosition);
return underlying.compareTo(((LogPosition) o).underlying);
}
}
private static class LogEntry implements com.twitter.mesos.scheduler.log.Log.Entry {
private final Log.Entry underlying;
public LogEntry(Log.Entry entry) {
this.underlying = entry;
}
@Override
public byte[] contents() {
return underlying.data;
}
}
}
} |
package org.jsmpp.session;
import org.jsmpp.bean.DeliverSm;
import org.jsmpp.extra.ProcessMessageException;
/**
 * This listener listens to every incoming short message, recognized by the
 * deliver_sm command. The logic in this listener should complete in a
 * short time, because the deliver_sm_resp is only sent after the logic has
 * executed. Normally the logic returns, producing a deliver_sm_resp with a
 * zero-valued command_status, or throws a {@link ProcessMessageException}
 * carrying a non-zero command_status (meaning a negative response), depending
 * on the error code specified on the {@link ProcessMessageException}.
 *
 * @author uudashr
 * @version 1.0
 * @since 2.0
 *
 */
public interface MessageReceiverListener {

    /**
     * Event that is called when a short message is accepted.
     *
     * @param deliverSm is the deliver_sm command.
     * @throws ProcessMessageException if a non-OK command_status should be
     *         returned in the response.
     */
    public void onAcceptDeliverSm(DeliverSm deliverSm)
            throws ProcessMessageException;
}
package net.sf.picard.illumina;
import net.sf.picard.illumina.parser.*;
import net.sf.picard.util.IlluminaUtil;
import net.sf.picard.util.Log;
import net.sf.picard.util.TabbedTextFileWithHeaderParser;
import net.sf.picard.cmdline.CommandLineProgram;
import net.sf.picard.cmdline.Option;
import net.sf.picard.cmdline.StandardOptionDefinitions;
import net.sf.picard.cmdline.Usage;
import net.sf.picard.io.IoUtil;
import net.sf.picard.metrics.MetricBase;
import net.sf.picard.metrics.MetricsFile;
import net.sf.samtools.util.SequenceUtil;
import net.sf.samtools.util.StringUtil;
import java.io.BufferedWriter;
import java.io.File;
import java.util.*;
import java.text.NumberFormat;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
/**
* Determine the barcode for each read in an Illumina lane.
* For each tile, a file is written to the basecalls directory of the form s_<lane>_<tile>_barcode.txt.
* An output file contains a line for each read in the tile, aligned with the regular basecall output
* The output file contains the following tab-separated columns:
* - read subsequence at barcode position
* - Y or N indicating if there was a barcode match
* - matched barcode sequence (empty if read did not match one of the barcodes). If there is no match
* but we're close to the threshold of calling it a match we output the barcode that would have been
* matched but in lower case
*
* @author jburke@broadinstitute.org
*/
public class ExtractIlluminaBarcodes extends CommandLineProgram {
// The following attributes define the command-line arguments
@Usage
public String USAGE =
getStandardUsagePreamble() + "Determine the barcode for each read in an Illumina lane.\n" +
"For each tile, a file is written to the basecalls directory of the form s_<lane>_<tile>_barcode.txt. " +
"An output file contains a line for each read in the tile, aligned with the regular basecall output. \n" +
"The output file contains the following tab-separated columns: \n" +
" * read subsequence at barcode position\n" +
" * Y or N indicating if there was a barcode match\n" +
" * matched barcode sequence\n" +
"Note that the order of specification of barcodes can cause arbitrary differences in output for poorly matching barcodes.\n\n";
@Option(doc="The Illumina basecalls directory. ", shortName="B")
public File BASECALLS_DIR;
@Option(doc="Where to write _barcode.txt files. By default, these are written to BASECALLS_DIR.", optional = true)
public File OUTPUT_DIR;
@Option(doc="Lane number. ", shortName= StandardOptionDefinitions.LANE_SHORT_NAME)
public Integer LANE;
@Option(doc= ReadStructure.PARAMETER_DOC, shortName="RS")
public String READ_STRUCTURE;
@Option(doc="Barcode sequence. These must be unique, and all the same length. This cannot be used with reads that " +
"have more than one barcode; use BARCODE_FILE in that case. ", mutex = {"BARCODE_FILE"})
public List<String> BARCODE = new ArrayList<String>();
@Option(doc="Tab-delimited file of barcode sequences, barcode name and, optionally, library name. " +
"Barcodes must be unique and all the same length. Column headers must be 'barcode_sequence_1', " +
"'barcode_sequence_2' (optional), 'barcode_name', and 'library_name'.", mutex = {"BARCODE"})
public File BARCODE_FILE;
@Option(doc="Per-barcode and per-lane metrics written to this file.", shortName = StandardOptionDefinitions.METRICS_FILE_SHORT_NAME)
public File METRICS_FILE;
@Option(doc="Maximum mismatches for a barcode to be considered a match.")
public int MAX_MISMATCHES = 1;
@Option(doc="Minimum difference between number of mismatches in the best and second best barcodes for a barcode to be considered a match.")
public int MIN_MISMATCH_DELTA = 1;
@Option(doc="Maximum allowable number of no-calls in a barcode read before it is considered unmatchable.")
public int MAX_NO_CALLS = 2;
@Option(shortName="Q", doc="Minimum base quality. Any barcode bases falling below this quality will be considered a mismatch even in the bases match.")
public int MINIMUM_BASE_QUALITY = 0;
@Option(shortName="GZIP", doc="Compress output s_l_t_barcode.txt files using gzip and append a .gz extension to the filenames.")
public boolean COMPRESS_OUTPUTS = false;
@Option(doc = "Run this many PerTileBarcodeExtractors in parallel. If NUM_PROCESSORS = 0, number of cores is automatically set to " +
"the number of cores available on the machine. If NUM_PROCESSORS < 0 then the number of cores used will be " +
"the number available on the machine less NUM_PROCESSORS.")
public int NUM_PROCESSORS = 1;
private final Log log = Log.getInstance(ExtractIlluminaBarcodes.class);
/** The read structure of the actual Illumina Run, i.e. the readStructure of the input data */
private ReadStructure readStructure;
/** The read structure of the output cluster data, this may be different from the input readStructure if there are SKIPs in the input readStructure */
private ReadStructure outputReadStructure;
private IlluminaDataProviderFactory factory;
private final Map<String,BarcodeMetric> barcodeToMetrics = new LinkedHashMap<String,BarcodeMetric>();
private BarcodeMetric noMatchMetric = null;
private final NumberFormat tileNumberFormatter = NumberFormat.getNumberInstance();
public ExtractIlluminaBarcodes() {
    // Format tile numbers as zero-padded 4-digit values (e.g. "0001") with no
    // grouping separators; used when building the per-tile barcode filenames.
    tileNumberFormatter.setMinimumIntegerDigits(4);
    tileNumberFormatter.setGroupingUsed(false);
}
/**
 * Main work method: builds the "no match" metric, runs a PerTileBarcodeExtractor
 * for every available tile on a fixed thread pool, merges the per-tile tallies
 * into the lane-wide metrics, computes the derived percentages/ratios, and
 * writes METRICS_FILE.
 *
 * @return 0 on success; 1 if the pool failed to terminate within the timeout,
 *         2 if this thread was interrupted while waiting, 4 if any per-tile
 *         extractor recorded an exception.
 */
@Override
protected int doWork() {
    IoUtil.assertDirectoryIsWritable(BASECALLS_DIR);
    IoUtil.assertFileIsWritable(METRICS_FILE);
    if (OUTPUT_DIR == null) {
        OUTPUT_DIR = BASECALLS_DIR;
    }
    IoUtil.assertDirectoryIsWritable(OUTPUT_DIR);

    // Create BarcodeMetric for counting reads that don't match any barcode.
    // Its "symbolic" barcode is all Ns, one run of Ns per barcode read segment.
    final String[] noMatchBarcode = new String[readStructure.barcodes.length()];
    int index = 0;
    for (final ReadDescriptor d : readStructure.descriptors) {
        if (d.type == ReadType.Barcode) {
            noMatchBarcode[index++] = StringUtil.repeatCharNTimes('N', d.length);
        }
    }
    noMatchMetric = new BarcodeMetric(null, null, IlluminaUtil.barcodeSeqsToString(noMatchBarcode), noMatchBarcode);

    // Resolve NUM_PROCESSORS: 0 means "all available cores"; a negative value
    // means "all available cores minus |NUM_PROCESSORS|" (implemented by adding
    // the negative value); a positive value is used as-is.
    final int numProcessors;
    if (NUM_PROCESSORS == 0) {
        numProcessors = Runtime.getRuntime().availableProcessors();
    }
    else if (NUM_PROCESSORS < 0) {
        numProcessors = Runtime.getRuntime().availableProcessors() + NUM_PROCESSORS;
    }
    else {
        numProcessors = NUM_PROCESSORS;
    }
    log.info("Processing with " + numProcessors + " PerTileBarcodeExtractor(s).");
    final ExecutorService pool = Executors.newFixedThreadPool(numProcessors);

    // One extractor per tile; each writes its own barcode file and keeps its
    // own metric copies so no locking is needed while tiles are in flight.
    final List<PerTileBarcodeExtractor> extractors = new ArrayList<PerTileBarcodeExtractor>(factory.getAvailableTiles().size());
    for (final int tile : factory.getAvailableTiles()) {
        final PerTileBarcodeExtractor extractor = new PerTileBarcodeExtractor(tile, getBarcodeFile(tile));
        pool.submit(extractor);
        extractors.add(extractor);
    }
    pool.shutdown();
    try {
        // Wait a while for existing tasks to terminate
        if (!pool.awaitTermination(6, TimeUnit.HOURS)) {
            pool.shutdownNow(); // Cancel any still-executing tasks
            // Wait a while for tasks to respond to being cancelled
            if (!pool.awaitTermination(60, TimeUnit.SECONDS))
                log.error("Pool did not terminate");
            return 1;
        }
    }
    catch (InterruptedException ie) {
        // (Re-)Cancel if current thread also interrupted
        pool.shutdownNow();
        return 2;
    }
    log.info("Processed " + extractors.size() + " tiles.");

    // Merge per-tile tallies into the lane-wide metrics; abort once any
    // extractor reports a failure.
    for (final PerTileBarcodeExtractor extractor : extractors) {
        for (final String key : barcodeToMetrics.keySet()) {
            barcodeToMetrics.get(key).merge(extractor.getMetrics().get(key));
        }
        noMatchMetric.merge(extractor.getNoMatchMetric());
        if (extractor.getException() != null) {
            log.error("Abandoning metrics calculation because one or more PerTileBarcodeExtractors failed.");
            return 4;
        }
    }

    // Finish metrics tallying.
    int totalReads = noMatchMetric.READS;
    int totalPfReads = noMatchMetric.PF_READS;
    int totalPfReadsAssigned = 0;
    for (final BarcodeMetric barcodeMetric : barcodeToMetrics.values()) {
        totalReads += barcodeMetric.READS;
        totalPfReads += barcodeMetric.PF_READS;
        totalPfReadsAssigned += barcodeMetric.PF_READS;
    }

    // PCT_MATCHES and the ratio-to-best-barcode, over all reads.
    if (totalReads > 0) {
        noMatchMetric.PCT_MATCHES = noMatchMetric.READS/(double)totalReads;
        double bestPctOfAllBarcodeMatches = 0;
        for (final BarcodeMetric barcodeMetric : barcodeToMetrics.values()) {
            barcodeMetric.PCT_MATCHES = barcodeMetric.READS/(double)totalReads;
            if (barcodeMetric.PCT_MATCHES > bestPctOfAllBarcodeMatches) {
                bestPctOfAllBarcodeMatches = barcodeMetric.PCT_MATCHES;
            }
        }
        if (bestPctOfAllBarcodeMatches > 0) {
            noMatchMetric.RATIO_THIS_BARCODE_TO_BEST_BARCODE_PCT =
                    noMatchMetric.PCT_MATCHES/bestPctOfAllBarcodeMatches;
            for (final BarcodeMetric barcodeMetric : barcodeToMetrics.values()) {
                barcodeMetric.RATIO_THIS_BARCODE_TO_BEST_BARCODE_PCT =
                        barcodeMetric.PCT_MATCHES/bestPctOfAllBarcodeMatches;
            }
        }
    }

    // Same calculations, restricted to PF (pass-filter) reads.
    if (totalPfReads > 0) {
        noMatchMetric.PF_PCT_MATCHES = noMatchMetric.PF_READS/(double)totalPfReads;
        double bestPctOfAllBarcodeMatches = 0;
        for (final BarcodeMetric barcodeMetric : barcodeToMetrics.values()) {
            barcodeMetric.PF_PCT_MATCHES = barcodeMetric.PF_READS/(double)totalPfReads;
            if (barcodeMetric.PF_PCT_MATCHES > bestPctOfAllBarcodeMatches) {
                bestPctOfAllBarcodeMatches = barcodeMetric.PF_PCT_MATCHES;
            }
        }
        if (bestPctOfAllBarcodeMatches > 0) {
            noMatchMetric.PF_RATIO_THIS_BARCODE_TO_BEST_BARCODE_PCT =
                    noMatchMetric.PF_PCT_MATCHES/bestPctOfAllBarcodeMatches;
            for (final BarcodeMetric barcodeMetric : barcodeToMetrics.values()) {
                barcodeMetric.PF_RATIO_THIS_BARCODE_TO_BEST_BARCODE_PCT =
                        barcodeMetric.PF_PCT_MATCHES/bestPctOfAllBarcodeMatches;
            }
        }
    }

    // Calculate the normalized matches
    if (totalPfReadsAssigned > 0) {
        final double mean = (double) totalPfReadsAssigned / (double) barcodeToMetrics.values().size();
        for (final BarcodeMetric m : barcodeToMetrics.values()) {
            m.PF_NORMALIZED_MATCHES = m.PF_READS / mean;
        }
    }

    final MetricsFile<BarcodeMetric, Integer> metrics = getMetricsFile();
    for (final BarcodeMetric barcodeMetric : barcodeToMetrics.values()) {
        metrics.addMetric(barcodeMetric);
    }
    metrics.addMetric(noMatchMetric);
    metrics.write(METRICS_FILE);
    return 0;
}
/**
 * Builds the barcode output filename for the given tile, e.g.
 * s_&lt;lane&gt;_&lt;tile&gt;_barcode.txt (with a .gz suffix when outputs are compressed).
 */
private File getBarcodeFile(final int tile) {
    final String suffix = COMPRESS_OUTPUTS ? ".gz" : "";
    final String fileName =
            "s_" + LANE + "_" + tileNumberFormatter.format(tile) + "_barcode.txt" + suffix;
    return new File(OUTPUT_DIR, fileName);
}
/**
 * Validates the command line: configures the read structure and the Illumina
 * data factory, then checks that at least one barcode was supplied and that
 * none is repeated.
 *
 * @return null if command line is valid. If command line is invalid, returns
 *         an array of error messages to be written to the appropriate place.
 */
@Override
protected String[] customCommandLineValidation() {
    final ArrayList<String> errors = new ArrayList<String>();

    // Template reads are never needed when extracting barcodes, so every
    // template segment ("T") is turned into a skip ("S"): the data provider
    // then avoids opening those files and copying their data.
    readStructure = new ReadStructure(READ_STRUCTURE.replaceAll("T", "S"));

    // Quality scores are only requested when a minimum base quality is enforced.
    final IlluminaDataType[] requestedTypes;
    if (MINIMUM_BASE_QUALITY > 0) {
        requestedTypes = new IlluminaDataType[] {IlluminaDataType.BaseCalls, IlluminaDataType.PF, IlluminaDataType.QualityScores};
    } else {
        requestedTypes = new IlluminaDataType[] {IlluminaDataType.BaseCalls, IlluminaDataType.PF};
    }
    factory = new IlluminaDataProviderFactory(BASECALLS_DIR, LANE, readStructure, requestedTypes);
    outputReadStructure = factory.getOutputReadStructure();

    if (BARCODE_FILE != null) {
        parseBarcodeFile(errors);
    } else {
        final Set<String> seen = new HashSet<String>();
        for (final String barcode : BARCODE) {
            // Set.add returns false on a duplicate.
            if (!seen.add(barcode)) {
                errors.add("Barcode " + barcode + " specified more than once.");
            }
            barcodeToMetrics.put(barcode, new BarcodeMetric(null, null, barcode, new String[]{barcode}));
        }
    }

    if (barcodeToMetrics.keySet().size() == 0) {
        errors.add("No barcodes have been specified.");
    }
    return errors.isEmpty() ? null : errors.toArray(new String[errors.size()]);
}
/** Command-line entry point; delegates to the standard instanceMain flow. */
public static void main(final String[] argv) {
    System.exit(new ExtractIlluminaBarcodes().instanceMain(argv));
}
// Column headers recognized in BARCODE_FILE; "barcode_sequence" is the legacy
// single-barcode header, "barcode_sequence_1" the numbered form.
private static final String BARCODE_SEQUENCE_COLUMN = "barcode_sequence";
private static final String BARCODE_SEQUENCE_1_COLUMN = "barcode_sequence_1";
private static final String BARCODE_NAME_COLUMN = "barcode_name";
private static final String LIBRARY_NAME_COLUMN = "library_name";
/**
 * Reads BARCODE_FILE and populates barcodeToMetrics with one BarcodeMetric per
 * row. Validation problems (missing sequence column, duplicate barcodes) are
 * appended to {@code messages} rather than thrown.
 *
 * @param messages accumulator for validation error messages.
 */
private void parseBarcodeFile(final ArrayList<String> messages) {
    final TabbedTextFileWithHeaderParser barcodesParser = new TabbedTextFileWithHeaderParser(BARCODE_FILE);
    // Accept either the legacy single-barcode header or the numbered one.
    final String sequenceColumn = barcodesParser.hasColumn(BARCODE_SEQUENCE_COLUMN)
            ? BARCODE_SEQUENCE_COLUMN : barcodesParser.hasColumn(BARCODE_SEQUENCE_1_COLUMN)
            ? BARCODE_SEQUENCE_1_COLUMN : null;
    if (sequenceColumn == null) {
        messages.add(BARCODE_FILE + " does not have " + BARCODE_SEQUENCE_COLUMN + " or " +
                BARCODE_SEQUENCE_1_COLUMN + " column header");
        return;
    }
    final boolean hasBarcodeName = barcodesParser.hasColumn(BARCODE_NAME_COLUMN);
    final boolean hasLibraryName = barcodesParser.hasColumn(LIBRARY_NAME_COLUMN);
    final int numBarcodes = readStructure.barcodes.length();
    final Set<String> barcodes = new HashSet<String>();
    for (final TabbedTextFileWithHeaderParser.Row row : barcodesParser) {
        // Collect one sequence per barcode segment of the read structure; the
        // first comes from sequenceColumn, later ones from barcode_sequence_N.
        final String bcStrings[] = new String[numBarcodes];
        int barcodeNum = 1;
        for (final ReadDescriptor rd : readStructure.descriptors) {
            if (rd.type != ReadType.Barcode) continue;
            final String header = barcodeNum == 1 ? sequenceColumn : "barcode_sequence_" + String.valueOf(barcodeNum);
            bcStrings[barcodeNum-1] = row.getField(header);
            barcodeNum++;
        }
        final String bcStr = IlluminaUtil.barcodeSeqsToString(bcStrings);
        if (barcodes.contains(bcStr)) {
            messages.add("Barcode " + bcStr + " specified more than once in " + BARCODE_FILE);
        }
        barcodes.add(bcStr);
        final String barcodeName = (hasBarcodeName? row.getField(BARCODE_NAME_COLUMN): "");
        final String libraryName = (hasLibraryName? row.getField(LIBRARY_NAME_COLUMN): "");
        final BarcodeMetric metric = new BarcodeMetric(barcodeName, libraryName, bcStr, bcStrings);
        // NOTE(review): the map key is StringUtil.join("", bcStrings) while
        // duplicate detection above uses IlluminaUtil.barcodeSeqsToString(bcStrings);
        // presumably these agree for single barcodes — confirm they cannot
        // diverge for multi-barcode reads.
        barcodeToMetrics.put(StringUtil.join("", bcStrings), metric);
    }
    barcodesParser.close();
}
/**
 * Metrics produced by the ExtractIlluminaBarcodes program that is used to parse data in
 * the basecalls directory and determine to which barcode each read should be assigned.
 */
public static class BarcodeMetric extends MetricBase {
    /**
     * The barcode (from the set of expected barcodes) for which the following metrics apply.
     * Note that the "symbolic" barcode of NNNNNN is used to report metrics for all reads that
     * do not match a barcode.
     */
    public String BARCODE;
    /** The name associated with the barcode (from BARCODE_FILE, when present). */
    public String BARCODE_NAME = "";
    /** The library name associated with the barcode (from BARCODE_FILE, when present). */
    public String LIBRARY_NAME = "";
    /** The total number of reads matching the barcode. */
    public int READS = 0;
    /** The number of PF reads matching this barcode (always less than or equal to READS). */
    public int PF_READS = 0;
    /** The number of all reads matching this barcode that matched with 0 errors or no-calls. */
    public int PERFECT_MATCHES = 0;
    /** The number of PF reads matching this barcode that matched with 0 errors or no-calls. */
    public int PF_PERFECT_MATCHES = 0;
    /** The number of all reads matching this barcode that matched with 1 error or no-call. */
    public int ONE_MISMATCH_MATCHES = 0;
    /** The number of PF reads matching this barcode that matched with 1 error or no-call. */
    public int PF_ONE_MISMATCH_MATCHES = 0;
    /** The percentage of all reads in the lane that matched to this barcode. */
    public double PCT_MATCHES = 0d;
    /**
     * The rate of all reads matching this barcode to all reads matching the most prevelant barcode. For the
     * most prevelant barcode this will be 1, for all others it will be less than 1 (except for the possible
     * exception of when there are more orphan reads than for any other barcode, in which case the value
     * may be arbitrarily large). One over the lowest number in this column gives you the fold-difference
     * in representation between barcodes.
     */
    public double RATIO_THIS_BARCODE_TO_BEST_BARCODE_PCT = 0d;
    /** The percentage of PF reads in the lane that matched to this barcode. */
    public double PF_PCT_MATCHES = 0d;
    /**
     * The rate of PF reads matching this barcode to PF reads matching the most prevelant barcode. For the
     * most prevelant barcode this will be 1, for all others it will be less than 1 (except for the possible
     * exception of when there are more orphan reads than for any other barcode, in which case the value
     * may be arbitrarily large). One over the lowest number in this column gives you the fold-difference
     * in representation of PF reads between barcodes.
     *
     */
    public double PF_RATIO_THIS_BARCODE_TO_BEST_BARCODE_PCT = 0d;
    /**
     * The "normalized" matches to each barcode. This is calculated as the number of pf reads matching
     * this barcode over the sum of all pf reads matching any barcode (excluding orphans). If all barcodes
     * are represented equally this will be 1.
     */
    public double PF_NORMALIZED_MATCHES;

    // Byte representation of each barcode segment, precomputed for comparison.
    protected byte[][] barcodeBytes;

    /**
     * @param barcodeName    optional display name for the barcode (may be null).
     * @param libraryName    optional library name (may be null).
     * @param barcodeDisplay human-readable form of the (possibly multi-segment) barcode.
     * @param barcodeSeqs    one sequence per barcode segment.
     */
    public BarcodeMetric(final String barcodeName, final String libraryName,
                         final String barcodeDisplay, final String[] barcodeSeqs) {
        this.BARCODE = barcodeDisplay;
        this.BARCODE_NAME = barcodeName;
        this.LIBRARY_NAME = libraryName;
        this.barcodeBytes = new byte[barcodeSeqs.length][];
        for (int i = 0; i < barcodeSeqs.length; i++) {
            barcodeBytes[i] = net.sf.samtools.util.StringUtil.stringToBytes(barcodeSeqs[i]);
        }
    }

    /**
     * This ctor is necessary for when reading metrics from file
     */
    public BarcodeMetric() {
        barcodeBytes = null;
    }

    /**
     * Creates a copy of metric initialized with only non-accumulated and non-calculated values set
     */
    public static BarcodeMetric copy(final BarcodeMetric metric) {
        final BarcodeMetric result = new BarcodeMetric();
        result.BARCODE = metric.BARCODE;
        result.BARCODE_NAME = metric.BARCODE_NAME;
        result.LIBRARY_NAME = metric.LIBRARY_NAME;
        result.barcodeBytes = metric.barcodeBytes;
        return result;
    }

    /**
     * Adds the non-calculated (raw count) fields of {@code metric} into this one;
     * the derived percentage/ratio fields are recomputed elsewhere.
     * @param metric the per-tile tally to fold into this lane-wide metric.
     */
    public void merge(final BarcodeMetric metric) {
        this.READS += metric.READS;
        this.PF_READS += metric.PF_READS;
        this.PERFECT_MATCHES += metric.PERFECT_MATCHES;
        this.PF_PERFECT_MATCHES += metric.PF_PERFECT_MATCHES;
        this.ONE_MISMATCH_MATCHES += metric.ONE_MISMATCH_MATCHES;
        this.PF_ONE_MISMATCH_MATCHES += metric.PF_ONE_MISMATCH_MATCHES;
    }
}
/**
* Extracts barcodes and accumulates metrics for an entire tile.
*/
private class PerTileBarcodeExtractor implements Runnable {
private final int tile;
private final File barcodeFile;
private final Map<String,BarcodeMetric> metrics;
private final BarcodeMetric noMatch;
private Exception exception = null;
private final boolean usingQualityScores= MINIMUM_BASE_QUALITY > 0;
/** Utility class to hang onto data about the best match for a given barcode */
class BarcodeMatch {
boolean matched;
String barcode;
int mismatches;
int mismatchesToSecondBest;
}
/**
* Constructor
* @param tile The number of the tile being processed; used for logging only.
* @param barcodeFile The file to write the barcodes to
*/
public PerTileBarcodeExtractor(final int tile, final File barcodeFile) {
this.tile = tile;
this.barcodeFile = barcodeFile;
this.metrics = new LinkedHashMap<String,BarcodeMetric>(barcodeToMetrics.size());
for (final String key : barcodeToMetrics.keySet()) {
this.metrics.put(key, BarcodeMetric.copy(barcodeToMetrics.get(key)));
}
this.noMatch = BarcodeMetric.copy(noMatchMetric);
}
// These methods return the results of the extraction
public synchronized Map<String,BarcodeMetric> getMetrics() { return this.metrics; }
public synchronized BarcodeMetric getNoMatchMetric() { return this.noMatch; }
public synchronized Exception getException() { return this.exception; }
/**
* run method which extracts barcodes and accumulates metrics for an entire tile
*/
synchronized public void run() {
log.info("Extracting barcodes for tile " + tile);
//Sometimes makeDataProvider takes a while waiting for slow file IO, for each tile the needed set of files
//is non-overlapping sets of files so make the data providers in the individual threads for PerTileBarcodeExtractors
//so they are not all waiting for each others file operations
final IlluminaDataProvider provider = factory.makeDataProvider(Arrays.asList(tile));
//Most likely we have SKIPS in our read structure since we replace all template reads with skips in the input data structure
//(see customCommnandLineValidation), therefore we must use the outputReadStructure to index into the output cluster data
final int [] barcodeIndices = outputReadStructure.barcodes.getIndices();
final BufferedWriter writer = IoUtil.openFileForBufferedWriting(barcodeFile);
try {
final byte barcodeSubsequences[][] = new byte[barcodeIndices.length][];
final byte qualityScores[][] = usingQualityScores ? new byte[barcodeIndices.length][] : null;
while (provider.hasNext()) {
// Extract the barcode from the cluster and write it to the file for the tile
final ClusterData cluster = provider.next();
for (int i = 0; i < barcodeIndices.length; i++) {
barcodeSubsequences[i] = cluster.getRead(barcodeIndices[i]).getBases();
if (usingQualityScores) qualityScores[i] = cluster.getRead(barcodeIndices[i]).getQualities();
}
final boolean passingFilter = cluster.isPf();
final BarcodeMatch match = findBestBarcodeAndUpdateMetrics(barcodeSubsequences, qualityScores, passingFilter, metrics, noMatchMetric);
final String yOrN = (match.matched ? "Y" : "N");
for (final byte[] bc : barcodeSubsequences) {
writer.write(StringUtil.bytesToString(bc));
}
writer.write("\t" + yOrN + "\t" + match.barcode + "\t" + String.valueOf(match.mismatches) +
"\t" + String.valueOf(match.mismatchesToSecondBest));
writer.newLine();
}
writer.close();
}
catch (Exception e) {
log.error(e, "Error processing tile ", this.tile);
this.exception = e;
}
}
/**
* Find the best barcode match for the given read sequence, and accumulate metrics
* @param readSubsequences portion of read containing barcode
* @param passingFilter PF flag for the current read
* @return perfect barcode string, if there was a match within tolerance, or null if not.
*/
private BarcodeMatch findBestBarcodeAndUpdateMetrics(final byte[][] readSubsequences,
final byte[][] qualityScores,
final boolean passingFilter,
final Map<String, BarcodeMetric> metrics,
final BarcodeMetric noMatchBarcodeMetric) {
BarcodeMetric bestBarcodeMetric = null;
int totalBarcodeReadBases = 0;
int numNoCalls = 0; // NoCalls are calculated for all the barcodes combined
for (final byte[] bc : readSubsequences) {
totalBarcodeReadBases += bc.length;
for (final byte b : bc) if (SequenceUtil.isNoCall(b)) ++numNoCalls;
}
// PIC-506 When forcing all reads to match a single barcode, allow a read to match even if every
// base is a mismatch.
int numMismatchesInBestBarcode = totalBarcodeReadBases + 1;
int numMismatchesInSecondBestBarcode = totalBarcodeReadBases + 1;
for (final BarcodeMetric barcodeMetric : metrics.values()) {
final int numMismatches = countMismatches(barcodeMetric.barcodeBytes, readSubsequences, qualityScores);
if (numMismatches < numMismatchesInBestBarcode) {
if (bestBarcodeMetric != null) {
numMismatchesInSecondBestBarcode = numMismatchesInBestBarcode;
}
numMismatchesInBestBarcode = numMismatches;
bestBarcodeMetric = barcodeMetric;
} else if (numMismatches < numMismatchesInSecondBestBarcode) {
numMismatchesInSecondBestBarcode = numMismatches;
}
}
final boolean matched = bestBarcodeMetric != null &&
numNoCalls <= MAX_NO_CALLS &&
numMismatchesInBestBarcode <= MAX_MISMATCHES &&
numMismatchesInSecondBestBarcode - numMismatchesInBestBarcode >= MIN_MISMATCH_DELTA;
final BarcodeMatch match = new BarcodeMatch();
// If we have something that's not a "match" but matches one barcode
// slightly, we output that matching barcode in lower case
if (numNoCalls + numMismatchesInBestBarcode < totalBarcodeReadBases) {
match.mismatches = numMismatchesInBestBarcode;
match.mismatchesToSecondBest = numMismatchesInSecondBestBarcode;
match.barcode = bestBarcodeMetric.BARCODE.toLowerCase().replaceAll(IlluminaUtil.BARCODE_DELIMITER, "");
}
else {
match.mismatches = totalBarcodeReadBases;
match.barcode = "";
}
if (matched) {
++bestBarcodeMetric.READS;
if (passingFilter) {
++bestBarcodeMetric.PF_READS;
}
if (numMismatchesInBestBarcode == 0) {
++bestBarcodeMetric.PERFECT_MATCHES;
if (passingFilter) {
++bestBarcodeMetric.PF_PERFECT_MATCHES;
}
} else if (numMismatchesInBestBarcode == 1) {
++bestBarcodeMetric.ONE_MISMATCH_MATCHES;
if (passingFilter) {
++bestBarcodeMetric.PF_ONE_MISMATCH_MATCHES;
}
}
match.matched = true;
match.barcode = bestBarcodeMetric.BARCODE.replaceAll(IlluminaUtil.BARCODE_DELIMITER, "");
}
else {
++noMatchBarcodeMetric.READS;
if (passingFilter) {
++noMatchBarcodeMetric.PF_READS;
}
}
return match;
}
/**
* Compare barcode sequence to bases from read
* @return how many bases did not match
*/
private int countMismatches(final byte[][] barcodeBytes, final byte[][] readSubsequence, final byte[][] qualities) {
int numMismatches = 0;
// Read sequence and barcode length may not be equal, so we just use the shorter of the two
for (int j = 0; j < barcodeBytes.length; j++) {
final int basesToCheck = Math.min(barcodeBytes[j].length, readSubsequence[j].length);
for (int i = 0; i < basesToCheck; ++i) {
if (!SequenceUtil.isNoCall(readSubsequence[j][i])) {
if (!SequenceUtil.basesEqual(barcodeBytes[j][i], readSubsequence[j][i])) ++numMismatches;
else if (qualities != null && qualities[j][i] < MINIMUM_BASE_QUALITY) ++numMismatches;
}
}
}
return numMismatches;
}
}
} |
package com.parc.ccn.security.access;
import java.io.IOException;
import java.security.InvalidKeyException;
import java.security.Key;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.security.SecureRandom;
import java.security.spec.AlgorithmParameterSpec;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import javax.crypto.IllegalBlockSizeException;
import javax.crypto.NoSuchPaddingException;
import javax.crypto.spec.SecretKeySpec;
import javax.jcr.AccessDeniedException;
import javax.xml.stream.XMLStreamException;
import org.bouncycastle.crypto.InvalidCipherTextException;
import com.parc.ccn.Library;
import com.parc.ccn.config.ConfigurationException;
import com.parc.ccn.data.ContentName;
import com.parc.ccn.data.content.CollectionData;
import com.parc.ccn.data.content.LinkReference;
import com.parc.ccn.data.security.PublicKeyObject;
import com.parc.ccn.data.security.PublisherID;
import com.parc.ccn.data.security.PublisherPublicKeyDigest;
import com.parc.ccn.data.security.WrappedKey;
import com.parc.ccn.data.security.WrappedKey.WrappedKeyObject;
import com.parc.ccn.data.util.DataUtils;
import com.parc.ccn.library.CCNLibrary;
import com.parc.ccn.library.EnumeratedNameList;
import com.parc.ccn.library.profiles.AccessControlProfile;
import com.parc.ccn.library.profiles.VersioningProfile;
import com.parc.ccn.security.access.ACL.ACLObject;
import com.parc.ccn.security.keys.KeyManager;
public class AccessControlManager {
/**
* Default data key length in bytes. No real reason this can't be bumped up to 32. It
* acts as the seed for a KDF, not an encryption key.
*/
public static final int DEFAULT_DATA_KEY_LENGTH = 16;
/**
* The keys we're wrapping are really seeds for a KDF, not keys in their own right.
* Eventually we'll use CMAC, so call them AES...
*/
public static final String DEFAULT_DATA_KEY_ALGORITHM = "AES";
/**
* This algorithm must be capable of key wrap (RSA, ElGamal, etc).
*/
public static final String DEFAULT_GROUP_KEY_ALGORITHM = "RSA";
public static final int DEFAULT_GROUP_KEY_LENGTH = 1024;
public static final String DATA_KEY_LABEL = "Data Key";
public static final String NODE_KEY_LABEL = "Node Key";
public static final long DEFAULT_TIMEOUT = 1000;
private ContentName _namespace;
private ContentName _groupStorage;
private EnumeratedNameList _groupList;
private ContentName _userStorage;
private EnumeratedNameList _userList;
// The groups whose membership information I've bothered to pull.
private HashMap<String, Group> _groupCache = new HashMap<String, Group>();
private HashSet<String> _myGroupMemberships = new HashSet<String>();
private HashSet<ContentName> _myIdentities = new HashSet<ContentName>();
private KeyCache _keyCache = new KeyCache();
private CCNLibrary _library;
private SecureRandom _random = new SecureRandom();
public AccessControlManager(ContentName namespace) throws ConfigurationException, IOException {
this(namespace, AccessControlProfile.groupNamespaceName(namespace), AccessControlProfile.userNamespaceName(namespace));
}
public AccessControlManager(ContentName namespace, ContentName groupStorage, ContentName userStorage) throws ConfigurationException, IOException {
_namespace = namespace;
_groupStorage = groupStorage;
_userStorage = userStorage;
_library = CCNLibrary.open();
// start enumerating groups and users in the background
groupList();
userList();
// DKS TODO here, check for a namespace marker, and if one not there, write it (async)
}
public void publishIdentity(ContentName identity, PublisherPublicKeyDigest myPublicKey) throws InvalidKeyException, IOException, ConfigurationException {
KeyManager km = KeyManager.getKeyManager();
if (null == myPublicKey) {
myPublicKey = km.getDefaultKeyID();
}
km.publishKey(identity, myPublicKey);
_myIdentities.add(identity);
}
public void publishIdentity(String userName, PublisherPublicKeyDigest myPublicKey) throws InvalidKeyException, IOException, ConfigurationException {
publishIdentity(AccessControlProfile.userNamespaceName(_userStorage, userName), myPublicKey);
}
public boolean haveIdentity(String userName) {
return _myIdentities.contains(AccessControlProfile.userNamespaceName(_userStorage, userName));
}
public boolean haveIdentity(ContentName userName) {
return _myIdentities.contains(userName);
}
/**
* Labels for deriving various types of keys.
* @return
*/
public String dataKeyLabel() {
return DATA_KEY_LABEL;
}
public String nodeKeyLabel() {
return NODE_KEY_LABEL;
}
CCNLibrary library() { return _library; }
KeyCache keyCache() { return _keyCache; }
public EnumeratedNameList groupList() throws IOException {
if (null == _groupList) {
_groupList = new EnumeratedNameList(_groupStorage, _library);
}
return _groupList;
}
public EnumeratedNameList userList() throws IOException {
if (null == _userList) {
_userList = new EnumeratedNameList(_userStorage, _library);
}
return _userList;
}
public boolean inProtectedNamespace(ContentName content) {
return _namespace.isPrefixOf(content);
}
public Group getGroup(String groupFriendlyName) throws IOException {
Group theGroup = _groupCache.get(groupFriendlyName);
if ((null == theGroup) && (groupList().hasChild(groupFriendlyName))) {
// Only go hunting for it if we think it exists, otherwise we'll block.
synchronized(_groupCache) {
theGroup = _groupCache.get(groupFriendlyName);
if (null == theGroup) {
theGroup = new Group(_groupStorage, groupFriendlyName, _library);
// wait for group to be ready?
_groupCache.put(groupFriendlyName, theGroup);
}
}
}
// either we've got it, or we don't believe it exists.
// DKS startup transients? do we need to block for group list?
return theGroup;
}
public void cacheGroup(Group newGroup) {
synchronized(_groupCache) {
_groupCache.put(newGroup.friendlyName(), newGroup);
}
}
public Group createGroup(String groupFriendlyName, ArrayList<LinkReference> newMembers) throws XMLStreamException, IOException {
Group existingGroup = getGroup(groupFriendlyName);
if (null != existingGroup) {
existingGroup.setMembershipList(newMembers);
return existingGroup;
} else {
// Need to make key pair, directory, and store membership list.
MembershipList ml =
new MembershipList(
AccessControlProfile.groupMembershipListName(_groupStorage, groupFriendlyName),
new CollectionData(newMembers), _library);
Group newGroup = new Group(_groupStorage, groupFriendlyName, ml, _library);
cacheGroup(newGroup);
// If I'm a group member (I end up knowing the private key of the group if I
// created it, but I could simply forget it...).
if (amCurrentGroupMember(newGroup)) {
_myGroupMemberships.add(groupFriendlyName);
}
return newGroup;
}
}
public Group modifyGroup(String friendlyName, ArrayList<LinkReference> membersToAdd, ArrayList<LinkReference> membersToRemove) throws XMLStreamException, IOException {
Group theGroup = getGroup(friendlyName);
// DKS we really want to be sure we get the group if it's out there...
if (null != theGroup) {
Library.logger().info("Got existing group to modify: " + theGroup);
theGroup.modify(membersToAdd, membersToRemove);
} else {
Library.logger().info("No existing group to modify: " + friendlyName + " adding new one.");
theGroup = createGroup(friendlyName, membersToAdd);
}
return theGroup;
}
public Group addUsers(String friendlyName, ArrayList<LinkReference> newUsers) throws XMLStreamException, IOException {
return modifyGroup(friendlyName, newUsers, null);
}
public Group removeUsers(String friendlyName, ArrayList<LinkReference> removedUsers) throws XMLStreamException, IOException {
return modifyGroup(friendlyName, null, removedUsers);
}
public void deleteGroup(String friendlyName) throws IOException {
Group existingGroup = getGroup(friendlyName);
// DKS we really want to be sure we get the group if it's out there...
if (null != existingGroup) {
Library.logger().info("Got existing group to delete: " + existingGroup);
existingGroup.delete();
} else {
Library.logger().warning("No existing group: " + friendlyName + ", ignoring delete request.");
}
}
/**
* Does this member refer to a user or a group. Groups have to be in the
* group namespace, users can be anywhere.
* @param member
* @return
*/
public boolean isGroup(LinkReference member) {
return _groupStorage.isPrefixOf(member.targetName());
}
public boolean isGroup(String principal) {
return _groupList.hasChild(principal);
}
public boolean haveKnownGroupMemberships() {
return _myGroupMemberships.size() > 0;
}
public boolean amKnownGroupMember(String principal) {
return _myGroupMemberships.contains(principal);
}
public boolean amCurrentGroupMember(String principal) throws IOException, XMLStreamException {
return amCurrentGroupMember(getGroup(principal));
}
/**
* Start out doing this the slow and simple way. Optimize later.
* @param group
* @return
* @throws IOException
* @throws XMLStreamException
*/
public boolean amCurrentGroupMember(Group group) throws IOException, XMLStreamException {
MembershipList ml = group.membershipList(); // will update
for (LinkReference lr : ml.membershipList().contents()) {
if (isGroup(lr)) {
String groupFriendlyName = AccessControlProfile.groupNameToFriendlyName(lr.targetName());
if (amCurrentGroupMember(groupFriendlyName)) {
_myGroupMemberships.add(groupFriendlyName);
return true;
} else {
// Don't need to test first. Won't remove if isn't there.
_myGroupMemberships.remove(groupFriendlyName);
}
} else {
// Not a group. Is it me?
if (haveIdentity(lr.targetName())) {
return true;
}
}
}
return false;
}
/**
* I already believe I should have access to this private key.
* @param group
* @param privateKeyVersion
* @return
* @throws XMLStreamException
* @throws IOException
* @throws InvalidCipherTextException
* @throws AccessDeniedException
* @throws InvalidKeyException
*/
public PrivateKey getGroupPrivateKey(String groupFriendlyName, Timestamp privateKeyVersion) throws InvalidKeyException, InvalidCipherTextException, IOException, XMLStreamException {
// Heuristic check
if (!amKnownGroupMember(groupFriendlyName)) {
Library.logger().info("Unexpected: we don't think we're a group member of group " + groupFriendlyName);
}
// Need to get the KeyDirectory for this version of the private key, or the
// latest if no version given.
KeyDirectory privateKeyDirectory = null;
PublicKey theGroupPublicKey = null;
if (null == privateKeyVersion) {
Group theGroup = getGroup(groupFriendlyName); // will pull latest public key
privateKeyDirectory = theGroup.privateKeyDirectory(this);
theGroupPublicKey = theGroup.publicKey();
} else {
// Assume one is there...
ContentName versionedPublicKeyName =
VersioningProfile.versionName(
AccessControlProfile.groupPublicKeyName(_groupStorage, groupFriendlyName),
privateKeyVersion);
privateKeyDirectory =
new KeyDirectory(this,
AccessControlProfile.groupPrivateKeyDirectory(versionedPublicKeyName), _library);
PublicKeyObject thisPublicKey = new PublicKeyObject(versionedPublicKeyName, _library);
theGroupPublicKey = thisPublicKey.publicKey();
}
if (null == privateKeyDirectory) {
Library.logger().info("Unexpected: null private key directory for group " + groupFriendlyName + " version " + privateKeyVersion + " as stamp " +
DataUtils.printHexBytes(DataUtils.timestampToBinaryTime12(privateKeyVersion)));
return null;
}
PrivateKey privateKey = privateKeyDirectory.getPrivateKey();
if (null != privateKey) {
keyCache().addPrivateKey(privateKeyDirectory.getName(), PublisherID.generatePublicKeyDigest(theGroupPublicKey),
privateKey);
}
return privateKey;
}
/**
* We might or might not still be a member of this group, or be a member
* again. This merely removes our cached notion that we are a member.
* @param principal
*/
public void removeGroupMembership(String principal) {
_myGroupMemberships.remove(principal);
}
protected Key getVersionedPrivateKeyForGroup(KeyDirectory keyDirectory, String principal) throws IOException, InvalidKeyException, AccessDeniedException, InvalidCipherTextException, XMLStreamException {
Key privateKey = getGroupPrivateKey(principal, keyDirectory.getPrincipals().get(principal));
if (null == privateKey) {
Library.logger().info("Unexpected: we beleive we are a member of group " + principal + " but cannot retrieve private key version: " + keyDirectory.getPrincipals().get(principal) + " our membership revoked?");
// Check to see if we are a current member.
if (!amCurrentGroupMember(principal)) {
// Removes this group from my list of known groups, adds it to my
// list of groups I don't believe I'm a member of.
removeGroupMembership(principal);
}
}
return privateKey;
}
/**
* Retrieves the latest version of an ACL effective at this node, either stored
* here or at one of its ancestors.
* @param nodeName
* @return
* @throws ConfigurationException
* @throws IOException
* @throws XMLStreamException
*/
public ACLObject getEffectiveACLObject(ContentName nodeName) throws XMLStreamException, IOException {
// Find the closest node that has a non-gone ACL
ACLObject aclo = findAncestorWithACL(nodeName);
if (null == aclo) {
Library.logger().warning("Unexpected: cannot find an ancestor of node " + nodeName + " that has an ACL.");
throw new IOException("Unexpected: cannot find an ancestor of node " + nodeName + " that has an ACL.");
}
return aclo;
}
private ACLObject findAncestorWithACL(ContentName dataNodeName) throws XMLStreamException, IOException {
ACLObject ancestorACLObject = null;
ContentName parentName = dataNodeName;
ContentName nextParentName = null;
while (null == ancestorACLObject) {
ancestorACLObject = getACLObjectForNodeIfExists(parentName);
if ((null != ancestorACLObject) && (ancestorACLObject.isGone())) {
Library.logger().info("Found an ACL object at " + ancestorACLObject.getName() + " but its GONE.");
ancestorACLObject = null;
}
nextParentName = parentName.parent();
if (nextParentName.equals(parentName)) {
break;
}
parentName = nextParentName;
}
if (null == ancestorACLObject) {
throw new IllegalStateException("No ACL available in ancestor tree for node : " + dataNodeName);
}
Library.logger().info("Found ACL for " + dataNodeName + " at ancestor :" + ancestorACLObject.getName());
return ancestorACLObject;
}
/**
* Try to pull an acl for a particular node. If it doesn't exist, will time
* out. Use enumeration to decide whether to call this to avoid the timeout.
* @param aclNodeName
* @return
* @throws IOException
* @throws XMLStreamException
*/
public ACLObject getACLObjectForNode(ContentName aclNodeName) throws XMLStreamException, IOException {
// Get the latest version of the acl. We don't care so much about knowing what version it was.
ACLObject aclo = new ACLObject(AccessControlProfile.aclName(aclNodeName), _library);
aclo.update();
// if there is no update, this will probably throw an exception -- IO or XMLStream
if (aclo.isGone()) {
// treat as if no acl on node
return null;
}
return aclo;
}
public ACLObject getACLObjectForNodeIfExists(ContentName aclNodeName) throws XMLStreamException, IOException {
EnumeratedNameList aclNameList = EnumeratedNameList.exists(AccessControlProfile.aclName(aclNodeName), aclNodeName, _library);
if (null != aclNameList) {
ContentName aclName = new ContentName(AccessControlProfile.aclName(aclNodeName),
aclNameList.getLatestVersionChildName().lastComponent());
Library.logger().info("Found latest version of acl for " + aclNodeName + " at " + aclName);
ACLObject aclo = new ACLObject(aclName, _library);
aclo.update();
if (aclo.isGone())
return null;
return aclo;
}
Library.logger().info("No ACL found on node: " + aclNodeName);
return null;
}
public ACL getEffectiveACL(ContentName nodeName) throws XMLStreamException, IOException {
ACLObject aclo = getEffectiveACLObject(nodeName);
if (null != aclo) {
return aclo.acl();
}
return null;
}
/**
* @throws InvalidKeyException
* Adds an ACL to a node that doesn't have one, or replaces one that exists.
* Just writes, doesn't bother to look at any current ACL. Does need to pull
* the effective node key at this node, though, to wrap the old ENK in a new
* node key.
* @throws IOException
* @throws XMLStreamException
* @throws
*/
public ACL setACL(ContentName nodeName, ACL newACL) throws XMLStreamException, IOException, InvalidKeyException {
NodeKey effectiveNodeKey = getEffectiveNodeKey(nodeName);
// generates the new node key, wraps it under the new acl, and wraps the old node key
generateNewNodeKey(nodeName, effectiveNodeKey, newACL);
// write the acl
ACLObject aclo = new ACLObject(AccessControlProfile.aclName(nodeName), newACL, _library);
// DKS FIX REPO WRITE
aclo.save();
return aclo.acl();
}
/**
* Pulls the ACL for this node, if one exists, and modifies it to include
* the following changes, then stores the result using setACL.
* @throws IOException
* @throws XMLStreamException
* @throws InvalidKeyException
*/
public ACL updateACL(ContentName nodeName,
ArrayList<LinkReference> addReaders, ArrayList<LinkReference> removeReaders,
ArrayList<LinkReference> addWriters, ArrayList<LinkReference> removeWriters,
ArrayList<LinkReference> addManagers, ArrayList<LinkReference> removeManagers) throws XMLStreamException, IOException, InvalidKeyException {
ACLObject currentACL = getACLObjectForNodeIfExists(nodeName);
ACL newACL = null;
if (null != currentACL) {
newACL = currentACL.acl();
} else {
newACL = new ACL();
}
// TODO Now update ACL to add and remove values.
// Managers are a subset of writers are a subset of readers. So if you remove someone
// as a reader, you remove them whether they are a reader, manager or writer.
// If you remove someone as a writer, you remove them whether they are a manager or a writer.
// Set the ACL and update the node key.
return setACL(nodeName, newACL);
}
public ACL addReaders(ContentName nodeName, ArrayList<LinkReference> newReaders) throws InvalidKeyException, XMLStreamException, IOException {
return updateACL(nodeName, newReaders, null, null, null, null, null);
}
public ACL addWriters(ContentName nodeName, ArrayList<LinkReference> newWriters) throws InvalidKeyException, XMLStreamException, IOException {
return updateACL(nodeName, null, null, newWriters, null, null, null);
}
public ACL addManagers(ContentName nodeName, ArrayList<LinkReference> newManagers) throws InvalidKeyException, XMLStreamException, IOException {
return updateACL(nodeName, null, null, null, null, newManagers, null);
}
/**
*
* Get the ancestor node key in force at this node (if we can decrypt it).
* @param nodeName
* @return null means while node keys exist, we can't decrypt any of them --
* we have no read access to this node (which implies no write access)
* @throws IOException if something is wrong (e.g. no node keys at all)
*/
protected NodeKey findAncestorWithNodeKey(ContentName nodeName) throws IOException {
// TODO Auto-generated method stub
// climb up looking for node keys, then make sure that one isn't GONE
// if it isn't, call read-side routine to figure out how to decrypt it
return null;
}
/**
* Write path: get the latest node key.
* @param nodeName
* @return
* @throws IOException
* @throws InvalidKeyException
* @throws InvalidCipherTextException
* @throws XMLStreamException
*/
public NodeKey getLatestNodeKeyForNode(ContentName nodeName) throws IOException, InvalidKeyException, InvalidCipherTextException, XMLStreamException {
// First we need to figure out what the latest version is of the node key.
ContentName nodeKeyVersionedName =
EnumeratedNameList.getLatestVersionName(AccessControlProfile.nodeKeyName(nodeName), _library);
// then, pull the node key we can decrypt
return getNodeKeyByVersionedName(nodeKeyVersionedName, null);
}
/**
* Read path:
* Retrieve a specific node key from a given location, as specified by a
* key it was used to wrap, and, if possible, find a key we can use to
* unwrap the node key.
*
* Throw an exception if there is no node key block at the appropriate name.
* @param nodeKeyName
* @param nodeKeyIdentifier
* @return
* @throws IOException
* @throws XMLStreamException
* @throws InvalidCipherTextException
* @throws InvalidKeyException
*/
public NodeKey getSpecificNodeKey(ContentName nodeKeyName, byte [] nodeKeyIdentifier) throws InvalidKeyException, InvalidCipherTextException, XMLStreamException, IOException {
if ((null == nodeKeyName) && (null == nodeKeyIdentifier)) {
throw new IllegalArgumentException("Node key name and identifier cannot both be null!");
}
// We should know what node key to use (down to the version), but we have to find the specific
// wrapped key copy we can decrypt.
NodeKey nk = getNodeKeyByVersionedName(nodeKeyName, nodeKeyIdentifier);
if (null == nk) {
Library.logger().warning("No decryptable node key available at " + nodeKeyName + ", access denied.");
return null;
}
return nk;
}
/**
* We have the name of a specific version of a node key. Now we just need to figure
* out which of our keys can be used to decrypt it.
* @param nodeKeyName
* @param nodeKeyIdentifier
* @return
* @throws IOException
* @throws XMLStreamException
* @throws InvalidKeyException
* @throws InvalidCipherTextException
*/
NodeKey getNodeKeyByVersionedName(ContentName nodeKeyName, byte [] nodeKeyIdentifier) throws XMLStreamException, IOException, InvalidKeyException, InvalidCipherTextException {
NodeKey nk = null;
KeyDirectory keyDirectory = null;
try {
keyDirectory = new KeyDirectory(this, nodeKeyName, _library);
// this will handle the caching.
Key unwrappedKey = keyDirectory.getUnwrappedKey(nodeKeyIdentifier);
if (null != unwrappedKey) {
nk = new NodeKey(nodeKeyName, unwrappedKey);
}
} finally {
if (null != keyDirectory) {
keyDirectory.stopEnumerating();
}
}
return nk;
}
/**
* Write path:
* Get the effective node key in force at this node, used to derive keys to
* encrypt content. Vertical chaining.
* @throws XMLStreamException
* @throws InvalidKeyException
* @throws IOException
*/
public NodeKey getEffectiveNodeKey(ContentName nodeName) throws InvalidKeyException, XMLStreamException, IOException {
// Get the ancestor node key in force at this node.
NodeKey nodeKey = findAncestorWithNodeKey(nodeName);
if (null == nodeKey) {
// TODO no access
throw new IllegalStateException("Cannot retrieve node key for node: " + nodeName + ".");
}
NodeKey effectiveNodeKey = nodeKey.computeDescendantNodeKey(nodeName, nodeKeyLabel());
Library.logger().info("Computing effective node key for " + nodeName + " using stored node key " + effectiveNodeKey.storedNodeKeyName());
return effectiveNodeKey;
}
/**
* Do we need to update this node key?
* @param theNodeKey
* @return
*/
public boolean nodeKeyIsDirty(NodeKey theNodeKey) {
}
/**
* We've looked for a node key we can decrypt at the expected node key location,
* but no dice. See if a new ACL has been interposed granting us rights at a lower
* portion of the tree.
* @param dataNodeName
* @param wrappingKeyName
* @param wrappingKeyIdentifier
* @return
* @throws IOException
* @throws XMLStreamException
* @throws InvalidCipherTextException
* @throws InvalidKeyException
*/
protected NodeKey getNodeKeyUsingInterposedACL(ContentName dataNodeName,
ContentName wrappingKeyName, byte[] wrappingKeyIdentifier) throws XMLStreamException, IOException, InvalidKeyException, InvalidCipherTextException {
ACLObject nearestACL = findAncestorWithACL(dataNodeName);
if (null == nearestACL) {
Library.logger().warning("Unexpected -- node with no ancestor ACL: " + dataNodeName);
// no dice
return null;
}
if (nearestACL.equals(AccessControlProfile.accessRoot(wrappingKeyName))) {
Library.logger().info("Node key: " + wrappingKeyName + " is the nearest ACL to " + dataNodeName);
return null;
}
NodeKey nk = getLatestNodeKeyForNode(AccessControlProfile.accessRoot(nearestACL.getName()));
return nk;
}
/**
* Make a new node key, encrypt it under the given ACL, and wrap its previous node key.
* Put all the blocks into the aggregating writer, but don't flush.
* @param nodeName
* @param effectiveNodeKey
* @param newACL
*/
protected void generateNewNodeKey(ContentName nodeName, NodeKey effectiveNodeKey, ACL effectiveACL) {
// TODO Auto-generated method stub
}
/**
 * Find a node key capable of decrypting the given wrapped-key object, and specialize
 * it for the given node.
 * First tries the location named by the wrapped key itself; if that key is not
 * readable by us, falls back to looking for an interposed ACL that may grant us
 * access lower in the tree.
 * @param nodeName the node whose data the wrapped key protects
 * @param wko the wrapped key object naming its wrapping key
 * @return the effective (descendant) node key, or null if we cannot read any
 *         applicable node key; the caller is expected to raise the access exception
 * @throws InvalidKeyException
 * @throws XMLStreamException
 * @throws InvalidCipherTextException
 * @throws IOException
 */
public NodeKey getNodeKeyForObject(ContentName nodeName, WrappedKeyObject wko) throws InvalidKeyException, XMLStreamException, InvalidCipherTextException, IOException {
	// Start where the data key says its wrapping key lives, and try to decrypt it there.
	NodeKey storedNodeKey = getSpecificNodeKey(wko.wrappedKey().wrappingKeyName(),
			wko.wrappedKey().wrappingKeyIdentifier());
	if (null == storedNodeKey) {
		// A missing key would have thrown, so reaching here means the key exists but we
		// cannot read it. Our only remaining hope is an ACL interposed between us and
		// the node key that does grant us rights.
		storedNodeKey = getNodeKeyUsingInterposedACL(nodeName, wko.wrappedKey().wrappingKeyName(),
				wko.wrappedKey().wrappingKeyIdentifier());
	}
	if (null == storedNodeKey) {
		// Still nothing readable. Give up; caller raises the access exception.
		return null;
	}
	// Specialize the recovered node key for this particular node.
	return storedNodeKey.computeDescendantNodeKey(nodeName, dataKeyLabel());
}
/**
 * Used by content reader to retrieve the keys necessary to decrypt this content
 * under this access control model.
 * Given a data location, pull the data key block and decrypt it using
 * whatever node keys are necessary.
 * To turn the result of this into a key for decrypting content,
 * follow the steps in the comments to {@link #generateAndStoreDataKey(ContentName)}.
 * @param dataNodeName the content node whose data key should be retrieved
 * @return the raw (unwrapped) data key bytes, or null if the wrapped key block is
 *         missing or no node key we can read is able to unwrap it
 * @throws IOException
 * @throws XMLStreamException
 * @throws InvalidKeyException
 * @throws InvalidCipherTextException
 */
public byte [] getDataKey(ContentName dataNodeName) throws XMLStreamException, IOException, InvalidKeyException, InvalidCipherTextException {
	// Fetch the wrapped data key block stored at the standard data key location.
	WrappedKeyObject wrappedDataKeyObject = new WrappedKeyObject(AccessControlProfile.dataKeyName(dataNodeName), _library);
	wrappedDataKeyObject.update();
	if (null == wrappedDataKeyObject.wrappedKey()) {
		Library.logger().warning("Could not retrieve data key for node: " + dataNodeName);
		return null;
	}
	// Locate a node key we are allowed to read that can unwrap this data key.
	NodeKey effectiveNodeKey = getNodeKeyForObject(dataNodeName, wrappedDataKeyObject);
	if (null == effectiveNodeKey) {
		return null;
	}
	Key dataKey = wrappedDataKeyObject.wrappedKey().unwrapKey(effectiveNodeKey.nodeKey());
	return dataKey.getEncoded();
}
/**
 * Wrap a data key under the effective node key for its node, and store it at the
 * standard data key location for that node.
 * @param dataNodeName the content node this data key belongs to (the raw node name,
 *        NOT the derived data key name -- storeKeyContent derives that itself)
 * @param newRandomDataKey the raw data key bytes to wrap and store
 * @throws InvalidKeyException
 * @throws XMLStreamException
 * @throws IOException
 * @throws IllegalStateException if no effective node key can be retrieved for the node
 */
public void storeDataKey(ContentName dataNodeName, byte [] newRandomDataKey) throws InvalidKeyException, XMLStreamException, IOException {
	NodeKey effectiveNodeKey = getEffectiveNodeKey(dataNodeName);
	if (null == effectiveNodeKey) {
		throw new IllegalStateException("Cannot retrieve effective node key for node: " + dataNodeName + ".");
	}
	Library.logger().info("Wrapping data key for node: " + dataNodeName + " with effective node key for node: " +
						  effectiveNodeKey.nodeName() + " derived from stored node key for node: " +
						  effectiveNodeKey.storedNodeKeyName());
	WrappedKey wrappedDataKey = WrappedKey.wrapKey(new SecretKeySpec(newRandomDataKey, DEFAULT_DATA_KEY_ALGORITHM),
												   null, dataKeyLabel(),
												   effectiveNodeKey.nodeKey());
	// Record which node key wrapped this data key so readers can find/unwrap it later.
	wrappedDataKey.setWrappingKeyIdentifier(effectiveNodeKey.storedNodeKeyID());
	wrappedDataKey.setWrappingKeyName(effectiveNodeKey.storedNodeKeyName());
	// BUG FIX: pass the raw data node name. storeKeyContent() applies
	// AccessControlProfile.dataKeyName() itself, so passing the already-derived key
	// name here stored the block under dataKeyName(dataKeyName(node)) while
	// getDataKey() reads from dataKeyName(node).
	storeKeyContent(dataNodeName, wrappedDataKey);
}
/**
 * Generate a random data key, store it, and return it to use to derive keys to encrypt
 * content. All that's left is to call
 * byte [] randomDataKey = generateAndStoreDataKey(dataNodeName);
 * byte [][] keyandiv =
 * 		KeyDerivationFunction.DeriveKeyForObject(randomDataKey, keyLabel,
 * 												 dataNodeName, dataPublisherPublicKeyDigest)
 * and then give keyandiv to the segmenter to encrypt the data.
 * @param dataNodeName the content node for which to generate and store a data key
 * @return the freshly generated raw data key bytes
 * @throws InvalidKeyException
 * @throws XMLStreamException
 * @throws IOException
 **/
public byte [] generateAndStoreDataKey(ContentName dataNodeName) throws InvalidKeyException, XMLStreamException, IOException {
	// Generate new random data key of appropriate length
	byte [] dataKey = new byte[DEFAULT_DATA_KEY_LENGTH];
	_random.nextBytes(dataKey);
	// BUG FIX: storeDataKey() expects the content node name itself -- it computes the
	// effective node key for that node and derives the storage name internally.
	// Passing AccessControlProfile.dataKeyName(dataNodeName) here made it compute the
	// node key for the wrong node and store the key under a doubly-derived name that
	// getDataKey() would never look up.
	storeDataKey(dataNodeName, dataKey);
	return dataKey;
}
/**
 * Actual output functions. Needs to get this into the repo.
 * Derives the standard data key name from the given node name and saves the wrapped
 * key block there (currently via a plain save; see DKS note below).
 * @param dataNodeName -- the content node for whom this is the data key.
 * @param wrappedKey the wrapped data key to store
 * @throws IOException
 * @throws XMLStreamException
 */
private void storeKeyContent(ContentName dataNodeName,
WrappedKey wrappedKey) throws XMLStreamException, IOException {
// DKS FIX FOR REPO
// NOTE(review): dataKeyName() is applied here, but storeDataKey() passes in a name that
// was already run through dataKeyName() -- so the key is stored under a doubly-derived
// name while getDataKey() reads the singly-derived one. Confirm which layer should own
// the derivation.
WrappedKeyObject wko = new WrappedKeyObject(AccessControlProfile.dataKeyName(dataNodeName), wrappedKey, _library);
wko.save();
}
} |
package org.javarosa.demo.shell;
import java.util.Hashtable;
import javax.microedition.lcdui.Displayable;
import org.javarosa.core.Context;
import org.javarosa.core.JavaRosaPlatform;
import org.javarosa.core.api.Constants;
import org.javarosa.core.api.IModule;
import org.javarosa.core.api.IShell;
import org.javarosa.core.model.storage.FormDataRMSUtility;
import org.javarosa.core.model.storage.FormDefRMSUtility;
import org.javarosa.core.util.WorkflowStack;
import org.javarosa.demo.module.FormListModule;
import org.javarosa.demo.module.SplashScreenModule;
import org.javarosa.xform.util.XFormUtils;
/**
* This is the shell for the JavaRosa demo that handles switching all of the views
* @author Brian DeRenzi
*
*/
public class JavaRosaDemoShell implements IShell {
// List of views that are used by this shell
FormListModule formModule = null;
SplashScreenModule splashScreen = null;
WorkflowStack stack;
Context context;
IModule currentModule;
public JavaRosaDemoShell() {
stack = new WorkflowStack();
context = new Context();
}
public void exitShell() {
}
public void run() {
init();
this.splashScreen = new SplashScreenModule(this, "/splash.gif");
this.formModule = new FormListModule(this,"Forms List");
this.splashScreen.start(context);
currentModule = splashScreen;
// switchView(ViewTypes.FORM_LIST);
}
private void init() {
FormDataRMSUtility formData = new FormDataRMSUtility("FormDataRMS");
FormDefRMSUtility formDef = new FormDefRMSUtility("FormDefRMS");
// For now let's add the dummy form.
if (formDef.getNumberOfRecords() == 0) {
formDef.writeToRMS(XFormUtils
.getFormFromResource("/hmis-a_draft.xhtml"));
formDef.writeToRMS(XFormUtils
.getFormFromResource("/hmis-b_draft.xhtml"));
formDef.writeToRMS(XFormUtils
.getFormFromResource("/shortform.xhtml"));
}
JavaRosaPlatform.instance().getStorageManager().getRMSStorageProvider()
.registerRMSUtility(formData);
JavaRosaPlatform.instance().getStorageManager().getRMSStorageProvider()
.registerRMSUtility(formDef);
}
private void workflow(IModule lastModule, String cmd, Hashtable returnVals) {
//TODO: parse any returnvals into context
if(stack.size() != 0) {
stack.pop().resume(context);
}
// TODO Auto-generated method stub
if( lastModule == this.splashScreen ) {
this.formModule.start(context);
currentModule = formModule;
}
}
/* (non-Javadoc)
* @see org.javarosa.shell.IShell#moduleCompeleted(org.javarosa.module.IModule)
*/
public void returnFromModule(IModule module, String returnCode, Hashtable returnVals) {
module.halt();
if(returnCode != Constants.MODULE_COMPLETE) {
stack.push(module);
}
workflow(module, returnCode, returnVals);
}
public void setDisplay(IModule callingModule, Displayable display) {
if(callingModule == currentModule) {
JavaRosaPlatform.instance().getDisplay().setCurrent(display);
}
}
} |
package alma.acs.logging;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Filter;
import java.util.logging.Level;
import java.util.logging.LogRecord;
import java.util.logging.Logger;
import alma.acs.logging.adapters.JacORBFilter;
import alma.acs.logging.config.LogConfig;
import alma.acs.logging.config.LogConfigSubscriber;
import alma.maci.loggingconfig.NamedLogger;
import alma.maci.loggingconfig.UnnamedLogger;
/**
* A <code>Logger</code> that attaches additional information to the produced <code>LogRecord</code>s.
* <p>
* Design note: the additional data (thread name, line of code) are really most interesting for the remotely sent log messages.
* Thus an alternative implementation could put the code from {@link #log(LogRecord)} into class {@link alma.acs.logging.AcsLoggingHandler},
* and not use a custom Logger class at all.
* The main reason we do it anyway is to avoid throwing the dummy exception (that delivers the stack trace) twice.
*
* @author hsommer
* created May 30, 2005 4:09:47 PM
*/
public class AcsLogger extends Logger implements LogConfigSubscriber {
// private in base class, need to redeclare here
protected static final int offValue = Level.OFF.intValue();
/** the logger class, which must be known to unwind the stack trace. Will be this class unless we use delegation. */
private Set<String> loggerClassNames = new HashSet<String>();
private String loggerName;
public AcsLogger(String name, String resourceBundleName, LogConfig logConfig) {
super(name, resourceBundleName);
addLoggerClass(getClass());
addLoggerClass(Logger.class);
logConfig.addSubscriber(this);
configureLogging(logConfig);
}
/**
* Optionally sets a logger name that can be different from the {@link Logger#name} passed in the constructor.
* The new name will be used for the <code>LogRecord</code>s produced by this class.
* This allows changing the name later on, e.g. when a container name or JUnit test name should be prepended to the simple name of a Corba logger.
* @param loggerName
*/
void setLoggerName(String loggerName) {
this.loggerName = loggerName;
}
/**
* Logs the given <code>LogRecord</code>.
* The record can be modified or dropped by the optional filters provided in {@link #addLogRecordFilter(alma.acs.logging.AcsLogger.LogRecordFilter)}.
* <p>
* Adding of context information:
* <ul>
* <li> If the LogRecord has a parameter that is a map which contains additional information
* about the line of code, thread, etc., the log record will be taken as provided, and no context
* information will be added. This can be useful if
* <ul>
* <li> the log record was reconstructed from a remote error by the ACS error handling code
* (see <code>AcsJException</code>), or
* <li> if in very exceptional cases application code needs to manipulate such information by hand.
* </ul>
* <li> otherwise, context information is inferred, similar to {@link LogRecord#inferCaller()},
* but additionally including thread name and line of code.
* </ul>
* Note that by overloading this method, we intercept all logging activities of the base class.
*
* @see java.util.logging.Logger#log(java.util.logging.LogRecord)
*/
public void log(LogRecord record) {
// Level could be null and must be inherited from the ancestor loggers,
// e.g. during JDK shutdown when the log level is nulled by the JDK LogManager
Logger loggerWithLevel = this;
while (loggerWithLevel.getLevel() == null) {
loggerWithLevel = loggerWithLevel.getParent();
}
// filter by log level to avoid unnecessary retrieval of context data.
// The same check will be repeated by the base class implementation of this method that gets called afterwards.
int levelValue = loggerWithLevel.getLevel().intValue();
if (record.getLevel().intValue() < levelValue || levelValue == offValue) {
return;
}
// modify the logger name if necessary
if (loggerName != null) {
record.setLoggerName(loggerName);
}
// check if this record alreay has the context data attached which ACS needs but the JDK logging API does not provide
LogParameterUtil paramUtil = new LogParameterUtil(record);
Map<String, Object> specialProperties = paramUtil.extractSpecialPropertiesMap();
if (specialProperties == null) {
// we prepend the special properties map to the other parameters
specialProperties = LogParameterUtil.createPropertiesMap();
List<Object> paramList = paramUtil.getNonSpecialPropertiesMapParameters();
paramList.add(0, specialProperties);
record.setParameters(paramList.toArray() );
String threadName = Thread.currentThread().getName();
specialProperties.put(LogParameterUtil.PARAM_THREAD_NAME, threadName);
// Get the stack trace
StackTraceElement stack[] = (new Throwable()).getStackTrace();
// search for the first frame before the "Logger" class.
int ix = 0;
while (ix < stack.length) {
StackTraceElement frame = stack[ix];
String cname = frame.getClassName();
if (!loggerClassNames.contains(cname)) {
// We've found the relevant frame.
record.setSourceClassName(cname);
record.setSourceMethodName(frame.getMethodName());
int lineNumber = frame.getLineNumber();
specialProperties.put(LogParameterUtil.PARAM_LINE, new Long(lineNumber));
break;
}
ix++;
}
// We haven't found a suitable frame, so just punt. This is
// OK as we are only committed to making a "best effort" here.
}
super.log(record);
}
/**
* @see alma.acs.logging.config.LogConfigSubscriber#configureLogging(alma.acs.logging.config.LogConfig)
*/
public void configureLogging(LogConfig logConfig) {
try {
NamedLogger config = logConfig.getSpecialLoggerConfig(getName());
configureJDKLogger(this, config);
} catch (Exception e) {
info("Failed to configure logger.");
}
// forward log level to optional JacORB filter
// Perhaps this dependency is too dirty, then we need a more general
// filter registration mechanism parallel to what the JDK foresees.
Filter logFilter = getFilter();
if (logFilter != null && logFilter instanceof JacORBFilter) {
((JacORBFilter) logFilter).setLogLevel(getLevel());
}
}
/**
* Service method for configuring even a non-ACS Logger.
* Shares code with {@link #configureLogging(LogConfig)}.
* @param jdkLogger
* @param logConfigData
*/
static void configureJDKLogger(Logger jdkLogger, UnnamedLogger loggerConfig) {
int minLogLevelACS; // small integer level
try {
// the logger must let through the lowest log level required for either local or remote logging.
minLogLevelACS = Math.min(loggerConfig.getMinLogLevel(), loggerConfig.getMinLogLevelLocal());
AcsLogLevel minLogLevelJDK = AcsLogLevel.fromAcsCoreLevel(minLogLevelACS); // JDK Level style
jdkLogger.setLevel(minLogLevelJDK);
} catch (Exception ex) {
jdkLogger.info("Failed to configure logger.");
}
}
/**
* Adds a logger class, which will be used to skip entries in the stack trace until the original logging method is found.
* If you have a delegation chain that involves loggers besides AcsLogger and the normal JDK Logger,
* make sure you call this method for each of them.
* @param loggerClass
*/
public void addLoggerClass(Class<?> loggerClass) {
loggerClassNames.add(loggerClass.getName());
}
} |
package org.apache.commons.dbcp;
import java.io.ByteArrayInputStream;
import java.sql.Connection;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.Properties;
import javax.naming.Context;
import javax.naming.Name;
import javax.naming.RefAddr;
import javax.naming.Reference;
import javax.naming.spi.ObjectFactory;
import javax.sql.DataSource;
public class BasicDataSourceFactory implements ObjectFactory {
private final static String PROP_DEFAULTAUTOCOMMIT = "defaultAutoCommit";
private final static String PROP_DEFAULTREADONLY = "defaultReadOnly";
private final static String PROP_DEFAULTTRANSACTIONISOLATION = "defaultTransactionIsolation";
private final static String PROP_DEFAULTCATALOG = "defaultCatalog";
private final static String PROP_DRIVERCLASSNAME = "driverClassName";
private final static String PROP_MAXACTIVE = "maxActive";
private final static String PROP_MAXIDLE = "maxIdle";
private final static String PROP_MINIDLE = "minIdle";
private final static String PROP_MAXWAIT = "maxWait";
private final static String PROP_TESTONBORROW = "testOnBorrow";
private final static String PROP_TESTONRETURN = "testOnReturn";
private final static String PROP_TIMEBETWEENEVICTIONRUNSMILLIS = "timeBetweenEvictionRunsMillis";
private final static String PROP_NUMTESTSPEREVICTIONRUN = "numTestsPerEvictionRun";
private final static String PROP_MINEVICTABLEIDLETIMEMILLIS = "minEvictableIdleTimeMillis";
private final static String PROP_TESTWHILEIDLE = "testWhileIdle";
private final static String PROP_PASSWORD = "password";
private final static String PROP_URL = "url";
private final static String PROP_USERNAME = "username";
private final static String PROP_VALIDATIONQUERY = "validationQuery";
private final static String PROP_ACCESSTOUNDERLYINGCONNECTIONALLOWED = "accessToUnderlyingConnectionAllowed";
private final static String PROP_REMOVEABANDONED = "removeAbandoned";
private final static String PROP_REMOVEABANDONEDTIMEOUT = "removeAbandonedTimeout";
private final static String PROP_LOGABANDONED = "logAbandoned";
private final static String PROP_POOLPREPAREDSTATEMENTS = "poolPreparedStatements";
private final static String PROP_MAXOPENPREPAREDSTATEMENTS = "maxOpenPreparedStatements";
private final static String PROP_CONNECTIONPROPERTIES = "connectionProperties";
private final static String[] ALL_PROPERTIES = {
PROP_DEFAULTAUTOCOMMIT,
PROP_DEFAULTREADONLY,
PROP_DEFAULTTRANSACTIONISOLATION,
PROP_DEFAULTCATALOG,
PROP_DRIVERCLASSNAME,
PROP_MAXACTIVE,
PROP_MAXIDLE,
PROP_MINIDLE,
PROP_MAXWAIT,
PROP_TESTONBORROW,
PROP_TESTONRETURN,
PROP_TIMEBETWEENEVICTIONRUNSMILLIS,
PROP_NUMTESTSPEREVICTIONRUN,
PROP_MINEVICTABLEIDLETIMEMILLIS,
PROP_TESTWHILEIDLE,
PROP_PASSWORD,
PROP_URL,
PROP_USERNAME,
PROP_VALIDATIONQUERY,
PROP_ACCESSTOUNDERLYINGCONNECTIONALLOWED,
PROP_REMOVEABANDONED,
PROP_REMOVEABANDONEDTIMEOUT,
PROP_LOGABANDONED,
PROP_POOLPREPAREDSTATEMENTS,
PROP_MAXOPENPREPAREDSTATEMENTS,
PROP_CONNECTIONPROPERTIES
};
/**
* <p>Create and return a new <code>BasicDataSource</code> instance. If no
* instance can be created, return <code>null</code> instead.</p>
*
* @param obj The possibly null object containing location or
* reference information that can be used in creating an object
* @param name The name of this object relative to <code>nameCtx</code>
* @param nameCts The context relative to which the <code>name</code>
* parameter is specified, or <code>null</code> if <code>name</code>
* is relative to the default initial context
* @param environment The possibly null environment that is used in
* creating this object
*
* @exception Exception if an exception occurs creating the instance
*/
public Object getObjectInstance(Object obj, Name name, Context nameCtx,
Hashtable environment)
throws Exception {
// We only know how to deal with <code>javax.naming.Reference</code>s
// that specify a class name of "javax.sql.DataSource"
if ((obj == null) || !(obj instanceof Reference)) {
return null;
}
Reference ref = (Reference) obj;
if (!"javax.sql.DataSource".equals(ref.getClassName())) {
return null;
}
Properties properties = new Properties();
for (int i = 0 ; i < ALL_PROPERTIES.length ; i++) {
String propertyName = ALL_PROPERTIES[i];
RefAddr ra = ref.get(propertyName);
if (ra != null) {
String propertyValue = ra.getContent().toString();
properties.setProperty(propertyName, propertyValue);
}
}
return createDataSource(properties);
}
/**
* Creates and configures a BasicDataSource instance based on the
* given properties.
*/
public static DataSource createDataSource(Properties properties) throws Exception {
BasicDataSource dataSource = new BasicDataSource();
String value = null;
value = properties.getProperty(PROP_DEFAULTAUTOCOMMIT);
if (value != null) {
dataSource.setDefaultAutoCommit(Boolean.valueOf(value).booleanValue());
}
value = properties.getProperty(PROP_DEFAULTREADONLY);
if (value != null) {
dataSource.setDefaultReadOnly(Boolean.valueOf(value).booleanValue());
}
value = properties.getProperty(PROP_DEFAULTTRANSACTIONISOLATION);
if (value != null) {
int level = PoolableConnectionFactory.UNKNOWN_TRANSACTIONISOLATION;
if ("NONE".equalsIgnoreCase(value)) {
level = Connection.TRANSACTION_NONE;
}
else if ("READ_COMMITTED".equalsIgnoreCase(value)) {
level = Connection.TRANSACTION_READ_COMMITTED;
}
else if ("READ_UNCOMMITTED".equalsIgnoreCase(value)) {
level = Connection.TRANSACTION_READ_UNCOMMITTED;
}
else if ("REPEATABLE_READ".equalsIgnoreCase(value)) {
level = Connection.TRANSACTION_REPEATABLE_READ;
}
else if ("SERIALIZABLE".equalsIgnoreCase(value)) {
level = Connection.TRANSACTION_SERIALIZABLE;
}
else {
try {
level = Integer.parseInt(value);
} catch (NumberFormatException e) {
System.err.println("Could not parse defaultTransactionIsolation: " + value);
System.err.println("WARNING: defaultTransactionIsolation not set");
System.err.println("using default value of database driver");
level = PoolableConnectionFactory.UNKNOWN_TRANSACTIONISOLATION;
}
}
dataSource.setDefaultTransactionIsolation(level);
}
value = properties.getProperty(PROP_DEFAULTCATALOG);
if (value != null) {
dataSource.setDefaultCatalog(value);
}
value = properties.getProperty(PROP_DRIVERCLASSNAME);
if (value != null) {
dataSource.setDriverClassName(value);
}
value = properties.getProperty(PROP_MAXACTIVE);
if (value != null) {
dataSource.setMaxActive(Integer.parseInt(value));
}
value = properties.getProperty(PROP_MAXIDLE);
if (value != null) {
dataSource.setMaxIdle(Integer.parseInt(value));
}
value = properties.getProperty(PROP_MINIDLE);
if (value != null) {
dataSource.setMinIdle(Integer.parseInt(value));
}
value = properties.getProperty(PROP_MAXWAIT);
if (value != null) {
dataSource.setMaxWait(Long.parseLong(value));
}
value = properties.getProperty(PROP_TESTONBORROW);
if (value != null) {
dataSource.setTestOnBorrow(Boolean.valueOf(value).booleanValue());
}
value = properties.getProperty(PROP_TESTONRETURN);
if (value != null) {
dataSource.setTestOnReturn(Boolean.valueOf(value).booleanValue());
}
value = properties.getProperty(PROP_TIMEBETWEENEVICTIONRUNSMILLIS);
if (value != null) {
dataSource.setTimeBetweenEvictionRunsMillis(Long.parseLong(value));
}
value = properties.getProperty(PROP_NUMTESTSPEREVICTIONRUN);
if (value != null) {
dataSource.setNumTestsPerEvictionRun(Integer.parseInt(value));
}
value = properties.getProperty(PROP_MINEVICTABLEIDLETIMEMILLIS);
if (value != null) {
dataSource.setMinEvictableIdleTimeMillis(Long.parseLong(value));
}
value = properties.getProperty(PROP_TESTWHILEIDLE);
if (value != null) {
dataSource.setTestWhileIdle(Boolean.valueOf(value).booleanValue());
}
value = properties.getProperty(PROP_PASSWORD);
if (value != null) {
dataSource.setPassword(value);
}
value = properties.getProperty(PROP_URL);
if (value != null) {
dataSource.setUrl(value);
}
value = properties.getProperty(PROP_USERNAME);
if (value != null) {
dataSource.setUsername(value);
}
value = properties.getProperty(PROP_VALIDATIONQUERY);
if (value != null) {
dataSource.setValidationQuery(value);
}
value = properties.getProperty(PROP_ACCESSTOUNDERLYINGCONNECTIONALLOWED);
if (value != null) {
dataSource.setAccessToUnderlyingConnectionAllowed(Boolean.valueOf(value).booleanValue());
}
value = properties.getProperty(PROP_REMOVEABANDONED);
if (value != null) {
dataSource.setRemoveAbandoned(Boolean.valueOf(value).booleanValue());
}
value = properties.getProperty(PROP_REMOVEABANDONEDTIMEOUT);
if (value != null) {
dataSource.setRemoveAbandonedTimeout(Integer.parseInt(value));
}
value = properties.getProperty(PROP_LOGABANDONED);
if (value != null) {
dataSource.setLogAbandoned(Boolean.valueOf(value).booleanValue());
}
value = properties.getProperty(PROP_POOLPREPAREDSTATEMENTS);
if (value != null) {
dataSource.setPoolPreparedStatements(Boolean.valueOf(value).booleanValue());
}
value = properties.getProperty(PROP_MAXOPENPREPAREDSTATEMENTS);
if (value != null) {
dataSource.setMaxOpenPreparedStatements(Integer.parseInt(value));
}
value = properties.getProperty(PROP_CONNECTIONPROPERTIES);
if (value != null) {
Properties p = getProperties(value);
Enumeration e = p.propertyNames();
while (e.hasMoreElements()) {
String propertyName = (String) e.nextElement();
dataSource.addConnectionProperty(propertyName, p.getProperty(propertyName));
}
}
// Return the configured DataSource instance
return dataSource;
}
/**
* <p>Parse properties from the string. Format of the string must be [propertyName=property;]*<p>
* @param propText
* @return Properties
* @throws Exception
*/
static private Properties getProperties(String propText) throws Exception {
Properties p = new Properties();
if (propText != null) {
p.load(new ByteArrayInputStream(propText.replace(';', '\n').getBytes()));
}
return p;
}
} |
package org.apache.commons.logging.impl;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.Vector;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogConfigurationException;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.LogSource;
public class LogFactoryImpl extends LogFactory {
/**
* Public no-arguments constructor required by the lookup mechanism.
*/
public LogFactoryImpl() {
super();
guessConfig();
}
// Defaulting to NullLogger means important messages will be lost if
// no other logger is available. This is as bad as having a catch() and
// ignoring the exception because 'it can't happen'
/**
* The fully qualified name of the default {@link Log} implementation.
*/
public static final String LOG_DEFAULT =
"org.apache.commons.logging.impl.SimpleLog";
/**
* The name of the system property identifying our {@link Log}
* implementation class.
*/
public static final String LOG_PROPERTY =
"org.apache.commons.logging.Log";
/**
* The deprecated system property used for backwards compatibility with
* the old {@link LogSource} class.
*/
protected static final String LOG_PROPERTY_OLD =
"org.apache.commons.logging.log";
/**
* The configuration attributes for this {@link LogFactory}.
*/
protected Hashtable attributes = new Hashtable();
/**
* The {@link Log} instances that have already been created, keyed by
* logger name.
*/
protected Hashtable instances = new Hashtable();
/**
* The one-argument constructor of the {@link Log} implementation class
* that will be used to create new instances. This value is initialized
* by <code>getLogConstructor()</code>, and then returned repeatedly.
*/
protected Constructor logConstructor = null;
protected LogFactory proxyFactory=null;
/**
* The signature of the Constructor to be used.
*/
protected Class logConstructorSignature[] =
{ java.lang.String.class };
/**
* The one-argument <code>setLogFactory</code> method of the selected
* {@link Log} method, if it exists.
*/
protected Method logMethod = null;
/**
* The signature of the <code>setLogFactory</code> method to be used.
*/
protected Class logMethodSignature[] =
{ LogFactory.class };
/**
* Return the configuration attribute with the specified name (if any),
* or <code>null</code> if there is no such attribute.
*
* @param name Name of the attribute to return
*/
public Object getAttribute(String name) {
if( proxyFactory != null )
return proxyFactory.getAttribute( name );
return (attributes.get(name));
}
/**
* Return an array containing the names of all currently defined
* configuration attributes. If there are no such attributes, a zero
* length array is returned.
*/
public String[] getAttributeNames() {
if( proxyFactory != null )
return proxyFactory.getAttributeNames();
Vector names = new Vector();
Enumeration keys = attributes.keys();
while (keys.hasMoreElements()) {
names.addElement((String) keys.nextElement());
}
String results[] = new String[names.size()];
for (int i = 0; i < results.length; i++) {
results[i] = (String) names.elementAt(i);
}
return (results);
}
/**
* Convenience method to derive a name from the specified class and
* call <code>getInstance(String)</code> with it.
*
* @param clazz Class for which a suitable Log name will be derived
*
* @exception LogConfigurationException if a suitable <code>Log</code>
* instance cannot be returned
*/
public Log getInstance(Class clazz)
throws LogConfigurationException
{
if( proxyFactory != null )
return proxyFactory.getInstance(clazz);
return (getInstance(clazz.getName()));
}
/**
* <p>Construct (if necessary) and return a <code>Log</code> instance,
* using the factory's current set of configuration attributes.</p>
*
* <p><strong>NOTE</strong> - Depending upon the implementation of
* the <code>LogFactory</code> you are using, the <code>Log</code>
* instance you are returned may or may not be local to the current
* application, and may or may not be returned again on a subsequent
* call with the same name argument.</p>
*
* @param name Logical name of the <code>Log</code> instance to be
* returned (the meaning of this name is only known to the underlying
* logging implementation that is being wrapped)
*
* @exception LogConfigurationException if a suitable <code>Log</code>
* instance cannot be returned
*/
public Log getInstance(String name)
throws LogConfigurationException
{
if( proxyFactory != null )
return proxyFactory.getInstance(name);
Log instance = (Log) instances.get(name);
if (instance == null) {
instance = newInstance(name);
instances.put(name, instance);
}
return (instance);
}
/**
* Release any internal references to previously created {@link Log}
* instances returned by this factory. This is useful environments
* like servlet containers, which implement application reloading by
* throwing away a ClassLoader. Dangling references to objects in that
* class loader would prevent garbage collection.
*/
public void release() {
if( proxyFactory != null )
proxyFactory.release();
instances.clear();
}
/**
* Remove any configuration attribute associated with the specified name.
* If there is no such attribute, no action is taken.
*
* @param name Name of the attribute to remove
*/
public void removeAttribute(String name) {
if( proxyFactory != null )
proxyFactory.removeAttribute(name);
attributes.remove(name);
}
/**
* Set the configuration attribute with the specified name. Calling
* this with a <code>null</code> value is equivalent to calling
* <code>removeAttribute(name)</code>.
*
* @param name Name of the attribute to set
* @param value Value of the attribute to set, or <code>null</code>
* to remove any setting for this attribute
*/
public void setAttribute(String name, Object value) {
if( proxyFactory != null )
proxyFactory.setAttribute(name,value);
if (value == null) {
attributes.remove(name);
} else {
attributes.put(name, value);
}
}
/**
* <p>Return the <code>Constructor</code> that can be called to instantiate
* new {@link Log} instances.</p>
*
* <p><strong>IMPLEMENTATION NOTE</strong> - Race conditions caused by
* calling this method from more than one thread are ignored, because
* the same <code>Constructor</code> instance will ultimately be derived
* in all circumstances.</p>
*
* @exception LogConfigurationException if a suitable constructor
* cannot be returned
*/
protected Constructor getLogConstructor()
throws LogConfigurationException {
// Return the previously identified Constructor (if any)
if (logConstructor != null) {
return (logConstructor);
}
// Identify the Log implementation class we will be using
String logClassName = null;
if (logClassName == null) {
logClassName = (String) getAttribute(LOG_PROPERTY);
}
if (logClassName == null) { // @deprecated
logClassName = (String) getAttribute(LOG_PROPERTY_OLD);
}
if (logClassName == null) {
try {
logClassName = System.getProperty(LOG_PROPERTY);
} catch (SecurityException e) {
;
}
}
if (logClassName == null) { // @deprecated
try {
logClassName = System.getProperty(LOG_PROPERTY_OLD);
} catch (SecurityException e) {
;
}
}
if ((logClassName == null) && isLog4JAvailable()) {
logClassName =
"org.apache.commons.logging.impl.Log4JCategoryLog";
}
if ((logClassName == null) && isJdk14Available()) {
logClassName =
"org.apache.commons.logging.impl.Jdk14Logger";
}
if (logClassName == null) {
logClassName = LOG_DEFAULT;
}
// Attempt to load the Log implementation class
Class logClass = null;
try {
logClass = loadClass(logClassName);
if (!Log.class.isAssignableFrom(logClass)) {
throw new LogConfigurationException
("Class " + logClassName + " does not implement Log");
}
} catch (Throwable t) {
throw new LogConfigurationException(t);
}
// Identify the <code>setLogFactory</code> method (if there is one)
try {
logMethod = logClass.getMethod("setLogFactory",
logMethodSignature);
} catch (Throwable t) {
logMethod = null;
}
// Identify the corresponding constructor to be used
try {
logConstructor = logClass.getConstructor(logConstructorSignature);
return (logConstructor);
} catch (Throwable t) {
throw new LogConfigurationException
("No suitable Log constructor " +
logConstructorSignature+ " for " + logClassName, t);
}
}
/**
 * Load a class by name, trying the current thread's context class loader
 * first and falling back to the class loader that loaded this class.
 *
 * @param name fully qualified name of the class to load
 * @throws ClassNotFoundException if neither class loader can find the class
 */
static Class loadClass( String name )
    throws ClassNotFoundException
{
    try {
        return getContextClassLoader().loadClass(name);
    } catch (ClassNotFoundException ex) {
        // Context loader could not find it; fall back to our own loader.
        return Class.forName(name);
    }
}
/**
 * Best-effort detection of a logger-specific factory proxy. Currently only
 * probes for the Log4j-backed factory; on any failure (class missing, cannot
 * instantiate) proxyFactory is left null and the default behavior is used.
 */
protected void guessConfig() {
    if (isLog4JAvailable()) {
        try {
            proxyFactory = (LogFactory) loadClass(
                "org.apache.commons.logging.impl.Log4jFactory").newInstance();
        } catch (Throwable t) {
            // Proxy unavailable or failed to instantiate; run without it.
            proxyFactory = null;
        }
    }
    // other logger specific initialization
}
/**
 * Is <em>JDK 1.4 or later</em> logging available?
 */
protected boolean isJdk14Available() {
    try {
        // Both the JDK logging API and our adapter must be loadable.
        loadClass("java.util.logging.Logger");
        loadClass("org.apache.commons.logging.impl.Jdk14Logger");
    } catch (Throwable t) {
        return false;
    }
    return true;
}
/**
 * Is a <em>Log4J</em> implementation available?
 */
protected boolean isLog4JAvailable() {
    try {
        // Probe for a core Log4j class on the class path.
        loadClass("org.apache.log4j.Category");
    } catch (Throwable t) {
        return false;
    }
    return true;
}
/**
 * Create and return a new {@link Log} instance for the specified name.
 *
 * @param name Name of the new logger
 *
 * @exception LogConfigurationException if a new instance cannot
 *  be created
 */
protected Log newInstance(String name)
    throws LogConfigurationException {
    try {
        // Instantiate via the cached single-argument (name) constructor.
        Object[] args = new Object[] { name };
        Log instance = (Log) getLogConstructor().newInstance(args);
        // If the implementation exposes setLogFactory, wire it back to us.
        if (logMethod != null) {
            args[0] = this;
            logMethod.invoke(instance, args);
        }
        return instance;
    } catch (Throwable t) {
        // Any failure (reflection or constructor) is reported uniformly.
        throw new LogConfigurationException(t);
    }
}
} |
package org.tuckey.web.filters.urlrewrite;
import org.apache.commons.httpclient.Header;
import org.apache.commons.httpclient.HostConfiguration;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.HttpMethod;
import org.apache.commons.httpclient.ProxyHost;
import org.apache.commons.httpclient.SimpleHttpConnectionManager;
import org.apache.commons.httpclient.methods.GetMethod;
import org.apache.commons.httpclient.methods.InputStreamRequestEntity;
import org.apache.commons.httpclient.methods.PostMethod;
import org.tuckey.web.filters.urlrewrite.utils.Log;
import org.tuckey.web.filters.urlrewrite.utils.StringUtils;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Enumeration;
public class RequestProxy {
private static final Log log = Log.getLog(RequestProxy.class);
/**
* This method performs the proxying of the request to the target address.
*
* @param target The target address. Has to be a fully qualified address. The request is send as-is to this address.
* @param hsRequest The request data which should be send to the
* @param hsResponse The response data which will contain the data returned by the proxied request to target.
* @throws java.io.IOException Passed on from the connection logic.
*/
public static void execute(final String target, final HttpServletRequest hsRequest, final HttpServletResponse hsResponse) throws IOException {
log.info("execute, target is " + target);
log.info("response commit state: " + hsResponse.isCommitted());
if (StringUtils.isBlank(target)) {
log.error("The target address is not given. Please provide a target address.");
return;
}
log.info("checking url");
final URL url;
try {
url = new URL(target);
} catch (MalformedURLException e) {
log.error("The provided target url is not valid.", e);
return;
}
log.info("seting up the host configuration");
final HostConfiguration config = new HostConfiguration();
ProxyHost proxyHost = getUseProxyServer((String) hsRequest.getAttribute("use-proxy"));
if (proxyHost != null) config.setProxyHost(proxyHost);
final int port = url.getPort() != -1 ? url.getPort() : url.getDefaultPort();
config.setHost(url.getHost(), port, "http");
log.info("config is " + config.toString());
final HttpMethod targetRequest = setupProxyRequest(hsRequest, url);
if (targetRequest == null) {
log.error("Unsupported request method found: " + hsRequest.getMethod());
return;
}
//perform the reqeust to the target server
final HttpClient client = new HttpClient(new SimpleHttpConnectionManager());
if (log.isInfoEnabled()) {
log.info("client state" + client.getState());
log.info("client params" + client.getParams().toString());
log.info("executeMethod / fetching data ...");
}
final int result = client.executeMethod(config, targetRequest);
//copy the target response headers to our response
setupResponseHeaders(targetRequest, hsResponse);
InputStream originalResponseStream = targetRequest.getResponseBodyAsStream();
//the body might be null, i.e. for responses with cache-headers which leave out the body
if (originalResponseStream != null) {
OutputStream responseStream = hsResponse.getOutputStream();
copyStream(originalResponseStream, responseStream);
}
log.info("set up response, result code was " + result);
}
public static void copyStream(InputStream in, OutputStream out) throws IOException {
byte[] buf = new byte[65536];
int count;
while ((count = in.read(buf)) != -1) {
out.write(buf, 0, count);
}
}
public static ProxyHost getUseProxyServer(String useProxyServer) {
ProxyHost proxyHost = null;
if (useProxyServer != null) {
String proxyHostStr = useProxyServer;
int colonIdx = proxyHostStr.indexOf(':');
if (colonIdx != -1) {
proxyHostStr = proxyHostStr.substring(0, colonIdx);
String proxyPortStr = useProxyServer.substring(colonIdx + 1);
if (proxyPortStr != null && proxyPortStr.length() > 0 && proxyPortStr.matches("[0-9]+")) {
int proxyPort = Integer.parseInt(proxyPortStr);
proxyHost = new ProxyHost(proxyHostStr, proxyPort);
} else {
proxyHost = new ProxyHost(proxyHostStr);
}
} else {
proxyHost = new ProxyHost(proxyHostStr);
}
}
return proxyHost;
}
private static HttpMethod setupProxyRequest(final HttpServletRequest hsRequest, final URL targetUrl) throws IOException {
final String methodName = hsRequest.getMethod();
final HttpMethod method;
if ("POST".equalsIgnoreCase(methodName)) {
PostMethod postMethod = new PostMethod();
InputStreamRequestEntity inputStreamRequestEntity = new InputStreamRequestEntity(hsRequest.getInputStream());
postMethod.setRequestEntity(inputStreamRequestEntity);
method = postMethod;
} else if ("GET".equalsIgnoreCase(methodName)) {
method = new GetMethod();
} else {
log.warn("Unsupported HTTP method requested: " + hsRequest.getMethod());
return null;
}
method.setFollowRedirects(false);
method.setPath(targetUrl.getPath());
method.setQueryString(targetUrl.getQuery());
Enumeration e = hsRequest.getHeaderNames();
if (e != null) {
while (e.hasMoreElements()) {
String headerName = (String) e.nextElement();
if ("host".equalsIgnoreCase(headerName)) {
//the host value is set by the http client
continue;
} else if ("content-length".equalsIgnoreCase(headerName)) {
//the content-length is managed by the http client
continue;
} else if ("accept-encoding".equalsIgnoreCase(headerName)) {
//the accepted encoding should only be those accepted by the http client.
//The response stream should (afaik) be deflated. If our http client does not support
//gzip then the response can not be unzipped and is delivered wrong.
continue;
} else if (headerName.toLowerCase().startsWith("cookie")) {
//fixme : don't set any cookies in the proxied request, this needs a cleaner solution
continue;
}
Enumeration values = hsRequest.getHeaders(headerName);
while (values.hasMoreElements()) {
String headerValue = (String) values.nextElement();
log.info("setting proxy request parameter:" + headerName + ", value: " + headerValue);
method.addRequestHeader(headerName, headerValue);
}
}
}
log.info("proxy query string " + method.getQueryString());
return method;
}
private static void setupResponseHeaders(HttpMethod httpMethod, HttpServletResponse hsResponse) {
if ( log.isInfoEnabled() ) {
log.info("setupResponseHeaders");
log.info("status text: " + httpMethod.getStatusText());
log.info("status line: " + httpMethod.getStatusLine());
}
//filter the headers, which are copied from the proxy response. The http lib handles those itself.
//Filtered out: the content encoding, the content length and cookies
for (int i = 0; i < httpMethod.getResponseHeaders().length; i++) {
Header h = httpMethod.getResponseHeaders()[i];
if ("content-encoding".equalsIgnoreCase(h.getName())) {
continue;
} else if ("content-length".equalsIgnoreCase(h.getName())) {
continue;
} else if ("transfer-encoding".equalsIgnoreCase(h.getName())) {
continue;
} else if (h.getName().toLowerCase().startsWith("cookie")) {
//retrieving a cookie which sets the session id will change the calling session: bad! So we skip this header.
continue;
} else if (h.getName().toLowerCase().startsWith("set-cookie")) {
//retrieving a cookie which sets the session id will change the calling session: bad! So we skip this header.
continue;
}
hsResponse.addHeader(h.getName(), h.getValue());
log.info("setting response parameter:" + h.getName() + ", value: " + h.getValue());
}
//fixme what about the response footers? (httpMethod.getResponseFooters())
if (httpMethod.getStatusCode() != 200) {
hsResponse.setStatus(httpMethod.getStatusCode());
}
}
} |
package org.helioviewer.gl3d.camera;
/**
 * A solar-system body or spacecraft that can be targeted by the 3D camera.
 * Each entry pairs a URL-encoded identifier (used when building data request
 * urls) with a human readable label and a size in meters. Instances are only
 * created through the lazily built shared list.
 */
public class GL3DSpaceObject {
    /** Identifier as used in request urls (already URL-encoded). */
    private final String urlName;
    /** Human readable name; also the toString() representation. */
    private final String labelName;
    /** Size of the object in meters. */
    private final double sizeInMeters;

    private static GL3DSpaceObject objectList[];
    public static GL3DSpaceObject earth;
    public static int LINESEPPLANETS = 9;
    public static int LINESEPSATS = 15;

    /** Returns the shared object list, building it on first access. */
    public static GL3DSpaceObject[] getObjectList() {
        if (objectList == null) {
            createObjectList();
        }
        return objectList;
    }

    /** Populates the shared list and the convenience reference to Earth. */
    private static void createObjectList() {
        objectList = new GL3DSpaceObject[] {
            new GL3DSpaceObject("Mercury", "Mercury", 2439700),
            new GL3DSpaceObject("Venus", "Venus", 6051800),
            new GL3DSpaceObject("Earth", "Earth", 6371000),
            new GL3DSpaceObject("Moon", "Moon", 1737400),
            new GL3DSpaceObject("Mars%20Barycenter", "Mars", 3389500),
            new GL3DSpaceObject("Jupiter%20Barycenter", "Jupiter", 69911000),
            new GL3DSpaceObject("Saturn%20Barycenter", "Saturn", 58232000),
            new GL3DSpaceObject("Uranus%20Barycenter", "Uranus", 25362000),
            new GL3DSpaceObject("Neptune%20Barycenter", "Neptune", 24622000),
            new GL3DSpaceObject("Pluto%20Barycenter", "Pluto", 1195000),
            new GL3DSpaceObject("STEREO%20Ahead", "STEREO Ahead", 2),
            new GL3DSpaceObject("STEREO%20Behind", "STEREO Behind", 2),
            new GL3DSpaceObject("Solar%20Orbiter", "Solar Orbiter", 2),
            new GL3DSpaceObject("CHURYUMOV-GERASIMENKO", "67P/Churyumov-Gerasimenko", 2200),
            new GL3DSpaceObject("SDO", "SDO", 2),
            new GL3DSpaceObject("PROBA2", "PROBA2", 2),
        };
        earth = objectList[2];
    }

    private GL3DSpaceObject(String urlName, String labelName, double sizeInMeters) {
        this.urlName = urlName;
        this.labelName = labelName;
        this.sizeInMeters = sizeInMeters;
    }

    public String getUrlName() {
        return this.urlName;
    }

    @Override
    public String toString() {
        return this.labelName;
    }

    public double getSize() {
        return this.sizeInMeters;
    }
}
package sx.lambda.mstojcevich.voxel.world;
import io.netty.util.internal.ConcurrentSet;
import sx.lambda.mstojcevich.voxel.VoxelGame;
import sx.lambda.mstojcevich.voxel.api.VoxelGameAPI;
import sx.lambda.mstojcevich.voxel.api.events.worldgen.EventFinishChunkGen;
import sx.lambda.mstojcevich.voxel.block.Block;
import sx.lambda.mstojcevich.voxel.entity.Entity;
import sx.lambda.mstojcevich.voxel.net.packet.client.PacketUnloadChunk;
import sx.lambda.mstojcevich.voxel.util.Vec3i;
import sx.lambda.mstojcevich.voxel.entity.EntityPosition;
import sx.lambda.mstojcevich.voxel.world.chunk.Chunk;
import sx.lambda.mstojcevich.voxel.world.chunk.IChunk;
import sx.lambda.mstojcevich.voxel.world.generation.ChunkGenerator;
import sx.lambda.mstojcevich.voxel.world.generation.SimplexChunkGenerator;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.LinkedBlockingDeque;
import static org.lwjgl.opengl.GL11.*;
public class World implements IWorld {
private static final int CHUNK_SIZE = 16;
private static final int WORLD_HEIGHT = 128;
private static final int SEA_LEVEL = 64;
private final Map<Vec3i, IChunk> chunkMap = new ConcurrentHashMap<>();
private final Set<IChunk> chunkList = new ConcurrentSet<>();
private static final float GRAVITY = 4.69f;
private static final float TERMINAL_VELOCITY = 56;
private final ChunkGenerator chunkGen;
private final boolean remote, server;
private List<Entity> loadedEntities = new CopyOnWriteArrayList<>();
private Set<IChunk> chunksToRerender = Collections.newSetFromMap(new ConcurrentHashMap<IChunk, Boolean>());
private Queue<Vec3i> sunlightQueue = new ConcurrentLinkedQueue<>();
public World(boolean remote, boolean server) {
this.remote = remote;
this.server = server;
if(!remote) {
this.chunkGen = new SimplexChunkGenerator(this, 200, new Random().nextInt());
} else {
this.chunkGen = null;
}
}
public int getChunkSize() {
return CHUNK_SIZE;
}
public int getHeight() { return WORLD_HEIGHT; }
public IChunk getChunkAtPosition(Vec3i position) {
Vec3i chunkPosition = new Vec3i(
getChunkPosition(position.x),
0,
getChunkPosition(position.z));
return this.chunkMap.get(chunkPosition);
}
public void render() {
if(!server) {
if(!chunksToRerender.isEmpty()) {
processLightQueue();
}
for(IChunk c : chunksToRerender) {
c.rerender();
chunksToRerender.remove(c);
}
long renderStartNS = System.nanoTime();
for (IChunk c : this.chunkList) {
if (VoxelGame.getInstance().getGameRenderer().getFrustum().cubeInFrustum(c.getStartPosition().x, c.getStartPosition().y, c.getStartPosition().z, CHUNK_SIZE, c.getHighestPoint())) {
glPushMatrix();
glTranslatef(c.getStartPosition().x, c.getStartPosition().y, c.getStartPosition().z);
c.render();
glPopMatrix();
}
}
for (IChunk c : this.chunkList) {
if (VoxelGame.getInstance().getGameRenderer().getFrustum().cubeInFrustum(c.getStartPosition().x, c.getStartPosition().y, c.getStartPosition().z, CHUNK_SIZE, c.getHighestPoint())) {
glPushMatrix();
glTranslatef(c.getStartPosition().x, c.getStartPosition().y, c.getStartPosition().z);
c.renderWater();
glPopMatrix();
}
}
if(VoxelGame.getInstance().numChunkRenders == 100) { // Reset every 100 renders
VoxelGame.getInstance().numChunkRenders = 0;
VoxelGame.getInstance().chunkRenderTimes = 0;
}
VoxelGame.getInstance().chunkRenderTimes += (int)(System.nanoTime() - renderStartNS);
VoxelGame.getInstance().numChunkRenders++;
} else {
System.err.println("Why the hell is the server running render?");
}
}
@Override
public void loadChunks(EntityPosition playerPosition, int viewDistance) {
if(!remote) { //don't gen chunks if we're not local
//TODO Make sure all of these values apply to the chunkGC check
this.getChunksInRange(playerPosition, viewDistance);
}
gcChunks(playerPosition, viewDistance);
}
@Override
public int getSeaLevel() { return SEA_LEVEL; }
@Override
public int getChunkPosition(float value) {
int subtraction = (int)(value%CHUNK_SIZE);
if(value <= 0 && subtraction != 0) {
subtraction = CHUNK_SIZE+subtraction;
}
return (int)(value-subtraction);
}
@Override
public float getGravity() {
return GRAVITY;
}
@Override
public float applyGravity(float velocity, long ms) {
if(ms < 0)ms = 0-ms;
return Math.max(-TERMINAL_VELOCITY, velocity-(getGravity()/1000)*(ms/10f));
}
@Override
public void removeBlock(final Vec3i position) {
synchronized (this) {
final IChunk c = this.getChunkAtPosition(position);
c.removeBlock(position);
if(!server) {
rerenderChunk(c);
if(Math.abs(position.x+(position.x<0?1:0)) % 16 == 15) {
if(position.x < 0) {
rerenderChunk(getChunkAtPosition(new Vec3i(position.x-1, position.y, position.z)));
} else {
rerenderChunk(getChunkAtPosition(new Vec3i(position.x+1, position.y, position.z)));
}
} else if(Math.abs(position.x+(position.x<0?1:0)) % 16 == 0) {
if(position.x < 0) {
rerenderChunk(getChunkAtPosition(new Vec3i(position.x+1, position.y, position.z)));
} else {
rerenderChunk(getChunkAtPosition(new Vec3i(position.x-1, position.y, position.z)));
}
}
if(Math.abs(position.z+(position.z<0?1:0)) % 16 == 15) {
if(position.z < 0) {
rerenderChunk(getChunkAtPosition(new Vec3i(position.x, position.y, position.z - 1)));
} else {
rerenderChunk(getChunkAtPosition(new Vec3i(position.x, position.y, position.z+1)));
}
} else if(Math.abs(position.z+(position.z<0?1:0)) % 16 == 0) {
if(position.z < 0) {
rerenderChunk(getChunkAtPosition(new Vec3i(position.x, position.y, position.z+1)));
} else {
rerenderChunk(getChunkAtPosition(new Vec3i(position.x, position.y, position.z-1)));
}
}
}
}
}
@Override
public void addBlock(Block block, final Vec3i position) {
synchronized(this) {
final IChunk c = this.getChunkAtPosition(position);
c.addBlock(block, position);
if(!server) {
rerenderChunk(c);
}
}
}
@Override
public IChunk[] getChunksInRange(EntityPosition epos, int viewDistance) {
List<IChunk> chunkList = new ArrayList<IChunk>();
int playerChunkX = getChunkPosition(epos.getX());
int playerChunkZ = getChunkPosition(epos.getZ());
int range = viewDistance*CHUNK_SIZE;
for (int x = playerChunkX - range; x <= playerChunkX + range; x += CHUNK_SIZE) {
for (int z = playerChunkZ - range; z <= playerChunkZ + range; z += CHUNK_SIZE) {
chunkList.add(loadChunk(x, z));
}
}
return chunkList.toArray(new IChunk[chunkList.size()]);
}
@Override
public void addChunk(final IChunk chunk) {
Vec3i pos = chunk.getStartPosition();
IChunk c = this.chunkMap.get(pos);
if(c != null) {
this.chunkMap.remove(pos);
this.chunkList.remove(c);
}
this.chunkMap.put(pos, chunk);
this.chunkList.add(chunk);
if(!server) {
rerenderChunk(chunk);
}
addSun(chunk);
}
@Override
public void gcChunks(EntityPosition playerPosition, int viewDistance) {
int range = viewDistance*CHUNK_SIZE;
int playerChunkX = getChunkPosition(playerPosition.getX());
int playerChunkZ = getChunkPosition(playerPosition.getZ());
for(Map.Entry<Vec3i, IChunk> e : this.chunkMap.entrySet()) {
Vec3i b = e.getKey();
if(Math.abs(b.x - playerChunkX) > range
|| Math.abs(b.z - playerChunkZ) > range) {
this.chunkList.remove(e.getValue());
this.chunkMap.get(b).unload();
this.chunkMap.remove(b);
if(remote) {
VoxelGame.getInstance().getServerChanCtx().writeAndFlush(new PacketUnloadChunk(b));
}
}
}
}
@Override
public List<Entity> getLoadedEntities() {
return this.loadedEntities;
}
private IChunk loadChunk(int startX, int startZ) {
Vec3i pos = new Vec3i(startX, 0, startZ);
IChunk foundChunk = chunkMap.get(pos);
if (foundChunk == null && !remote) {
final IChunk c = new Chunk(this, pos);
VoxelGameAPI.instance.getEventManager().push(new EventFinishChunkGen(c));
this.chunkMap.put(pos, c);
this.chunkList.add(c);
addSun(c);
if(!server) {
rerenderChunk(c);
}
return c;
} else {
return foundChunk;
}
}
private void addSun(IChunk c) {
for(int x = 0; x < CHUNK_SIZE; x++) {
for(int z = 0; z < CHUNK_SIZE; z++) {
c.setSunlight(x, WORLD_HEIGHT-1, z, 16);
addToSunlightQueue(new Vec3i(c.getStartPosition().x + x, WORLD_HEIGHT-1, c.getStartPosition().z + z));
}
}
c.finishChangingSunlight();
}
public void addEntity(Entity e) {
loadedEntities.add(e);
}
@Override
public void rerenderChunk(IChunk c) {
chunksToRerender.add(c);
}
@Override
public ChunkGenerator getChunkGen() {
return this.chunkGen;
}
/**
* Add a block to a list of blocks to process sunlight for
* The block at the position passed should be transparent or null and have a sunlight level greater than 0
*/
@Override
public void addToSunlightQueue(Vec3i block) {
sunlightQueue.add(block);
}
@Override
public void processLightQueue() {
if(!sunlightQueue.isEmpty()) {
Queue<IChunk> changedChunks = new LinkedBlockingDeque<>();
Vec3i pos;
while((pos = sunlightQueue.poll()) != null) {
IChunk posChunk = getChunkAtPosition(pos);
int ll = posChunk.getSunlight(pos.x, pos.y, pos.z);
int nextLL = ll-1;
Vec3i negXNeighborPos = pos.translate(-1,0,0);
Vec3i posXNeighborPos = pos.translate(1,0,0);
Vec3i negZNeighborPos = pos.translate(0,0,-1);
Vec3i posZNeighborPos = pos.translate(0,0,1);
IChunk negXNeighborChunk = getChunkAtPosition(negXNeighborPos);
IChunk posXNeighborChunk = getChunkAtPosition(posXNeighborPos);
IChunk negZNeighborChunk = getChunkAtPosition(negZNeighborPos);
IChunk posZNeighborChunk = getChunkAtPosition(posZNeighborPos);
if(negXNeighborChunk != null) {
Block bl = negXNeighborChunk.getBlockAtPosition(negXNeighborPos);
if(bl == null) {
if(negXNeighborChunk.getSunlight(negXNeighborPos.x, negXNeighborPos.y, negXNeighborPos.z) < nextLL) {
negXNeighborChunk.setSunlight(negXNeighborPos.x, negXNeighborPos.y, negXNeighborPos.z, nextLL);
sunlightQueue.add(negXNeighborPos);
changedChunks.add(negXNeighborChunk);
}
} else if(bl.isTransparent()) {
if(negXNeighborChunk.getSunlight(negXNeighborPos.x, negXNeighborPos.y, negXNeighborPos.z) < nextLL) {
negXNeighborChunk.setSunlight(negXNeighborPos.x, negXNeighborPos.y, negXNeighborPos.z, nextLL);
sunlightQueue.add(negXNeighborPos);
changedChunks.add(negXNeighborChunk);
}
}
}
if(posXNeighborChunk != null) {
Block bl = posXNeighborChunk.getBlockAtPosition(posXNeighborPos);
if(bl == null) {
if(posXNeighborChunk.getSunlight(posXNeighborPos.x, posXNeighborPos.y, posXNeighborPos.z) < nextLL) {
posXNeighborChunk.setSunlight(posXNeighborPos.x, posXNeighborPos.y, posXNeighborPos.z, nextLL);
sunlightQueue.add(posXNeighborPos);
changedChunks.add(posXNeighborChunk);
}
} else if(bl.isTransparent()) {
if(posXNeighborChunk.getSunlight(posXNeighborPos.x, posXNeighborPos.y, posXNeighborPos.z) < nextLL) {
posXNeighborChunk.setSunlight(posXNeighborPos.x, posXNeighborPos.y, posXNeighborPos.z, nextLL);
sunlightQueue.add(posXNeighborPos);
changedChunks.add(posXNeighborChunk);
}
}
}
if(negZNeighborChunk != null) {
Block bl = negZNeighborChunk.getBlockAtPosition(negZNeighborPos);
if(bl == null) {
if(negZNeighborChunk.getSunlight(negZNeighborPos.x, negZNeighborPos.y, negZNeighborPos.z) < nextLL) {
negZNeighborChunk.setSunlight(negZNeighborPos.x, negZNeighborPos.y, negZNeighborPos.z, nextLL);
sunlightQueue.add(negZNeighborPos);
changedChunks.add(negZNeighborChunk);
}
} else if(bl.isTransparent()) {
if(negZNeighborChunk.getSunlight(negZNeighborPos.x, negZNeighborPos.y, negZNeighborPos.z) < nextLL) {
negZNeighborChunk.setSunlight(negZNeighborPos.x, negZNeighborPos.y, negZNeighborPos.z, nextLL);
sunlightQueue.add(negZNeighborPos);
changedChunks.add(negZNeighborChunk);
}
}
}
if(posZNeighborChunk != null) {
Block bl = posZNeighborChunk.getBlockAtPosition(posZNeighborPos);
if(bl == null) {
if(posZNeighborChunk.getSunlight(posZNeighborPos.x, posZNeighborPos.y, posZNeighborPos.z) < nextLL) {
posZNeighborChunk.setSunlight(posZNeighborPos.x, posZNeighborPos.y, posZNeighborPos.z, nextLL);
sunlightQueue.add(posZNeighborPos);
changedChunks.add(posZNeighborChunk);
}
} else if(bl.isTransparent()) {
if(posZNeighborChunk.getSunlight(posZNeighborPos.x, posZNeighborPos.y, posZNeighborPos.z) < nextLL) {
posZNeighborChunk.setSunlight(posZNeighborPos.x, posZNeighborPos.y, posZNeighborPos.z, nextLL);
sunlightQueue.add(posZNeighborPos);
changedChunks.add(posZNeighborChunk);
}
}
}
if(pos.y > 0) {
Vec3i negYPos = pos.translate(0, -1, 0);
Block negYBlock = posChunk.getBlockAtPosition(negYPos);
if(negYBlock == null) {
if(ll == 16) {
if(posChunk.getSunlight(negYPos.x, negYPos.y, negYPos.z) < 16) {
posChunk.setSunlight(negYPos.x, negYPos.y, negYPos.z, 16);
sunlightQueue.add(negYPos);
changedChunks.add(posChunk);
}
} else {
if(posChunk.getSunlight(negYPos.x, negYPos.y, negYPos.z) < nextLL) {
posChunk.setSunlight(negYPos.x, negYPos.y, negYPos.z, nextLL);
sunlightQueue.add(negYPos);
changedChunks.add(posChunk);
}
}
} else if(negYBlock.isTransparent()) {
if(ll == 16) {
if(posChunk.getSunlight(negYPos.x, negYPos.y, negYPos.z) < 16) {
posChunk.setSunlight(negYPos.x, negYPos.y, negYPos.z, 16);
sunlightQueue.add(negYPos);
changedChunks.add(posChunk);
}
} else {
if(posChunk.getSunlight(negYPos.x, negYPos.y, negYPos.z) < nextLL) {
posChunk.setSunlight(negYPos.x, negYPos.y, negYPos.z, nextLL);
sunlightQueue.add(negYPos);
changedChunks.add(posChunk);
}
}
}
}
}
IChunk changedChunk;
while((changedChunk = changedChunks.poll()) != null) {
changedChunk.finishChangingSunlight();
}
}
}
@Override
public float getLightLevel(Vec3i pos) {
IChunk chunk = getChunkAtPosition(pos);
if(chunk == null) {
return 1;
}
return chunk.getLightLevel(pos.x, pos.y, pos.z);
}
} |
package be.bagofwords.db.filedb;
import be.bagofwords.application.BowTaskScheduler;
import be.bagofwords.application.memory.MemoryGobbler;
import be.bagofwords.application.memory.MemoryManager;
import be.bagofwords.application.memory.MemoryStatus;
import be.bagofwords.db.CoreDataInterface;
import be.bagofwords.db.DBUtils;
import be.bagofwords.db.combinator.Combinator;
import be.bagofwords.iterator.CloseableIterator;
import be.bagofwords.iterator.IterableUtils;
import be.bagofwords.iterator.SimpleIterator;
import be.bagofwords.ui.UI;
import be.bagofwords.util.KeyValue;
import be.bagofwords.util.MappedLists;
import be.bagofwords.util.Pair;
import be.bagofwords.util.SerializationUtils;
import org.apache.commons.io.IOUtils;
import java.io.*;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.util.*;
import java.util.stream.Collectors;
public class FileDataInterface<T extends Object> extends CoreDataInterface<T> implements MemoryGobbler {
// Size thresholds for data files; presumably used by the periodic
// rewriteAllFiles pass (scheduled in the constructor) -- TODO confirm.
private static final long MAX_FILE_SIZE_WRITE = 50 * 1024 * 1024;
private static final long MAX_FILE_SIZE_READ = 10 * 1024 * 1024;
// Keys are longs; discarding 58 bits leaves the top 6 bits, i.e. 64 buckets.
private static final long BITS_TO_DISCARD_FOR_FILE_BUCKETS = 58;
// Batch sizing for bulk writes/reads; adapted dynamically in write(Iterator).
private static final int BATCH_SIZE_PRIMITIVE_VALUES = 100000;
private static final int BATCH_SIZE_NON_PRIMITIVE_VALUES = 100;
// File names used inside the data directory.
private static final String META_FILE = "META_FILE";
private static final String LOCK_FILE = "LOCK";
// Byte widths of serialized primitives (e.g. an 8-byte key, see read()).
private static final int LONG_SIZE = 8;
private static final int INT_SIZE = 4;
private MemoryManager memoryManager;
// Root directory of this interface's data files ({directory}/{nameOfSubset}).
private File directory;
private List<FileBucket> fileBuckets;
// Fixed serialized width of values; semantics come from SerializationUtils.getWidth.
private final int sizeOfValues;
// Random id written to the lock file to detect concurrent writers (see constructor).
private final long randomId;
// Deliberately a fresh String instance, presumably so this monitor is not
// shared with the interned "LOCK" literal -- NOTE(review): confirm intent.
private final String sizeOfCachedFileContentsLock = new String("LOCK");
// Cache budget: one third of the maximum heap.
private final long maxSizeOfCachedFileContents = Runtime.getRuntime().maxMemory() / 3;
private long currentSizeOfCachedFileContents;
private long timeOfLastWrite;
private long timeOfLastRead;
private boolean metaFileOutOfSync;
/**
 * Creates a data interface that persists key/value pairs in files under
 * {directory}/{nameOfSubset}.
 *
 * Initialization order matters: the data directory is ensured to exist
 * before the meta info is read and the files are initialized, and the lock
 * file is written with our random id so a concurrent writer can be detected
 * later by checkLock().
 */
public FileDataInterface(MemoryManager memoryManager, Combinator<T> combinator, Class<T> objectClass, String directory, String nameOfSubset, boolean isTemporaryDataInterface, BowTaskScheduler taskScheduler) {
super(nameOfSubset, objectClass, combinator, isTemporaryDataInterface);
this.directory = new File(directory, nameOfSubset);
// Fixed byte width of serialized values for this object class.
this.sizeOfValues = SerializationUtils.getWidth(objectClass);
this.randomId = new Random().nextLong();
this.memoryManager = memoryManager;
timeOfLastRead = 0;
checkDataDir();
MetaFile metaFile = readMetaInfo();
initializeFiles(metaFile);
writeLockFile(randomId);
currentSizeOfCachedFileContents = 0;
// Periodic maintenance: compact oversized files and verify we still own
// the lock. Skipped once the interface has been closed.
taskScheduler.schedulePeriodicTask(() -> ifNotClosed(() -> {
rewriteAllFiles(false);
checkLock();
}), 1000); //rewrite files that are too large
@Override
public T read(long key) {
FileBucket bucket = getBucket(key);
lockForRead(bucket);
FileInfo file = bucket.getFile(key);
try {
int startPos;
int pos = Arrays.binarySearch(file.getFileLocationsKeys(), key);
if (pos == -1) {
//Before first key, value can not be in file
return null;
} else {
if (pos < 0) {
pos = -(pos + 1);
}
if (pos == file.getFileLocationsKeys().length || file.getFileLocationsKeys()[pos] > key) {
pos
}
startPos = file.getFileLocationsValues()[pos];
}
int endPos = pos + 1 < file.getFileLocationsKeys().length ? file.getFileLocationsValues()[pos + 1] : file.getReadSize();
ReadBuffer readBuffer = getReadBuffer(file, startPos, endPos);
startPos -= readBuffer.getOffset();
endPos -= readBuffer.getOffset();
byte firstByteOfKeyToRead = (byte) (key >> 56);
byte[] buffer = readBuffer.getBuffer();
int position = startPos;
while (position < endPos) {
byte currentByte = buffer[position];
if (currentByte == firstByteOfKeyToRead) {
long currentKey = SerializationUtils.bytesToLong(buffer, position);
position += LONG_SIZE;
if (currentKey == key) {
ReadValue<T> readValue = readValue(buffer, position);
return readValue.getValue();
} else if (currentKey > key) {
return null;
} else {
//skip value
position += skipValue(buffer, position);
}
} else if (currentByte > firstByteOfKeyToRead) {
//key too large, value not in this file
return null;
} else if (currentByte < firstByteOfKeyToRead) {
//key too small, skip key and value
position += LONG_SIZE;
position += skipValue(buffer, position);
}
}
return null;
} catch (Exception exp) {
throw new RuntimeException("Error in file " + toFile(file).getAbsolutePath(), exp);
} finally {
dataWasRead();
bucket.unlockRead();
}
}
@Override
public void write(long key, T value) {
    FileBucket bucket = getBucket(key);
    bucket.lockWrite();
    FileInfo file = bucket.getFile(key);
    try {
        int extraSize;
        // try-with-resources guarantees the stream is closed even when
        // writeValue throws (the original leaked the stream in that case).
        try (DataOutputStream dos = getAppendingOutputStream(file)) {
            extraSize = writeValue(dos, key, value);
        }
        file.increaseWriteSize(extraSize);
        dataWasWritten();
    } catch (Exception e) {
        throw new RuntimeException("Failed to write value with key " + key + " to file " + toFile(file).getAbsolutePath(), e);
    } finally {
        bucket.unlockWrite();
    }
}
@Override
public void write(Iterator<KeyValue<T>> entries) {
    long batchSize = getBatchSize();
    while (entries.hasNext()) {
        // Group up to batchSize entries by their target bucket.
        MappedLists<FileBucket, KeyValue<T>> entriesToFileBuckets = new MappedLists<>();
        int numRead = 0;
        while (numRead < batchSize && entries.hasNext()) {
            KeyValue<T> curr = entries.next();
            FileBucket fileBucket = getBucket(curr.getKey());
            entriesToFileBuckets.get(fileBucket).add(curr);
            numRead++;
        }
        long totalSizeWrittenInBatch = 0;
        for (FileBucket bucket : entriesToFileBuckets.keySet()) {
            List<KeyValue<T>> values = entriesToFileBuckets.get(bucket);
            bucket.lockWrite();
            try {
                // Within the bucket, group entries per target file so each
                // file is opened once.
                MappedLists<FileInfo, KeyValue<T>> entriesToFiles = new MappedLists<>();
                for (KeyValue<T> value : values) {
                    FileInfo file = bucket.getFile(value.getKey());
                    entriesToFiles.get(file).add(value);
                }
                for (FileInfo file : entriesToFiles.keySet()) {
                    try {
                        List<KeyValue<T>> valuesForFile = entriesToFiles.get(file);
                        // try-with-resources: the original leaked the stream
                        // when a write threw before dos.close().
                        try (DataOutputStream dos = getAppendingOutputStream(file)) {
                            for (KeyValue<T> value : valuesForFile) {
                                int extraSize = writeValue(dos, value.getKey(), value.getValue());
                                file.increaseWriteSize(extraSize);
                                totalSizeWrittenInBatch += extraSize;
                            }
                            dataWasWritten();
                        }
                    } catch (Exception exp) {
                        throw new RuntimeException("Failed to write multiple values to file " + toFile(file).getAbsolutePath(), exp);
                    }
                }
            } finally {
                bucket.unlockWrite();
            }
        }
        // Adapt the batch size so each batch writes a roughly constant number
        // of bytes regardless of value size.
        if (totalSizeWrittenInBatch > 0) {
            batchSize = BATCH_SIZE_PRIMITIVE_VALUES * 16 * batchSize / totalSizeWrittenInBatch;
        }
    }
}
@Override
public CloseableIterator<KeyValue<T>> iterator(final Iterator<Long> keyIterator) {
    // Reads the requested keys in sorted batches so each file is decoded at
    // most once per batch.
    return new CloseableIterator<KeyValue<T>>() {

        private Iterator<KeyValue<T>> currBatchIterator;

        {
            readNextBatch(); //constructor
        }

        private void readNextBatch() {
            long batchSize = getBatchSize();
            List<Long> keysInBatch = new ArrayList<>();
            while (keyIterator.hasNext() && keysInBatch.size() < batchSize) {
                keysInBatch.add(keyIterator.next());
            }
            // Sorting groups keys of the same file together.
            Collections.sort(keysInBatch);
            List<KeyValue<T>> valuesInBatch = new ArrayList<>();
            FileInfo currentFile = null;
            Map<Long, T> valuesInCurrentFile = null;
            for (Long key : keysInBatch) {
                FileBucket bucket = getBucket(key);
                lockForRead(bucket);
                try {
                    FileInfo file = bucket.getFile(key);
                    if (file != currentFile) {
                        currentFile = file;
                        valuesInCurrentFile = readMap(file);
                    }
                } finally {
                    // BUGFIX: unlock in finally so a failure in readMap no
                    // longer leaks the bucket's read lock.
                    bucket.unlockRead();
                }
                T value = valuesInCurrentFile.get(key);
                if (value != null) {
                    valuesInBatch.add(new KeyValue<>(key, value));
                }
            }
            currBatchIterator = valuesInBatch.iterator();
        }

        @Override
        protected void closeInt() {
        }

        @Override
        public boolean hasNext() {
            return currBatchIterator.hasNext();
        }

        @Override
        public KeyValue<T> next() {
            KeyValue<T> next = currBatchIterator.next();
            if (!currBatchIterator.hasNext()) {
                readNextBatch();
            }
            return next;
        }
    };
}
@Override
public CloseableIterator<KeyValue<T>> iterator() {
    // Streams every stored key/value pair, file by file, across all buckets.
    final FileIterator fileIterator = new FileIterator();
    return IterableUtils.iterator(new SimpleIterator<KeyValue<T>>() {
        private Iterator<KeyValue<T>> valuesInFileIt;
        @Override
        public KeyValue<T> next() throws Exception {
            // Advance to the next non-empty file. The bucket is read-locked by
            // lockCurrentBucketAndGetNextFile and released as soon as the file's
            // contents have been read into memory.
            while ((valuesInFileIt == null || !valuesInFileIt.hasNext())) {
                Pair<FileBucket, FileInfo> next = fileIterator.lockCurrentBucketAndGetNextFile();
                if (next != null) {
                    FileBucket bucket = next.getFirst();
                    FileInfo file = next.getSecond();
                    List<KeyValue<T>> sortedEntries = readCleanValues(file);
                    bucket.unlockRead();
                    valuesInFileIt = sortedEntries.iterator();
                } else {
                    // no more files: iteration is done
                    valuesInFileIt = null;
                    break;
                }
            }
            if (valuesInFileIt != null && valuesInFileIt.hasNext()) {
                return valuesInFileIt.next();
            } else {
                // returning null signals end-of-iteration to SimpleIterator
                return null;
            }
        }
    });
}
@Override
public CloseableIterator<Long> keyIterator() {
    // Streams every stored key, file by file, across all buckets. Mirrors
    // iterator() but only decodes keys (values are skipped while reading).
    final FileIterator fileIterator = new FileIterator();
    return IterableUtils.iterator(new SimpleIterator<Long>() {
        private Iterator<Long> keysInFileIt;
        @Override
        public Long next() throws Exception {
            // Advance to the next non-empty file; the bucket read lock is held
            // only while the file's keys are read into memory.
            while ((keysInFileIt == null || !keysInFileIt.hasNext())) {
                Pair<FileBucket, FileInfo> next = fileIterator.lockCurrentBucketAndGetNextFile();
                if (next != null) {
                    FileBucket bucket = next.getFirst();
                    FileInfo file = next.getSecond();
                    List<Long> sortedKeys = readKeys(file);
                    bucket.unlockRead();
                    keysInFileIt = sortedKeys.iterator();
                } else {
                    // no more files: iteration is done
                    keysInFileIt = null;
                    break;
                }
            }
            if (keysInFileIt != null && keysInFileIt.hasNext()) {
                return keysInFileIt.next();
            } else {
                // returning null signals end-of-iteration to SimpleIterator
                return null;
            }
        }
    });
}
@Override
public void freeMemory() {
    // Drops all cached file contents and updates the cached-bytes counter.
    // Each bucket's read lock is released in a finally block so a failure while
    // discarding one file can no longer leave the bucket permanently locked
    // (the original called unlockRead outside any try/finally).
    ifNotClosed(() -> {
        for (FileBucket bucket : fileBuckets) {
            bucket.lockRead();
            try {
                for (FileInfo fileInfo : bucket.getFiles()) {
                    long bytesReleased = fileInfo.discardFileContents();
                    updateSizeOfCachedFileContents(-bytesReleased);
                }
            } finally {
                bucket.unlockRead();
            }
        }
    });
}
@Override
public long getMemoryUsage() {
    // Number of bytes currently held by cached file contents.
    // NOTE(review): read without taking sizeOfCachedFileContentsLock, so the
    // value may be slightly stale — confirm that is acceptable for callers.
    return currentSizeOfCachedFileContents;
}
@Override
public long apprSize() {
    // Approximates the number of stored objects: counts the objects in (up to)
    // the first 100 files and extrapolates by total on-disk size.
    int numOfFilesToSample = 100;
    long numOfObjects = 0;
    long sizeOfSampledFiles = 0;
    int numOfSampledFiles = 0;
    long sizeOfAllFiles = 0;
    try {
        FileIterator fileIt = new FileIterator();
        Pair<FileBucket, FileInfo> next = fileIt.lockCurrentBucketAndGetNextFile();
        while (next != null) {
            FileBucket bucket = next.getFirst();
            FileInfo file = next.getSecond();
            long fileSize = file.getReadSize();
            if (numOfSampledFiles < numOfFilesToSample) {
                List<Long> keys = readKeys(file);
                numOfObjects += keys.size();
                sizeOfSampledFiles += fileSize;
                if (fileSize == 0 && !keys.isEmpty()) {
                    UI.writeError("Something is wrong with file " + file.getFirstKey());
                }
                numOfSampledFiles++;
            }
            bucket.unlockRead();
            sizeOfAllFiles += fileSize;
            next = fileIt.lockCurrentBucketAndGetNextFile();
        }
        // Guard against division by zero: if every sampled file reported size 0
        // (possible in the inconsistent state flagged above) there is nothing
        // sound to extrapolate from. The original divided unconditionally when
        // numOfObjects > 0 and could throw ArithmeticException.
        if (numOfObjects == 0 || sizeOfSampledFiles == 0) {
            return 0;
        } else {
            return sizeOfAllFiles * numOfObjects / sizeOfSampledFiles;
        }
    } catch (IOException exp) {
        throw new RuntimeException(exp);
    }
}
@Override
public void flush() {
    // Flushing only records which buckets are dirty; the actual file rewrites
    // happen lazily before the next read (see lockForRead).
    updateShouldBeCleanedInfo();
}
@Override
public void optimizeForReading() {
    // Force-clean every bucket now so subsequent reads never trigger rewrites.
    rewriteAllFiles(true);
}
@Override
protected void doClose() {
    // Record dirty-bucket state, persist the meta file if it is out of sync,
    // then drop the bucket list so any further use fails fast.
    updateShouldBeCleanedInfo();
    if (metaFileOutOfSync) {
        writeMetaFile();
    }
    fileBuckets = null;
}
@Override
public void dropAllData() {
    // Deletes every data file and resets all buckets to a single empty file.
    // The unlock now sits in a finally block: the original leaked every write
    // lock if deleteFile or makeSureAllFileBucketsHaveAtLeastOneFile threw.
    writeLockAllBuckets();
    try {
        for (FileBucket bucket : fileBuckets) {
            for (FileInfo file : bucket.getFiles()) {
                deleteFile(file);
            }
            bucket.getFiles().clear();
            bucket.setShouldBeCleanedBeforeRead(false);
        }
        makeSureAllFileBucketsHaveAtLeastOneFile();
    } finally {
        writeUnlockAllBuckets();
    }
    writeMetaFile();
}
private void updateShouldBeCleanedInfo() {
    // Marks every bucket that contains a dirty file so readers clean it first.
    // unlockWrite is now in a finally block so an exception from allFilesClean
    // cannot leave the bucket write-locked forever.
    for (FileBucket fileBucket : fileBuckets) {
        fileBucket.lockWrite();
        try {
            if (!allFilesClean(fileBucket)) {
                fileBucket.setShouldBeCleanedBeforeRead(true);
            }
        } finally {
            fileBucket.unlockWrite();
        }
    }
}
private synchronized void rewriteAllFiles(boolean forceClean) {
    // Rewrites dirty files in all buckets in parallel; each bucket is
    // write-locked individually inside rewriteBucket.
    int numOfFilesRewritten = fileBuckets.parallelStream().collect(Collectors.summingInt(bucket -> rewriteBucket(bucket, forceClean)));
    if (metaFileOutOfSync) {
        writeMetaFile();
    }
    if (DBUtils.DEBUG && numOfFilesRewritten > 0) {
        UI.write("Rewritten " + numOfFilesRewritten + " files for " + getName());
    }
}
/**
 * Rewrites the dirty files of one bucket: re-reads and merges their values,
 * writes them back (sorted) via temp files, merging files that became too
 * small and splitting files that exceed the target size.
 * Returns the number of files that were rewritten.
 */
private int rewriteBucket(FileBucket bucket, boolean forceClean) {
    bucket.lockWrite();
    try {
        int numOfRewrittenFiles = 0;
        for (int fileInd = 0; fileInd < bucket.getFiles().size() && (!closeWasRequested() || forceClean); fileInd++) {
            FileInfo file = bucket.getFiles().get(fileInd);
            boolean needsRewrite;
            long targetSize;
            if (inReadPhase() || forceClean) {
                //read phase: every dirty file must be rewritten
                needsRewrite = !file.isClean();
                targetSize = MAX_FILE_SIZE_READ;
            } else {
                //write phase: rewrite dirty files probabilistically, more likely the larger they grow
                double probOfRewriteForSize = file.getWriteSize() * 4.0 / MAX_FILE_SIZE_WRITE - 3.0;
                needsRewrite = !file.isClean() && Math.random() < probOfRewriteForSize;
                targetSize = MAX_FILE_SIZE_READ;
            }
            if (needsRewrite) {
                // UI.write("Will rewrite file " + file.getFirstKey() + " " + getName() + " clean=" + file.isClean() + " force=" + forceClean + " readSize=" + file.getReadSize() + " writeSize=" + file.getWriteSize() + " targetSize=" + targetSize);
                List<KeyValue<T>> values = readAllValues(file);
                int filesMergedWithThisFile = inWritePhase() ? 0 : mergeFileIfTooSmall(bucket.getFiles(), fileInd, file.getWriteSize(), targetSize, values);
                DataOutputStream dos = getOutputStreamToTempFile(file);
                List<Pair<Long, Integer>> fileLocations = new ArrayList<>();
                int currentSizeOfFile = 0;
                for (KeyValue<T> entry : values) {
                    long key = entry.getKey();
                    T value = entry.getValue();
                    // serialize into a scratch buffer first so we know the entry's size
                    // before deciding whether it still fits into the current file
                    ByteArrayOutputStream bos = new ByteArrayOutputStream();
                    DataOutputStream tmpOutputStream = new DataOutputStream(bos);
                    writeValue(tmpOutputStream, key, value);
                    byte[] dataToWrite = bos.toByteArray();
                    if (currentSizeOfFile > 0 && currentSizeOfFile + dataToWrite.length > targetSize) {
                        //Create new file
                        if (filesMergedWithThisFile > 0) {
                            throw new RuntimeException("Something went wrong! Merged file and then created new file?");
                        }
                        dos.close();
                        swapTempForReal(file);
                        file.fileWasRewritten(sample(fileLocations, 200), currentSizeOfFile, currentSizeOfFile);
                        fileLocations = new ArrayList<>();
                        file = new FileInfo(key, 0, 0);
                        currentSizeOfFile = 0;
                        bucket.getFiles().add(fileInd + 1, file);
                        fileInd++; //skip the file we just inserted
                        dos = getOutputStreamToTempFile(file);
                    }
                    fileLocations.add(new Pair<>(key, currentSizeOfFile));
                    dos.write(dataToWrite);
                    currentSizeOfFile += dataToWrite.length;
                }
                swapTempForReal(file);
                file.fileWasRewritten(sample(fileLocations, 200), currentSizeOfFile, currentSizeOfFile);
                // NOTE(review): dos is closed after the temp file has been moved over
                // the real one; buffered bytes still reach the same inode on POSIX,
                // but confirm this ordering is safe on all target platforms.
                dos.close();
                numOfRewrittenFiles++;
            }
        }
        boolean allFilesClean = allFilesClean(bucket);
        if (allFilesClean) {
            bucket.setShouldBeCleanedBeforeRead(false);
        }
        if (numOfRewrittenFiles > 0) {
            metaFileOutOfSync = true;
        }
        return numOfRewrittenFiles;
    } catch (Exception exp) {
        UI.writeError("Unexpected exception while rewriting files", exp);
        throw new RuntimeException("Unexpected exception while rewriting files", exp);
    } finally {
        bucket.unlockWrite();
    }
}
private boolean allFilesClean(FileBucket bucket) {
    // A bucket counts as clean only when every one of its files is clean;
    // bail out on the first dirty file.
    for (FileInfo file : bucket.getFiles()) {
        if (!file.isClean()) {
            return false;
        }
    }
    return true;
}
private void deleteFile(FileInfo file) {
    // Remove the backing file from disk; a failed delete would leave stale data.
    File target = toFile(file);
    if (!target.delete()) {
        throw new RuntimeException("Failed to delete file " + target.getAbsolutePath());
    }
}
private void dataWasWritten() {
    // Record the write timestamp (drives the read/write phase heuristic) and
    // flag the meta file as stale.
    timeOfLastWrite = System.currentTimeMillis();
    metaFileOutOfSync = true;
}
private void dataWasRead() {
    // Record the read timestamp (drives the read/write phase heuristic).
    timeOfLastRead = System.currentTimeMillis();
}
private boolean inReadPhase() {
    // The store is either in the read phase or the write phase, never both.
    return !inWritePhase();
}
private boolean inWritePhase() {
    // We are in the write phase when writes happened more recently than reads
    // AND no read has occurred in the last 10 seconds; file-rewrite policy is
    // relaxed in this phase (see rewriteBucket).
    return timeOfLastWrite > timeOfLastRead && System.currentTimeMillis() - timeOfLastRead > 10 * 1000;
}
private void updateSizeOfCachedFileContents(long byteDiff) {
    // Adjust the cached-bytes counter (positive or negative delta); guarded by
    // a dedicated lock because caching happens from multiple threads.
    synchronized (sizeOfCachedFileContentsLock) {
        currentSizeOfCachedFileContents += byteDiff;
    }
}
private void writeLockAllBuckets() {
    // Acquire the write lock of every bucket, in list order.
    fileBuckets.forEach(FileBucket::lockWrite);
}
private void writeUnlockAllBuckets() {
    // Release the write lock of every bucket, in list order.
    fileBuckets.forEach(FileBucket::unlockWrite);
}
private void readLockAllBuckets() {
    // Acquire the read lock of every bucket, in list order.
    fileBuckets.forEach(FileBucket::lockRead);
}
private void readUnlockAllBuckets() {
    // Release the read lock of every bucket, in list order.
    fileBuckets.forEach(FileBucket::unlockRead);
}
private void lockForRead(FileBucket bucket) {
    // Take the read lock, first making sure the bucket is clean. Cleaning needs
    // the write lock, so we must drop the read lock, rewrite, and re-check in a
    // loop (another thread may dirty the bucket again in between).
    bucket.lockRead();
    while (bucket.shouldBeCleanedBeforeRead()) {
        bucket.unlockRead();
        rewriteBucket(bucket, true);
        bucket.lockRead();
    }
}
private void swapTempForReal(FileInfo file) throws IOException {
    // Atomically replace the real data file with its rewritten temp version.
    // Cached contents of the old version are discarded first; synchronized on
    // the file object to coordinate with the caching in getReadBuffer.
    synchronized (file) {
        long releasedBytes = file.discardFileContents();
        updateSizeOfCachedFileContents(-releasedBytes);
    }
    Files.move(toTempFile(file).toPath(), toFile(file).toPath(), StandardCopyOption.ATOMIC_MOVE, StandardCopyOption.REPLACE_EXISTING);
}
private int mergeFileIfTooSmall(List<FileInfo> fileList, int currentFileInd, long combinedSize, long maxFileSize, List<KeyValue<T>> values) {
    // Greedily absorbs subsequent files into the current one as long as the
    // combined size stays under maxFileSize: their values are appended to
    // 'values' (mutated in place) and their on-disk files are deleted.
    // Returns the number of files that were merged away.
    int nextFileInd = currentFileInd + 1;
    while (nextFileInd < fileList.size() && combinedSize + fileList.get(nextFileInd).getWriteSize() < maxFileSize) {
        //Combine the files
        FileInfo nextFile = fileList.remove(nextFileInd);
        values.addAll(readAllValues(nextFile));
        combinedSize += nextFile.getWriteSize();
        deleteFile(nextFile);
    }
    return nextFileInd - currentFileInd - 1;
}
private int writeValue(DataOutputStream dos, long key, T value) throws IOException {
    // Writes one entry: an 8-byte key followed either by a length-prefixed
    // byte blob (variable-width values, sizeOfValues == -1) or by the raw
    // fixed-width bytes. Returns the number of bytes appended.
    dos.writeLong(key);
    byte[] objectAsBytes = SerializationUtils.objectToBytesCheckForNull(value, getObjectClass());
    if (sizeOfValues == -1) {
        dos.writeInt(objectAsBytes.length);
        dos.write(objectAsBytes);
        return 8 + 4 + objectAsBytes.length;
    } else {
        dos.write(objectAsBytes);
        return 8 + sizeOfValues;
    }
}
private ReadValue<T> readValue(byte[] buffer, int position) throws IOException {
    // Decode one value starting at 'position'. Variable-width values carry a
    // 4-byte length prefix; fixed-width values (sizeOfValues != -1) do not.
    final int valueLength;
    final int headerLength;
    if (sizeOfValues == -1) {
        valueLength = SerializationUtils.bytesToInt(buffer, position);
        headerLength = INT_SIZE;
    } else {
        valueLength = sizeOfValues;
        headerLength = 0;
    }
    T value = SerializationUtils.bytesToObjectCheckForNull(buffer, position + headerLength, valueLength, getObjectClass());
    return new ReadValue<>(valueLength + headerLength, value);
}
private List<FileBucket> createEmptyFileBuckets() {
    // Partitions the full signed-long key space into 2^(64 - BITS_TO_DISCARD)
    // equally sized buckets. Each bucket covers [val << BITS, ((val+1) << BITS) - 1].
    List<FileBucket> bucket = new ArrayList<>(1 << (64 - BITS_TO_DISCARD_FOR_FILE_BUCKETS));
    long start = Long.MIN_VALUE >> BITS_TO_DISCARD_FOR_FILE_BUCKETS;
    long end = Long.MAX_VALUE >> BITS_TO_DISCARD_FOR_FILE_BUCKETS;
    for (long val = start; val <= end; val++) {
        long firstKey = val << BITS_TO_DISCARD_FOR_FILE_BUCKETS;
        long lastKey = ((val + 1) << BITS_TO_DISCARD_FOR_FILE_BUCKETS) - 1;
        if (lastKey < firstKey) {
            //overflow: the last bucket's range is clamped at Long.MAX_VALUE
            lastKey = Long.MAX_VALUE;
        }
        bucket.add(new FileBucket(firstKey, lastKey));
    }
    return bucket;
}
private void checkDataDir() {
    // Create the data directory on first use; a plain file at that path is fatal.
    if (!directory.exists() && !directory.mkdirs()) {
        throw new RuntimeException("Failed to create directory " + directory.getAbsolutePath());
    }
    if (directory.isFile()) {
        throw new IllegalArgumentException("File should be directory but is file! " + directory.getAbsolutePath());
    }
}
private void initializeFiles(MetaFile metaFile) {
    // Restores in-memory bucket/file state from the meta file when it matches
    // the directory contents; otherwise reconstructs it from the files on disk.
    String[] filesInDir = this.directory.list();
    if (metaFile != null && metaFileUpToDate(metaFile, filesInDir)) {
        metaFileOutOfSync = false;
        timeOfLastRead = metaFile.getLastRead();
        timeOfLastWrite = metaFile.getLastWrite();
        fileBuckets = metaFile.getFileBuckets();
    } else {
        metaFileOutOfSync = true;
        timeOfLastRead = timeOfLastWrite = 0;
        fileBuckets = createEmptyFileBuckets();
        if (filesInDir.length > 0) {
            UI.write("Missing (up-to-date) meta information for " + getName() + " will reconstruct data structures from files found in directory.");
            updateBucketsFromFiles(filesInDir);
        }
        makeSureAllFileBucketsHaveAtLeastOneFile();
    }
}
/**
 * Validates the meta file against the actual directory contents: every data
 * file on disk (named by its first key) must be known to its bucket with a
 * matching size, and every bucket must be non-empty and sorted by first key.
 */
private boolean metaFileUpToDate(MetaFile metaFile, String[] filesInDir) {
    for (String file : filesInDir) {
        // data files are named by a (possibly negative) decimal long
        if (file.matches("-?[0-9]+")) {
            long key = Long.parseLong(file);
            FileBucket bucket = getBucket(metaFile.getFileBuckets(), key);
            long sizeOnDisk = new File(directory, file).length();
            FileInfo fileInfo = bucket.getFile(key);
            if (fileInfo.getFirstKey() != key) {
                return false; //the name of the file on disk should be equal to the first key
            }
            if (fileInfo.getWriteSize() != sizeOnDisk) {
                return false; //the file write size should be equal to the size on disk
            }
            if (!fileInfo.isClean() && !bucket.shouldBeCleanedBeforeRead()) {
                return false; //if the file is dirty, the bucket should be marked as 'shouldBeCleanedBeforeRead'
            }
        }
    }
    for (FileBucket fileBucket : metaFile.getFileBuckets()) {
        if (fileBucket.getFiles().isEmpty()) {
            return false; //every bucket should contain at least one file
        }
        if (fileBucket.getFirstKey() != fileBucket.getFiles().get(0).getFirstKey()) {
            return false; //the first key of the bucket should match the first key of the first file
        }
        for (int i = 0; i < fileBucket.getFiles().size() - 1; i++) {
            if (fileBucket.getFiles().get(i).getFirstKey() >= fileBucket.getFiles().get(i + 1).getFirstKey()) {
                return false; //files should be sorted according to first key
            }
        }
    }
    return true; //all good!
}
private void updateBucketsFromFiles(String[] filesInDir) {
    // Rebuilds bucket state from the data files found on disk. Reconstructed
    // files have readSize 0, so any non-empty file must be cleaned (rewritten)
    // before it can be read.
    for (String file : filesInDir) {
        // data files are named by a (possibly negative) decimal long
        if (file.matches("-?[0-9]+")) {
            long key = Long.parseLong(file);
            FileBucket bucket = getBucket(key);
            long sizeOnDisk = new File(directory, file).length();
            FileInfo fileInfo = new FileInfo(key, 0, (int) sizeOnDisk);
            bucket.getFiles().add(fileInfo);
            bucket.setShouldBeCleanedBeforeRead(bucket.shouldBeCleanedBeforeRead() || sizeOnDisk > 0);
        }
    }
}
private void makeSureAllFileBucketsHaveAtLeastOneFile() {
    // Invariant: every bucket owns at least one file, its files are sorted by
    // first key, and the first file starts at the bucket's first key.
    for (FileBucket bucket : fileBuckets) {
        if (bucket.getFiles().isEmpty()) {
            //We need at least one file per bucket..
            FileInfo first = new FileInfo(bucket.getFirstKey(), 0, 0);
            try {
                boolean success = toFile(first).createNewFile();
                if (!success) {
                    throw new RuntimeException("Failed to create new file " + first + " at " + toFile(first).getAbsolutePath());
                } else {
                    bucket.getFiles().add(first);
                }
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        } else {
            Collections.sort(bucket.getFiles());
            if (bucket.getFirstKey() != bucket.getFiles().get(0).getFirstKey()) {
                throw new RuntimeException("Missing file in " + getName() + " ? Expected file " + new File(directory, Long.toString(bucket.getFirstKey())).getAbsolutePath());
            }
        }
    }
}
/**
 * Reads the persisted meta information, or returns null when the meta file is
 * absent or unreadable (the caller then reconstructs state from disk).
 * Uses try-with-resources: the original leaked the stream when readObject threw.
 */
private MetaFile readMetaInfo() {
    File cleanFilesFile = new File(directory, META_FILE);
    if (cleanFilesFile.exists()) {
        try (InputStream fis = new BufferedInputStream(new FileInputStream(cleanFilesFile))) {
            return SerializationUtils.readObject(MetaFile.class, fis);
        } catch (Exception exp) {
            UI.writeError("Received exception while reading " + cleanFilesFile.getAbsolutePath(), exp);
        }
    }
    return null;
}
/**
 * Persists the bucket/file state plus read/write timestamps. All buckets are
 * read-locked for a consistent snapshot. On failure, metaFileOutOfSync is set
 * back to true so the write is retried later.
 * The output stream is now in try-with-resources: the original leaked the
 * FileOutputStream when writeObject threw.
 */
private synchronized void writeMetaFile() {
    readLockAllBuckets();
    metaFileOutOfSync = false;
    File outputFile = new File(directory, META_FILE);
    try {
        MetaFile metaFile = new MetaFile(fileBuckets, timeOfLastWrite, timeOfLastRead);
        try (FileOutputStream fos = new FileOutputStream(outputFile)) {
            SerializationUtils.writeObject(metaFile, fos);
        }
    } catch (Exception exp) {
        metaFileOutOfSync = true;
        throw new RuntimeException("Received exception while writing list of clean files to " + outputFile.getAbsolutePath(), exp);
    } finally {
        readUnlockAllBuckets();
    }
}
private FileBucket getBucket(long key) {
    // Convenience overload over this store's own bucket list.
    return getBucket(fileBuckets, key);
}
private FileBucket getBucket(List<FileBucket> fileBuckets, long key) {
    // Buckets partition the signed-long key space evenly: drop the low bits,
    // then shift the (signed) result into a non-negative list index by adding
    // half the bucket count.
    int ind = (int) ((key >> BITS_TO_DISCARD_FOR_FILE_BUCKETS) + fileBuckets.size() / 2);
    return fileBuckets.get(ind);
}
/**
 * Returns a buffer covering [requestedStartPos, requestedEndPos) of the given
 * file. If the contents are cached (or memory allows caching the whole file),
 * the full cached array is returned with offset 0; otherwise only the
 * requested range is read from disk, returned with offset requestedStartPos.
 */
private ReadBuffer getReadBuffer(FileInfo file, int requestedStartPos, int requestedEndPos) throws IOException {
    byte[] fileContents = file.getCachedFileContents();
    if (fileContents == null) {
        if (memoryManager.getMemoryStatus() == MemoryStatus.FREE && currentSizeOfCachedFileContents < maxSizeOfCachedFileContents) {
            //cache file contents. Lock on file object to make sure we don't read the content in parallel (this messes up the currentSizeOfCachedFileContents variable and is not very efficient)
            synchronized (file) {
                fileContents = file.getCachedFileContents();
                if (fileContents == null) {
                    fileContents = new byte[file.getReadSize()];
                    // NOTE(review): fis is not closed if the size check below throws;
                    // consider try-with-resources.
                    FileInputStream fis = new FileInputStream(toFile(file));
                    int bytesRead = fis.read(fileContents);
                    if (bytesRead != file.getReadSize()) {
                        throw new RuntimeException("Read " + bytesRead + " bytes, while we expected " + file.getReadSize() + " bytes in file " + toFile(file).getAbsolutePath() + " which currently has size " + toFile(file).length());
                    }
                    updateSizeOfCachedFileContents(fileContents.length);
                    IOUtils.closeQuietly(fis);
                }
                file.setCachedFileContents(fileContents);
            }
            return new ReadBuffer(fileContents, 0);
        } else {
            //no memory available for caching: read only the requested range
            FileInputStream fis = new FileInputStream(toFile(file));
            long bytesSkipped = fis.skip(requestedStartPos);
            if (bytesSkipped != requestedStartPos) {
                throw new RuntimeException("Skipped " + bytesSkipped + " bytes, while we expected to skip " + requestedStartPos + " bytes in file " + toFile(file).getAbsolutePath() + " which currently has size " + toFile(file).length());
            }
            byte[] buffer = new byte[requestedEndPos - requestedStartPos];
            int bytesRead = fis.read(buffer);
            if (bytesRead != buffer.length) {
                throw new RuntimeException("Read " + bytesRead + " bytes, while we expected " + file.getReadSize() + " bytes in file " + toFile(file).getAbsolutePath() + " which currently has size " + toFile(file).length());
            }
            IOUtils.closeQuietly(fis);
            return new ReadBuffer(buffer, requestedStartPos);
        }
    } else {
        if (fileContents.length != file.getReadSize()) {
            throw new RuntimeException("Buffer and file size don't match!");
        }
        return new ReadBuffer(fileContents, 0);
    }
}
private int skipValue(byte[] buffer, int position) throws IOException {
    // Returns how many bytes the value at 'position' occupies, without decoding
    // it. Fixed-width primitives need no length prefix; other types store one.
    Class<T> clazz = getObjectClass();
    if (clazz == Long.class || clazz == Double.class) {
        return LONG_SIZE;
    }
    if (clazz == Integer.class || clazz == Float.class) {
        return INT_SIZE;
    }
    return INT_SIZE + SerializationUtils.bytesToInt(buffer, position);
}
private DataOutputStream getAppendingOutputStream(FileInfo fileInfo) throws FileNotFoundException {
    // Opens the backing data file in append mode; the caller must close the stream.
    FileOutputStream appendStream = new FileOutputStream(toFile(fileInfo), true);
    return new DataOutputStream(new BufferedOutputStream(appendStream));
}
private DataOutputStream getOutputStreamToTempFile(FileInfo fileInfo) throws FileNotFoundException {
    // Opens (truncating) the temp file used for rewrites; the caller must close it.
    FileOutputStream truncatingStream = new FileOutputStream(toTempFile(fileInfo), false);
    return new DataOutputStream(new BufferedOutputStream(truncatingStream));
}
private File toFile(FileInfo info) {
    // A data file's on-disk name is simply its first key, rendered in decimal.
    if (directory == null) {
        throw new RuntimeException("Directory is null, probably the data interface was closed already!");
    }
    String fileName = Long.toString(info.getFirstKey());
    return new File(directory, fileName);
}
private File toTempFile(FileInfo info) {
    // Temp files used during rewrites carry a "tmp." prefix before the first key.
    if (directory == null) {
        throw new RuntimeException("Directory is null, probably the data interface was closed already!");
    }
    String fileName = "tmp." + Long.toString(info.getFirstKey());
    return new File(directory, fileName);
}
private Map<Long, T> readMap(FileInfo file) {
    // Indexes the clean values of a file by key for random lookups.
    List<KeyValue<T>> entries = readCleanValues(file);
    Map<Long, T> byKey = new HashMap<>(entries.size());
    entries.forEach(entry -> byKey.put(entry.getKey(), entry.getValue()));
    return byKey;
}
private List<KeyValue<T>> readCleanValues(FileInfo file) {
    // Reads all entries of a clean (already rewritten, sorted) file: a sequence
    // of 8-byte keys each followed by a serialized value.
    try {
        byte[] buffer = getReadBuffer(file, 0, file.getReadSize()).getBuffer();
        int expectedNumberOfValues = getLowerBoundOnNumberOfValues(file.getReadSize());
        List<KeyValue<T>> result = new ArrayList<>(expectedNumberOfValues);
        int position = 0;
        while (position < buffer.length) {
            long key = SerializationUtils.bytesToLong(buffer, position);
            position += LONG_SIZE;
            ReadValue<T> readValue = readValue(buffer, position);
            position += readValue.getSize();
            result.add(new KeyValue<>(key, readValue.getValue()));
        }
        dataWasRead();
        return result;
    } catch (Exception ex) {
        throw new RuntimeException("Unexpected exception while reading values from file " + toFile(file).getAbsolutePath(), ex);
    }
}
/**
 * Reads all entries of a (possibly dirty) file and returns them merged and
 * sorted. Entries are first distributed over sub-buckets by key range, then
 * each sub-bucket is merged into the result via the combinator, which keeps
 * the merge cost low for large files.
 */
private List<KeyValue<T>> readAllValues(FileInfo file) {
    try {
        byte[] buffer = readCompleteFile(file);
        if (buffer.length > 0) {
            int expectedNumberOfValues = getLowerBoundOnNumberOfValues(file.getWriteSize());
            List<KeyValue<T>> result = new ArrayList<>(expectedNumberOfValues);
            //read values in buckets
            int numberOfBuckets = Math.max(1, expectedNumberOfValues / 1000);
            List[] buckets = new List[numberOfBuckets];
            for (int i = 0; i < buckets.length; i++) {
                buckets[i] = new ArrayList(expectedNumberOfValues / numberOfBuckets);
            }
            long start = file.getFirstKey();
            //each sub-bucket covers an equal slice of this file's key range
            long density = (1l << BITS_TO_DISCARD_FOR_FILE_BUCKETS) / numberOfBuckets;
            int position = 0;
            while (position < buffer.length) {
                long key = SerializationUtils.bytesToLong(buffer, position);
                position += LONG_SIZE;
                ReadValue<T> readValue = readValue(buffer, position);
                position += readValue.getSize();
                int bucketInd = (int) ((key - start) / density);
                if (bucketInd == buckets.length) {
                    bucketInd--; //rounding error?
                }
                buckets[bucketInd].add(new KeyValue<>(key, readValue.getValue()));
            }
            for (int bucketInd = 0; bucketInd < buckets.length; bucketInd++) {
                List<KeyValue<T>> currentBucket = buckets[bucketInd];
                DBUtils.mergeValues(result, currentBucket, getCombinator());
                buckets[bucketInd] = null; //Free some memory
            }
            return result;
        } else {
            return Collections.emptyList();
        }
    } catch (Exception ex) {
        throw new RuntimeException("Unexpected exception while reading values from file " + toFile(file).getAbsolutePath(), ex);
    }
}
/**
 * Reads the complete on-disk contents of the given file (writeSize bytes).
 * Uses try-with-resources: the original leaked the FileInputStream when the
 * size check threw.
 */
private byte[] readCompleteFile(FileInfo file) throws IOException {
    byte[] buffer = new byte[file.getWriteSize()];
    try (FileInputStream fis = new FileInputStream(toFile(file))) {
        int bytesRead = fis.read(buffer);
        // an empty file legitimately reads -1 bytes; any other short read is an error
        if (bytesRead != buffer.length && !(buffer.length == 0 && bytesRead == -1)) {
            throw new RuntimeException("Read " + bytesRead + " bytes, while we expected " + buffer.length + " bytes in file " + toFile(file).getAbsolutePath() + " which currently has size " + toFile(file).length());
        }
    }
    return buffer;
}
private int getLowerBoundOnNumberOfValues(int sizeOfFile) {
    // Each entry takes an 8-byte key plus at least the value width; for
    // variable-width values assume 4 bytes (the length prefix alone), which
    // keeps this a lower bound.
    int valueWidth = sizeOfValues == -1 ? 4 : sizeOfValues;
    return sizeOfFile / (8 + valueWidth);
}
private List<Long> readKeys(FileInfo file) throws IOException {
    // Reads only the keys of a clean file, skipping over the value bytes.
    List<Long> result = new ArrayList<>();
    byte[] buffer = getReadBuffer(file, 0, file.getReadSize()).getBuffer();
    int position = 0;
    while (position < buffer.length) {
        result.add(SerializationUtils.bytesToLong(buffer, position));
        position += LONG_SIZE;
        position += skipValue(buffer, position);
    }
    dataWasRead();
    return result;
}
private List<Pair<Long, Integer>> sample(List<Pair<Long, Integer>> fileLocations, int invSampleRate) {
    // Keeps every invSampleRate-th location (always including the first one);
    // the sampled index is used for coarse seeks within a file.
    List<Pair<Long, Integer>> sampled = new ArrayList<>(fileLocations.size() / invSampleRate);
    for (int i = 0; i < fileLocations.size(); i += invSampleRate) {
        sampled.add(fileLocations.get(i));
    }
    return sampled;
}
/**
 * Verifies that this instance still owns the directory lock. If another data
 * interface took it over, scribbles a random id into the lock file (to alert
 * the other party) and closes this interface.
 * Fixes two defects: the stream is now closed via try-with-resources, and the
 * wrapping RuntimeException now carries the original exception as its cause
 * (the original dropped it, hiding the real failure).
 */
private void checkLock() {
    File lockFile = new File(directory, LOCK_FILE);
    try {
        long id;
        try (DataInputStream dis = new DataInputStream(new FileInputStream(lockFile))) {
            id = dis.readLong();
        }
        if (randomId != id) {
            writeLockFile(new Random().nextLong()); //try to notify other data interface that something is fucked up
            UI.writeError("The lock in " + lockFile.getAbsolutePath() + " was obtained by another data interface! Closing data interface. This will probably cause a lot of other errors...");
            close();
        }
    } catch (Exception exp) {
        throw new RuntimeException("Unexpected exception while trying to read lock file " + lockFile.getAbsolutePath(), exp);
    }
}
/**
 * Writes the given owner id into the directory's lock file.
 * Uses try-with-resources: the original leaked the stream when writeLong threw.
 */
private void writeLockFile(long id) {
    File lockFile = new File(directory, LOCK_FILE);
    try (DataOutputStream dos = new DataOutputStream(new FileOutputStream(lockFile))) {
        dos.writeLong(id);
    } catch (Exception exp) {
        throw new RuntimeException("Unexpected exception while trying to write lock file to " + lockFile.getAbsolutePath(), exp);
    }
}
private long getBatchSize() {
    // Fixed-width (primitive) values are cheap to hold in memory, so their
    // batches can be larger than those for variable-width values.
    return SerializationUtils.getWidth(getObjectClass()) == -1 ? BATCH_SIZE_NON_PRIMITIVE_VALUES : BATCH_SIZE_PRIMITIVE_VALUES;
}
/**
 * Raw bytes read from a data file together with the file offset at which the
 * buffer starts (0 when the whole file was read/cached).
 */
private static class ReadBuffer {
    private final byte[] buffer;
    private final int offset;

    private ReadBuffer(byte[] data, int startOffset) {
        this.buffer = data;
        this.offset = startOffset;
    }

    public byte[] getBuffer() {
        return buffer;
    }

    public int getOffset() {
        return offset;
    }
}
/**
 * A single deserialized value together with the number of bytes it occupied
 * on disk (including its length prefix, if any).
 */
private static class ReadValue<T> {
    private int size;
    private T value;

    private ReadValue(int bytesConsumed, T deserialized) {
        this.size = bytesConsumed;
        this.value = deserialized;
    }

    public int getSize() {
        return size;
    }

    public T getValue() {
        return value;
    }
}
/**
 * Walks over all files of all buckets in order. Each call to
 * lockCurrentBucketAndGetNextFile returns the next (bucket, file) pair with
 * the bucket READ-LOCKED; the caller must call bucket.unlockRead() when done
 * with the file. Returns null once all files are exhausted.
 */
private class FileIterator {
    private int currentBucketInd = 0;
    private int fileInd = 0;
    public Pair<FileBucket, FileInfo> lockCurrentBucketAndGetNextFile() {
        if (currentBucketInd < fileBuckets.size()) {
            FileBucket bucket = fileBuckets.get(currentBucketInd);
            lockForRead(bucket);
            // skip buckets whose files are exhausted, releasing each lock
            // before moving on to (and locking) the next bucket
            while (currentBucketInd < fileBuckets.size() && fileInd >= bucket.getFiles().size()) {
                fileInd = 0;
                bucket.unlockRead();
                currentBucketInd++;
                if (currentBucketInd < fileBuckets.size()) {
                    bucket = fileBuckets.get(currentBucketInd);
                    lockForRead(bucket);
                }
            }
            if (currentBucketInd < fileBuckets.size()) {
                return new Pair<>(bucket, bucket.getFiles().get(fileInd++));
            }
        }
        return null;
    }
}
/**
 * Serializable snapshot of the store's state: the bucket/file layout plus the
 * last read and write timestamps. Setters and the no-arg constructor exist
 * for the serialization framework.
 */
public static class MetaFile {
    private List<FileBucket> fileBuckets;
    private long lastWrite;
    private long lastRead;

    public MetaFile(List<FileBucket> fileBuckets, long lastWrite, long lastRead) {
        this.fileBuckets = fileBuckets;
        this.lastWrite = lastWrite;
        this.lastRead = lastRead;
    }

    //Constructor used in serialization
    public MetaFile() {
    }

    public List<FileBucket> getFileBuckets() {
        return fileBuckets;
    }

    public void setFileBuckets(List<FileBucket> fileBuckets) {
        this.fileBuckets = fileBuckets;
    }

    public long getLastWrite() {
        return lastWrite;
    }

    public void setLastWrite(long lastWrite) {
        this.lastWrite = lastWrite;
    }

    public long getLastRead() {
        return lastRead;
    }

    public void setLastRead(long lastRead) {
        this.lastRead = lastRead;
    }
}
} |
package ch.pontius.nio.smb;
import jcifs.smb.SmbFile;
import jcifs.smb.SmbRandomAccessFile;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.SeekableByteChannel;
import java.nio.file.FileAlreadyExistsException;
public final class SeekableSMBByteChannel implements SeekableByteChannel {
    /** Internal {@link SmbRandomAccessFile} reference to write to {@link SmbFile}. */
    private final SmbRandomAccessFile random;
    /** Boolean indicating whether this instance of {@link SeekableSMBByteChannel} is open. */
    private volatile boolean open = true;
    /**
     * Constructor for {@link SeekableSMBByteChannel}
     *
     * @param file The {@link SmbFile} instance that should be opened.
     * @param write Flag that indicates, whether write access is requested.
     * @param create Flag that indicates, whether file should be created.
     * @param create_new Flag that indicates, whether file should be created. If it is set to true, operation will fail if file exists!
     * @param truncate Flag that indicates, whether file should be truncated to length 0 when being opened.
     * @param append Flag that indicates, whether data should be appended (the file pointer starts at the end of the file).
     * @throws IOException If something goes wrong when accessing the file.
     */
    SeekableSMBByteChannel(SmbFile file, boolean write, boolean create, boolean create_new, boolean truncate, boolean append) throws IOException {
        /* Tries to create a new file, if so specified. */
        // NOTE(review): exists()/createNewFile() is not atomic here — a concurrent
        // creator could slip in between; confirm whether that race matters for callers.
        if (create || create_new) {
            if (file.exists()) {
                if (create_new) throw new FileAlreadyExistsException("The specified file '" + file.getPath() + "' does already exist!");
            } else {
                file.createNewFile();
            }
        }
        /* Opens the file with either read only or write access. */
        if (write) {
            file.setReadWrite();
            this.random = new SmbRandomAccessFile(file, "rw");
            if (truncate) this.random.setLength(0);
            if (append) this.random.seek(this.random.length());
        } else {
            file.setReadOnly();
            this.random = new SmbRandomAccessFile(file, "r");
        }
    }
    /**
     * Reads the content from the {@link SmbRandomAccessFile} handled by the current instance of {@link SeekableSMBByteChannel} to
     * the provided {@link ByteBuffer}. The {@link ByteBuffer} is written from its current position to its end.
     *
     * @param dst {@link ByteBuffer} to which to write the data.
     * @return Number of bytes that were read (may be -1 if the underlying read signals end-of-stream).
     * @throws IOException If something goes wrong while reading from the file.
     */
    @Override
    public synchronized int read(ByteBuffer dst) throws IOException {
        if (!this.open) throw new ClosedChannelException();
        // len is dst.remaining(): at most that many bytes are read
        final int len = dst.limit() - dst.position();
        final byte[] buffer = new byte[len];
        final int read = this.random.read(buffer);
        if (read > 0) dst.put(buffer, 0, read);
        return read;
    }
    /**
     * Writes the content of the provided {@link ByteBuffer} into the {@link SmbRandomAccessFile} handled by the current
     * instance of {@link SeekableSMBByteChannel}. The {@link ByteBuffer} is read from its current position to its end.
     *
     * @param src {@link ByteBuffer} from which to read the data.
     * @return Number of bytes that were written.
     * @throws IOException If something goes wrong while writing to the file.
     */
    @Override
    public synchronized int write(ByteBuffer src) throws IOException {
        if (!this.open) throw new ClosedChannelException();
        final int len = src.limit() - src.position();
        final byte[] buffer = new byte[len];
        src.get(buffer);
        this.random.write(buffer);
        return len;
    }
    /**
     * Returns the position of the pointer into the {@link SmbRandomAccessFile} that is handled by the current instance of {@link SeekableSMBByteChannel}
     *
     * @return Current position within the file.
     * @throws IOException If something goes wrong while trying to determine the file pointer.
     */
    @Override
    public synchronized long position() throws IOException {
        if (!this.open) throw new ClosedChannelException();
        return this.random.getFilePointer();
    }
    /**
     * Returns the size of the file handled by the current instance of {@link SeekableSMBByteChannel}. The size
     * is given in number of bytes.
     *
     * @return size Size of the SMB file.
     * @throws IOException If something goes wrong while trying to determine file size.
     */
    @Override
    public synchronized long size() throws IOException {
        if (!this.open) throw new ClosedChannelException();
        return this.random.length();
    }
    /**
     * Tries to reposition the pointer into the {@link SmbRandomAccessFile} that is handled by the current instance of {@link SeekableSMBByteChannel}
     *
     * @param newPosition New position within the file.
     * @return Current instance of {@link SeekableSMBByteChannel}.
     * @throws IOException If something goes wrong while seeking.
     */
    @Override
    public synchronized SeekableByteChannel position(long newPosition) throws IOException {
        if (!this.open) throw new ClosedChannelException();
        this.random.seek(newPosition);
        return this;
    }
    /**
     * Truncates the {@link SmbRandomAccessFile} by setting its length to the provided value.
     *
     * NOTE(review): the {@link SeekableByteChannel} contract requires the position to be
     * set to {@code size} when it is greater than the new size; whether
     * {@code setLength} adjusts the pointer accordingly depends on jcifs — confirm.
     *
     * @param size New size of the file.
     * @return Current instance of {@link SeekableSMBByteChannel}.
     * @throws IOException If something goes wrong during truncation.
     */
    @Override
    public synchronized SeekableByteChannel truncate(long size) throws IOException {
        if (!this.open) throw new ClosedChannelException();
        this.random.setLength(size);
        return this;
    }
    /**
     * Determines whether the current {@link SeekableSMBByteChannel} is still opened.
     *
     * @return True if the channel is open and false otherwise.
     */
    @Override
    public synchronized boolean isOpen() {
        return this.open;
    }
    /**
     * Closes the current {@link SeekableSMBByteChannel}. After that, it is not possible to either read from or
     * write to the channel.
     *
     * @throws IOException If something goes wrong while closing the channel.
     */
    @Override
    public synchronized void close() throws IOException {
        if (!this.open) throw new ClosedChannelException();
        this.open = false;
        this.random.close();
    }
}
package cn.momia.mapi.api.v1.teacher;
import cn.momia.api.course.CourseServiceApi;
import cn.momia.api.course.dto.Course;
import cn.momia.api.course.dto.CourseSku;
import cn.momia.api.course.dto.TeacherCourse;
import cn.momia.api.teacher.OldTeacherServiceApi;
import cn.momia.api.teacher.dto.Material;
import cn.momia.api.teacher.dto.Student;
import cn.momia.api.user.ChildServiceApi;
import cn.momia.api.user.TeacherServiceApi;
import cn.momia.api.user.UserServiceApi;
import cn.momia.api.user.dto.Child;
import cn.momia.api.user.dto.ChildComment;
import cn.momia.api.user.dto.ChildRecord;
import cn.momia.api.user.dto.ChildTag;
import cn.momia.api.user.dto.Teacher;
import cn.momia.api.user.dto.TeacherStatus;
import cn.momia.api.user.dto.User;
import cn.momia.common.core.dto.PagedList;
import cn.momia.common.core.http.MomiaHttpResponse;
import cn.momia.common.core.util.TimeUtil;
import cn.momia.common.webapp.config.Configuration;
import cn.momia.mapi.api.AbstractApi;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
@RestController
@RequestMapping("/v1/teacher")
public class TeacherV1Api extends AbstractApi {
@Autowired private CourseServiceApi courseServiceApi;
@Autowired private ChildServiceApi childServiceApi;
@Autowired private UserServiceApi userServiceApi;
@Autowired private TeacherServiceApi teacherServiceApi;
@Autowired private OldTeacherServiceApi oldTeacherServiceApi;
@RequestMapping(value = "/status", method = RequestMethod.GET)
public MomiaHttpResponse status(@RequestParam String utoken) {
if (StringUtils.isBlank(utoken)) return MomiaHttpResponse.TOKEN_EXPIRED;
TeacherStatus status = teacherServiceApi.status(utoken);
if (status.getStatus() == TeacherStatus.Status.NOT_EXIST) return MomiaHttpResponse.SUCCESS(status);
Teacher teacher = completeTeacherImgs(teacherServiceApi.get(utoken));
JSONObject statusJson = (JSONObject) JSON.toJSON(teacher);
statusJson.put("status", status.getStatus());
statusJson.put("msg", status.getMsg());
return MomiaHttpResponse.SUCCESS(statusJson);
}
private Teacher completeTeacherImgs(Teacher teacher) {
teacher.setPic(completeSmallImg(teacher.getPic()));
return teacher;
}
@RequestMapping(value = "/signup", method = RequestMethod.POST)
public MomiaHttpResponse signup(@RequestParam String utoken, @RequestParam String teacher) {
if (StringUtils.isBlank(utoken)) return MomiaHttpResponse.TOKEN_EXPIRED;
if (StringUtils.isBlank(teacher)) return MomiaHttpResponse.BAD_REQUEST;
return MomiaHttpResponse.SUCCESS(teacherServiceApi.add(utoken, teacher));
}
@RequestMapping(value = "/experience", method = RequestMethod.POST)
public MomiaHttpResponse addExperience(@RequestParam String utoken, @RequestParam String experience) {
if (StringUtils.isBlank(utoken)) return MomiaHttpResponse.TOKEN_EXPIRED;
if (StringUtils.isBlank(experience)) return MomiaHttpResponse.BAD_REQUEST;
return MomiaHttpResponse.SUCCESS(teacherServiceApi.addExperience(utoken, experience));
}
@RequestMapping(value = "/experience", method = RequestMethod.GET)
public MomiaHttpResponse getExperience(@RequestParam String utoken, @RequestParam int id) {
if (StringUtils.isBlank(utoken)) return MomiaHttpResponse.TOKEN_EXPIRED;
if (id <= 0) return MomiaHttpResponse.BAD_REQUEST;
return MomiaHttpResponse.SUCCESS(teacherServiceApi.getExperience(utoken, id));
}
@RequestMapping(value = "/experience/delete", method = RequestMethod.POST)
public MomiaHttpResponse addExperience(@RequestParam String utoken, @RequestParam int id) {
if (StringUtils.isBlank(utoken)) return MomiaHttpResponse.TOKEN_EXPIRED;
if (id <= 0) return MomiaHttpResponse.BAD_REQUEST;
return MomiaHttpResponse.SUCCESS(teacherServiceApi.deleteExperience(utoken, id));
}
@RequestMapping(value = "/education", method = RequestMethod.POST)
public MomiaHttpResponse addEducation(@RequestParam String utoken, @RequestParam String education) {
if (StringUtils.isBlank(utoken)) return MomiaHttpResponse.TOKEN_EXPIRED;
if (StringUtils.isBlank(education)) return MomiaHttpResponse.BAD_REQUEST;
return MomiaHttpResponse.SUCCESS(teacherServiceApi.addEducation(utoken, education));
}
@RequestMapping(value = "/education", method = RequestMethod.GET)
public MomiaHttpResponse getEducation(@RequestParam String utoken, @RequestParam int id) {
if (StringUtils.isBlank(utoken)) return MomiaHttpResponse.TOKEN_EXPIRED;
if (id <= 0) return MomiaHttpResponse.BAD_REQUEST;
return MomiaHttpResponse.SUCCESS(teacherServiceApi.getEducation(utoken, id));
}
@RequestMapping(value = "/education/delete", method = RequestMethod.POST)
public MomiaHttpResponse addEducation(@RequestParam String utoken, @RequestParam int id) {
if (StringUtils.isBlank(utoken)) return MomiaHttpResponse.TOKEN_EXPIRED;
if (id <= 0) return MomiaHttpResponse.BAD_REQUEST;
return MomiaHttpResponse.SUCCESS(teacherServiceApi.deleteEducation(utoken, id));
}
@RequestMapping(method = RequestMethod.GET)
public MomiaHttpResponse get(@RequestParam String utoken) {
if (StringUtils.isBlank(utoken)) return MomiaHttpResponse.TOKEN_EXPIRED;
return MomiaHttpResponse.SUCCESS(completeTeacherImgs(teacherServiceApi.get(utoken)));
}
@RequestMapping(value = "/material", method = RequestMethod.GET)
public MomiaHttpResponse getMaterial(@RequestParam String utoken, @RequestParam(value = "mid") int materialId) {
if (StringUtils.isBlank(utoken)) return MomiaHttpResponse.TOKEN_EXPIRED;
if (materialId <= 0) return MomiaHttpResponse.BAD_REQUEST;
return MomiaHttpResponse.SUCCESS(completeMaterialImgs(oldTeacherServiceApi.getMaterial(utoken, materialId)));
}
private Material completeMaterialImgs(Material material) {
material.setCover(completeMiddleImg(material.getCover()));
return material;
}
@RequestMapping(value = "/material/list", method = RequestMethod.GET)
public MomiaHttpResponse listMaterials(@RequestParam String utoken, @RequestParam int start) {
if (StringUtils.isBlank(utoken)) return MomiaHttpResponse.TOKEN_EXPIRED;
if (start < 0) return MomiaHttpResponse.BAD_REQUEST;
PagedList<Material> pagedMaterials = oldTeacherServiceApi.listMaterials(utoken, start, Configuration.getInt("PageSize.Material"));
completeMaterialsImgs(pagedMaterials.getList());
return MomiaHttpResponse.SUCCESS(pagedMaterials);
}
private List<Material> completeMaterialsImgs(List<Material> materials) {
for (Material material : materials) {
completeMaterialImgs(material);
}
return materials;
}
@RequestMapping(value = "/course/ongoing", method = RequestMethod.GET)
public MomiaHttpResponse ongoing(@RequestParam String utoken) {
if (StringUtils.isBlank(utoken)) return MomiaHttpResponse.TOKEN_EXPIRED;
JSONObject resultJson = new JSONObject();
User user = userServiceApi.get(utoken);
TeacherCourse teacherCourse = courseServiceApi.getOngoingTeacherCourse(user.getId());
if (teacherCourse.exists()) {
teacherCourse.setCover(completeMiddleImg(teacherCourse.getCover()));
resultJson.put("course", teacherCourse);
resultJson.put("students", completeStudentsImgs(oldTeacherServiceApi.ongoingStudents(utoken, teacherCourse.getCourseId(), teacherCourse.getCourseSkuId())));
}
return MomiaHttpResponse.SUCCESS(resultJson);
}
@RequestMapping(value = "/course/notfinished", method = RequestMethod.GET)
public MomiaHttpResponse notfinished(@RequestParam String utoken, @RequestParam int start) {
if (StringUtils.isBlank(utoken)) return MomiaHttpResponse.TOKEN_EXPIRED;
if (start < 0) return MomiaHttpResponse.BAD_REQUEST;
User user = userServiceApi.get(utoken);
PagedList<TeacherCourse> courses = courseServiceApi.queryNotFinishedByTeacher(user.getId(), start, Configuration.getInt("PageSize.Course"));
completeMiddleTeacherCoursesImgs(courses.getList());
return MomiaHttpResponse.SUCCESS(courses);
}
private List<TeacherCourse> completeMiddleTeacherCoursesImgs(List<TeacherCourse> teacherCourses) {
for (TeacherCourse teacherCourse : teacherCourses) {
teacherCourse.setCover(completeMiddleImg(teacherCourse.getCover()));
}
return teacherCourses;
}
@RequestMapping(value = "/course/notfinished/student", method = RequestMethod.GET)
public MomiaHttpResponse notfinishedStudents(@RequestParam String utoken,
@RequestParam(value = "coid") long courseId,
@RequestParam(value = "sid") long courseSkuId) {
if (StringUtils.isBlank(utoken)) return MomiaHttpResponse.TOKEN_EXPIRED;
if (courseId <= 0 || courseSkuId <= 0) return MomiaHttpResponse.BAD_REQUEST;
List<Student> students = oldTeacherServiceApi.notfinishedStudents(utoken, courseId, courseSkuId);
completeStudentsImgs(students);
return MomiaHttpResponse.SUCCESS(students);
}
private List<Student> completeStudentsImgs(List<Student> students) {
for (Student student : students) {
completeStudentImgs(student);
}
return students;
}
private Student completeStudentImgs(Student student) {
student.setAvatar(completeSmallImg(student.getAvatar()));
return student;
}
@RequestMapping(value = "/course/finished", method = RequestMethod.GET)
public MomiaHttpResponse finished(@RequestParam String utoken, @RequestParam int start) {
if (StringUtils.isBlank(utoken)) return MomiaHttpResponse.TOKEN_EXPIRED;
if (start < 0) return MomiaHttpResponse.BAD_REQUEST;
User user = userServiceApi.get(utoken);
PagedList<TeacherCourse> courses = courseServiceApi.queryFinishedByTeacher(user.getId(), start, Configuration.getInt("PageSize.Course"));
completeMiddleTeacherCoursesImgs(courses.getList());
return MomiaHttpResponse.SUCCESS(courses);
}
@RequestMapping(value = "/course/finished/student", method = RequestMethod.GET)
public MomiaHttpResponse finishedStudents(@RequestParam String utoken,
@RequestParam(value = "coid") long courseId,
@RequestParam(value = "sid") long courseSkuId) {
if (StringUtils.isBlank(utoken)) return MomiaHttpResponse.TOKEN_EXPIRED;
if (courseId <= 0 || courseSkuId <= 0) return MomiaHttpResponse.BAD_REQUEST;
List<Student> students = oldTeacherServiceApi.finishedStudents(utoken, courseId, courseSkuId);
completeStudentsImgs(students);
return MomiaHttpResponse.SUCCESS(students);
}
@RequestMapping(value = "/course/checkin", method = RequestMethod.POST)
public MomiaHttpResponse checkin(@RequestParam String utoken,
@RequestParam(value = "uid") long userId,
@RequestParam(value = "pid") long packageId,
@RequestParam(value = "coid") long courseId,
@RequestParam(value = "sid") long courseSkuId) {
if (StringUtils.isBlank(utoken)) return MomiaHttpResponse.TOKEN_EXPIRED;
if (userId <= 0 || packageId <= 0 || courseId <= 0 || courseSkuId <= 0) return MomiaHttpResponse.BAD_REQUEST;
return MomiaHttpResponse.SUCCESS(oldTeacherServiceApi.checkin(utoken, userId, packageId, courseId, courseSkuId));
}
@RequestMapping(value = "/student", method = RequestMethod.GET)
public MomiaHttpResponse student(@RequestParam String utoken, @RequestParam(value = "cid") long childId, @RequestParam int start) {
if (StringUtils.isBlank(utoken)) return MomiaHttpResponse.TOKEN_EXPIRED;
if (childId <= 0 || start < 0) return MomiaHttpResponse.BAD_REQUEST;
JSONObject studentJson = new JSONObject();
if (start == 0) {
Child child = childServiceApi.get(utoken, childId);
if (!child.exists()) return MomiaHttpResponse.FAILED("");
studentJson.put("child", completeStudentImgs(buildStudent(child)));
}
PagedList<ChildComment> pagedComments = childServiceApi.listComments(utoken, childId, start, Configuration.getInt("PageSize.ChildComment"));
studentJson.put("comments", buildStudentComments(pagedComments));
return MomiaHttpResponse.SUCCESS(studentJson);
}
private PagedList<JSONObject> buildStudentComments(PagedList<ChildComment> pagedComments) {
Set<Long> teacherUserIds = new HashSet<Long>();
Set<Long> courseIds = new HashSet<Long>();
Set<Long> courseSkuIds = new HashSet<Long>();
for (ChildComment comment : pagedComments.getList()) {
teacherUserIds.add(comment.getTeacherUserId());
courseIds.add(comment.getCourseId());
courseSkuIds.add(comment.getCourseSkuId());
}
List<User> teacherUsers = userServiceApi.list(teacherUserIds, User.Type.MINI);
Map<Long, User> teacherUsersMap = new HashMap<Long, User>();
for (User user : teacherUsers) {
teacherUsersMap.put(user.getId(), user);
}
List<Course> courses = courseServiceApi.list(courseIds);
Map<Long, Course> coursesMap = new HashMap<Long, Course>();
for (Course course : courses) {
coursesMap.put(course.getId(), course);
}
List<CourseSku> skus = courseServiceApi.listSkus(courseSkuIds);
Map<Long, CourseSku> skusMap = new HashMap<Long, CourseSku>();
for (CourseSku sku : skus) {
skusMap.put(sku.getId(), sku);
}
List<JSONObject> studentComments = new ArrayList<JSONObject>();
for (ChildComment comment : pagedComments.getList()) {
User teacherUser = teacherUsersMap.get(comment.getTeacherUserId());
Course course = coursesMap.get(comment.getCourseId());
CourseSku sku = skusMap.get(comment.getCourseSkuId());
if (teacherUser == null || course == null || sku == null) continue;
JSONObject studentComment = new JSONObject();
studentComment.put("date", TimeUtil.SHORT_DATE_FORMAT.format(sku.getStartTime()));
studentComment.put("title", course.getTitle());
studentComment.put("content", comment.getContent());
studentComment.put("teacher", teacherUser.getNickName());
studentComments.add(studentComment);
}
PagedList<JSONObject> pagedStudentComments = new PagedList<JSONObject>();
pagedStudentComments.setTotalCount(pagedComments.getTotalCount());
pagedStudentComments.setNextIndex(pagedComments.getNextIndex());
pagedStudentComments.setList(studentComments);
return pagedStudentComments;
}
private Student buildStudent(Child child) {
Student student = new Student();
student.setId(child.getId());
student.setUserId(child.getUserId());
student.setAvatar(child.getAvatar());
student.setName(child.getName());
student.setBirthday(child.getBirthday());
student.setSex(child.getSex());
return student;
}
@RequestMapping(value = "/student/record", method = RequestMethod.GET)
public MomiaHttpResponse record(@RequestParam String utoken,
@RequestParam(value = "cid") long childId,
@RequestParam(value = "coid") long courseId,
@RequestParam(value = "sid") long courseSkuId) {
if (StringUtils.isBlank(utoken)) return MomiaHttpResponse.TOKEN_EXPIRED;
if (courseId <= 0 || courseSkuId <= 0 || childId <= 0) return MomiaHttpResponse.BAD_REQUEST;
Child child = childServiceApi.get(utoken, childId);
if (!child.exists()) return MomiaHttpResponse.FAILED("");
List<ChildTag> tags = childServiceApi.listAllTags();
ChildRecord record = childServiceApi.getRecord(utoken, childId, courseId, courseSkuId);
JSONObject recordJson = new JSONObject();
recordJson.put("child", completeStudentImgs(buildStudent(child)));
recordJson.put("tags", tags);
recordJson.put("record", record);
return MomiaHttpResponse.SUCCESS(recordJson);
}
@RequestMapping(value = "/student/record", method = RequestMethod.POST)
public MomiaHttpResponse record(@RequestParam String utoken,
@RequestParam(value = "cid") long childId,
@RequestParam(value = "coid") long courseId,
@RequestParam(value = "sid") long courseSkuId,
@RequestParam String record) {
if (StringUtils.isBlank(utoken)) return MomiaHttpResponse.TOKEN_EXPIRED;
if (courseId <= 0 || courseSkuId <= 0 || childId <= 0 || StringUtils.isBlank(record)) return MomiaHttpResponse.BAD_REQUEST;
return MomiaHttpResponse.SUCCESS(childServiceApi.record(utoken, childId, courseId, courseSkuId, record));
}
@RequestMapping(value = "/student/comment", method = RequestMethod.POST)
public MomiaHttpResponse comment(@RequestParam String utoken,
@RequestParam(value = "cid") long childId,
@RequestParam(value = "coid") long courseId,
@RequestParam(value = "sid") long courseSkuId,
@RequestParam String comment) {
if (StringUtils.isBlank(utoken)) return MomiaHttpResponse.TOKEN_EXPIRED;
if (courseId <= 0 || courseSkuId <= 0 || childId <= 0 || StringUtils.isBlank(comment)) return MomiaHttpResponse.BAD_REQUEST;
return MomiaHttpResponse.SUCCESS(childServiceApi.comment(utoken, childId, courseId, courseSkuId, comment));
}
} |
package co.andrewbates.grade.data;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.HashMap;
import java.util.UUID;
import org.hildan.fxgson.FxGson;
import com.google.gson.Gson;
import com.google.gson.JsonIOException;
import com.google.gson.JsonSyntaxException;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.concurrent.Task;
public abstract class BaseModelLoader<T extends Model> implements ModelLoader<T> {
private ObservableList<T> list;
private Class<T> modelClass;
private Path path;
private HashMap<UUID, T> index;
public BaseModelLoader(Class<T> modelClass) {
this.modelClass = modelClass;
this.list = FXCollections.observableArrayList();
this.index = new HashMap<UUID, T>();
}
protected void initialize(T model) {
}
public T get(UUID id) {
return index.get(id);
}
T load(File file) throws DataException {
if (file.isDirectory()) {
file = file.toPath().resolve("model.json").toFile();
}
if (!file.exists()) {
return null;
}
T model = null;
Gson gson = FxGson.create();
try {
model = gson.fromJson(new FileReader(file), modelClass);
initialize(model);
list.add(model);
index.put(model.getID(), model);
} catch (JsonSyntaxException | JsonIOException | FileNotFoundException e) {
// TODO this needs to throw something
e.printStackTrace(System.err);
}
return model;
}
T load(Path path) throws DataException {
return load(path.toFile());
}
T load(String name) throws DataException {
File file = getPath().resolve(name).toFile();
if (file.exists()) {
return load(file);
}
return null;
}
public Task<Void> loadAll(Path path) throws DataException {
setPath(path);
return loadAll();
}
public Task<Void> loadAll() throws DataException {
return new Task<Void>() {
@Override
protected Void call() throws Exception {
list.clear();
File[] files = getPath().toFile().listFiles();
for (int i = 0; i < files.length; i++) {
load(files[i]);
updateProgress(i, files.length);
}
updateProgress(1.0, 1.0);
succeeded();
return null;
}
};
}
void setPath(Path path) {
this.path = path;
}
public Path getPath() {
return path;
}
@Override
public Path getPath(T object) {
return getPath().resolve(object.getID().toString());
}
public void delete(T object) throws IOException {
Files.walkFileTree(getPath(object), new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
Files.delete(file);
return FileVisitResult.CONTINUE;
}
@Override
public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
Files.delete(dir);
return FileVisitResult.CONTINUE;
}
});
list.remove(object);
}
public ObservableList<T> list() {
return list;
}
@Override
public void save(T object) throws IOException {
if (object.getID() == null) {
object.setID(UUID.randomUUID());
list.add(object);
index.put(object.getID(), object);
FXCollections.sort(list);
}
File dir = getPath(object).toFile();
if (!dir.exists()) {
dir.mkdirs();
}
File file = dir.toPath().resolve("model.json").toFile();
Gson gson = FxGson.create();
FileWriter writer = new FileWriter(file);
writer.write(gson.toJson(object));
writer.close();
}
} |
package com.akiban.server;
import java.math.BigDecimal;
import java.math.BigInteger;
import com.akiban.ais.model.CharsetAndCollation;
import com.akiban.ais.model.Column;
import com.akiban.qp.operator.Cursor;
import com.akiban.server.collation.AkCollator;
import com.akiban.server.collation.AkCollatorFactory;
import com.akiban.server.types.AkType;
import com.akiban.server.types.ValueSourceHelper;
import com.akiban.server.types.ValueTarget;
import com.akiban.util.ByteSource;
import com.persistit.Key;
public final class PersistitKeyValueTarget implements ValueTarget {
private AkCollator collator = AkCollatorFactory.UCS_BINARY_COLLATOR;
// PersistitKeyValueTarget interface
public void attach(Key key) {
this.key = key;
}
public PersistitKeyValueTarget expectingType(AkType type) {
if (type == AkType.INTERVAL_MILLIS || type == AkType.INTERVAL_MONTH)
throw new UnsupportedOperationException();
this.type = type;
return this;
}
public PersistitKeyValueTarget expectingType(Column column) {
expectingType(column.getType().akType());
// if (type == AkType.VARCHAR || type == AkType.TEXT) {
// final CharsetAndCollation cac = column.getCharsetAndCollation();
// if (cac != null) {
// final String collationName = cac.collation();
// if ("latin1_swedish_ci".equals(collationName)) {
// collator = AkCollatorFactory.getCollator("en_US");
return this;
}
// ValueTarget interface
@Override
public void putNull() {
checkState(AkType.NULL);
key.append(null);
invalidate();
}
@Override
public void putDate(long value) {
checkState(AkType.DATE);
key.append(value);
invalidate();
}
@Override
public void putDateTime(long value) {
checkState(AkType.DATETIME);
key.append(value);
invalidate();
}
@Override
public void putDecimal(BigDecimal value) {
checkState(AkType.DECIMAL);
key.append(value);
invalidate();
}
@Override
public void putDouble(double value) {
checkState(AkType.DOUBLE);
key.append(value);
invalidate();
}
@Override
public void putFloat(float value) {
checkState(AkType.FLOAT);
key.append(value);
invalidate();
}
@Override
public void putInt(long value) {
checkState(AkType.INT);
key.append(value);
invalidate();
}
@Override
public void putLong(long value) {
checkState(AkType.LONG);
key.append(value);
invalidate();
}
@Override
public void putString(String value) {
checkState(AkType.VARCHAR);
collator.append(key, value);
invalidate();
}
@Override
public void putText(String value) {
checkState(AkType.TEXT);
collator.append(key, value);
invalidate();
}
@Override
public void putTime(long value) {
checkState(AkType.TIME);
key.append(value);
invalidate();
}
@Override
public void putTimestamp(long value) {
checkState(AkType.TIMESTAMP);
key.append(value);
invalidate();
}
@Override
public void putInterval_Millis(long value) {
throw new UnsupportedOperationException("interval not supported yet");
}
@Override
public void putInterval_Month(long value) {
throw new UnsupportedOperationException("interval not supported yet");
}
@Override
public void putUBigInt(BigInteger value) {
checkState(AkType.U_BIGINT);
key.append(value);
invalidate();
}
@Override
public void putUDouble(double value) {
checkState(AkType.U_DOUBLE);
key.append(value);
invalidate();
}
@Override
public void putUFloat(float value) {
checkState(AkType.U_FLOAT);
key.append(value);
invalidate();
}
@Override
public void putUInt(long value) {
checkState(AkType.U_INT);
key.append(value);
invalidate();
}
@Override
public void putVarBinary(ByteSource value) {
checkState(AkType.VARBINARY);
key().appendByteArray(value.byteArray(), value.byteArrayOffset(), value.byteArrayLength());
invalidate();
}
@Override
public void putYear(long value) {
checkState(AkType.YEAR);
key.append(value);
invalidate();
}
@Override
public void putBool(boolean value) {
checkState(AkType.BOOL);
key.append(value);
invalidate();
}
@Override
public void putResultSet(Cursor value) {
throw new UnsupportedOperationException();
}
@Override
public AkType getConversionType() {
return type;
}
// object interface
@Override
public String toString() {
return key().toString();
}
// for use by this class
protected final Key key() {
return key;
}
// private methods
private void checkState(AkType type) {
ValueSourceHelper.checkType(this.type, type);
}
private void invalidate() {
type = AkType.UNSUPPORTED;
}
// object state
private Key key;
private AkType type = AkType.UNSUPPORTED;
} |
package com.akiban.server.service.ui;
import com.akiban.server.service.ServiceManager;
import java.awt.*;
import java.awt.event.*;
import java.io.File;
import java.io.IOException;
import javax.swing.*;
import javax.swing.text.*;
import java.net.URL;
import java.io.PrintStream;
public class SwingConsole extends JFrame implements WindowListener
{
public static final String TITLE = "Akiban Server";
public static final String ICON_PATH = "Akiban_Server_128x128.png";
private final ServiceManager serviceManager;
private JTextArea textArea;
private PrintStream printStream;
private final String[] RUN_PSQL_CMD;
private String PSQL_ARGS = null;
public SwingConsole(ServiceManager serviceManager) {
super(TITLE);
this.serviceManager = serviceManager;
setDefaultCloseOperation(JFrame.DO_NOTHING_ON_CLOSE);
// output area
textArea = new JTextArea(50, 100);
textArea.setLineWrap(true);
DefaultCaret caret = (DefaultCaret)textArea.getCaret();
caret.setUpdatePolicy(DefaultCaret.ALWAYS_UPDATE);
textArea.setEditable(false);
JScrollPane scrollPane = new JScrollPane(textArea);
add(scrollPane);
// menu
addWindowListener(this);
{
String osName = System.getProperty("os.name");
boolean macOSX = "Mac OS X".equals(osName);
int shift = (macOSX) ? InputEvent.META_MASK : InputEvent.CTRL_MASK;
JMenuBar menuBar = new JMenuBar();
// File menu
if (!macOSX || !Boolean.getBoolean("apple.laf.useScreenMenuBar")) {
JMenu fileMenu = new JMenu("File");
fileMenu.setMnemonic(KeyEvent.VK_F);
JMenuItem quitMenuItem = new JMenuItem("Quit", KeyEvent.VK_Q);
quitMenuItem.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
quit();
}
});
quitMenuItem.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_Q, shift));
fileMenu.add(quitMenuItem);
menuBar.add(fileMenu);
}
// Edit menu
JMenu editMenu = new JMenu("Edit");
editMenu.setMnemonic(KeyEvent.VK_E);
Action action = new DefaultEditorKit.CutAction();
action.putValue(Action.NAME, "Cut");
action.putValue(Action.ACCELERATOR_KEY, KeyStroke.getKeyStroke(KeyEvent.VK_X, shift));
editMenu.add(action);
action = new DefaultEditorKit.CopyAction();
action.putValue(Action.NAME, "Copy");
action.putValue(Action.ACCELERATOR_KEY, KeyStroke.getKeyStroke(KeyEvent.VK_C, shift));
editMenu.add(action);
action = new DefaultEditorKit.PasteAction();
action.putValue(Action.NAME, "Paste");
action.putValue(Action.ACCELERATOR_KEY, KeyStroke.getKeyStroke(KeyEvent.VK_V, shift));
editMenu.add(action);
action = new TextAction(DefaultEditorKit.selectAllAction) {
public void actionPerformed(ActionEvent e) {
getFocusedComponent().selectAll();
}
};
action.putValue(Action.NAME, "Select All");
action.putValue(Action.ACCELERATOR_KEY, KeyStroke.getKeyStroke(KeyEvent.VK_A, shift));
editMenu.add(action);
JMenuItem clearAll = editMenu.add("Clear Console");
clearAll.setMnemonic(KeyEvent.VK_R);
clearAll.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_K,
shift));
clearAll.addActionListener(new ActionListener()
{
@Override
public void actionPerformed(ActionEvent ae)
{
textArea.setText("");
}
});
menuBar.add(editMenu);
// Run menu
JMenu run = new JMenu("Run");
run.setMnemonic(KeyEvent.VK_W);
JMenuItem runPsql = run.add("Run PSQL client");
runPsql.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_F6,
shift));
int port = this.serviceManager.getPostgresService().getPort();
if (macOSX)
RUN_PSQL_CMD = new String[] { "osascript", "-e",
"tell application \"Terminal\"\n activate\n do script \"exec psql -h localhost -p" + port + "\"\n end tell" };
else if (osName.startsWith("Window"))
RUN_PSQL_CMD = new String[]{"cmd.exe", "/c",
"start psql -h localhost -p" + port };
else // assuming unix-based system
RUN_PSQL_CMD = new String[]{new File("/etc/alternatives/x-terminal-emulator").exists()
? ""
: "xterm",
"-e", "psql -h localhost -p" + port};
runPsql.addActionListener(new ActionListener()
{
@Override
public void actionPerformed(ActionEvent ae)
{
try
{
Runtime.getRuntime().exec(RUN_PSQL_CMD);
}
catch (IOException ex)
{
JOptionPane.showMessageDialog(SwingConsole.this,
"Unable to open Terminal\nError: " + ex.getMessage(),
"Error",
JOptionPane.ERROR_MESSAGE);
}
}
});
menuBar.add(run);
setJMenuBar(menuBar);
}
// centerise the window
pack();
Dimension screenSize = Toolkit.getDefaultToolkit().getScreenSize();
setSize(screenSize.width/2, screenSize.height/2);
setLocationRelativeTo(null);
URL iconURL = SwingConsole.class.getClassLoader().getResource(SwingConsole.class.getPackage().getName().replace('.', '/') + "/" + ICON_PATH);
if (iconURL != null) {
ImageIcon icon = new ImageIcon(iconURL);
setIconImage(icon.getImage());
}
}
@Override
public void windowClosing(WindowEvent arg0) {
quit();
}
@Override
public void windowClosed(WindowEvent arg0) {
}
@Override
public void windowActivated(WindowEvent arg0) {
}
@Override
public void windowDeactivated(WindowEvent arg0) {
}
@Override
public void windowDeiconified(WindowEvent arg0) {
}
@Override
public void windowIconified(WindowEvent arg0) {
}
@Override
public void windowOpened(WindowEvent arg0) {
}
public PrintStream getPrintStream() {
return printStream;
}
static class TextAreaPrintStream extends PrintStream {
public TextAreaPrintStream() {
this(new TextAreaOutputStream());
}
public TextAreaPrintStream(TextAreaOutputStream out) {
super(out, true);
}
public TextAreaOutputStream getOut() {
return (TextAreaOutputStream)out;
}
}
public PrintStream openPrintStream(boolean reuseSystem) {
if (reuseSystem &&
(System.out instanceof TextAreaPrintStream) &&
((TextAreaPrintStream)System.out).getOut().setTextAreaIfUnbound(textArea)) {
printStream = System.out;
}
else {
printStream = new PrintStream(new TextAreaOutputStream(textArea));
}
return printStream;
}
public void closePrintStream() {
if (printStream == System.out) {
((TextAreaPrintStream)System.out).getOut().clearTextAreaIfBound(textArea);
}
printStream = null;
}
protected void quit() {
switch (serviceManager.getState()) {
case ERROR_STARTING:
dispose();
break;
default:
int yn = JOptionPane.showConfirmDialog(this,
"Do you really want to quit Akiban-Server?",
"Attention!",
JOptionPane.YES_NO_OPTION,
JOptionPane.QUESTION_MESSAGE);
if (yn != JOptionPane.YES_OPTION)
return;
try {
serviceManager.stopServices();
}
catch (Exception ex) {
}
break;
}
}
} |
package soot.jimple.infoflow.android;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import soot.Body;
import soot.Local;
import soot.MethodOrMethodContext;
import soot.PackManager;
import soot.RefType;
import soot.Scene;
import soot.SceneTransformer;
import soot.SootClass;
import soot.SootMethod;
import soot.Transform;
import soot.Type;
import soot.Unit;
import soot.Value;
import soot.jimple.DefinitionStmt;
import soot.jimple.IdentityStmt;
import soot.jimple.InstanceInvokeExpr;
import soot.jimple.IntConstant;
import soot.jimple.InvokeExpr;
import soot.jimple.ReturnVoidStmt;
import soot.jimple.Stmt;
import soot.jimple.infoflow.android.data.AndroidMethod;
import soot.jimple.infoflow.data.SootMethodAndClass;
import soot.jimple.infoflow.util.SootMethodRepresentationParser;
import soot.jimple.toolkits.callgraph.Edge;
import soot.jimple.toolkits.callgraph.ReachableMethods;
import soot.toolkits.graph.ExceptionalUnitGraph;
import soot.toolkits.scalar.SimpleLiveLocals;
import soot.toolkits.scalar.SmartLocalDefs;
import soot.util.HashMultiMap;
import soot.util.MultiMap;
/**
* Analyzes the classes in the APK file to find custom implementations of the
* well-known Android callback and handler interfaces.
*
* @author Steven Arzt
*
*/
public class AnalyzeJimpleClass {
	// Classes registered as Android entry points (activities, services, ...).
	private final Set<String> entryPointClasses;
	// Fully-qualified names of interfaces treated as Android callback handlers.
	private final Set<String> androidCallbacks;
	// Found callback methods, keyed by the entry-point class they belong to.
	private final Map<String, Set<SootMethodAndClass>> callbackMethods =
			new HashMap<String, Set<SootMethodAndClass>>();
	// Callback methods discovered in the current pass that still need processing.
	private final Map<String, Set<SootMethodAndClass>> callbackWorklist =
			new HashMap<String, Set<SootMethodAndClass>>();
	// Maps class names to the layout resource ids they reference.
	private final Map<String, Set<Integer>> layoutClasses =
			new HashMap<String, Set<Integer>>();
	// Components registered dynamically at runtime rather than in the manifest.
	private final Set<String> dynamicManifestComponents =
			new HashSet<>();
public AnalyzeJimpleClass(Set<String> entryPointClasses) throws IOException {
this.entryPointClasses = entryPointClasses;
this.androidCallbacks = loadAndroidCallbacks();
}
public AnalyzeJimpleClass(Set<String> entryPointClasses,
Set<String> androidCallbacks) {
this.entryPointClasses = entryPointClasses;
this.androidCallbacks = new HashSet<String>();
}
/**
* Loads the set of interfaces that are used to implement Android callback
* handlers from a file on disk
* @return A set containing the names of the interfaces that are used to
* implement Android callback handlers
*/
private Set<String> loadAndroidCallbacks() throws IOException {
Set<String> androidCallbacks = new HashSet<String>();
BufferedReader rdr = null;
try {
String fileName = "AndroidCallbacks.txt";
if (!new File(fileName).exists()) {
fileName = "../soot-infoflow-android/AndroidCallbacks.txt";
if (!new File(fileName).exists())
throw new RuntimeException("Callback definition file not found");
}
rdr = new BufferedReader(new FileReader(fileName));
String line;
while ((line = rdr.readLine()) != null)
if (!line.isEmpty())
androidCallbacks.add(line);
}
finally {
if (rdr != null)
rdr.close();
}
return androidCallbacks;
}
/**
* Collects the callback methods for all Android default handlers
* implemented in the source code.
* Note that this operation runs inside Soot, so this method only registers
* a new phase that will be executed when Soot is next run
*/
public void collectCallbackMethods() {
Transform transform = new Transform("wjtp.ajc", new SceneTransformer() {
protected void internalTransform(String phaseName, @SuppressWarnings("rawtypes") Map options) {
// Find the mappings between classes and layouts
findClassLayoutMappings();
// Process the callback classes directly reachable from the
// entry points
for (String className : entryPointClasses) {
SootClass sc = Scene.v().getSootClass(className);
List<MethodOrMethodContext> methods = new ArrayList<MethodOrMethodContext>();
methods.addAll(sc.getMethods());
// Check for callbacks registered in the code
analyzeRechableMethods(sc, methods);
// Check for method overrides
analyzeMethodOverrideCallbacks(sc);
}
System.out.println("Callback analysis done.");
}
});
PackManager.v().getPack("wjtp").add(transform);
}
/**
* Incrementally collects the callback methods for all Android default
* handlers implemented in the source code. This just processes the contents
* of the worklist.
* Note that this operation runs inside Soot, so this method only registers
* a new phase that will be executed when Soot is next run
*/
public void collectCallbackMethodsIncremental() {
Transform transform = new Transform("wjtp.ajc", new SceneTransformer() {
protected void internalTransform(String phaseName, @SuppressWarnings("rawtypes") Map options) {
// Process the worklist from last time
System.out.println("Running incremental callback analysis for " + callbackWorklist.size()
+ " components...");
MultiMap<String, SootMethodAndClass> workListCopy =
new HashMultiMap<String, SootMethodAndClass>(callbackWorklist);
for (String className : workListCopy.keySet()) {
List<MethodOrMethodContext> entryClasses = new LinkedList<MethodOrMethodContext>();
for (SootMethodAndClass am : workListCopy.get(className))
entryClasses.add(Scene.v().getMethod(am.getSignature()));
analyzeRechableMethods(Scene.v().getSootClass(className), entryClasses);
callbackWorklist.remove(className);
}
System.out.println("Incremental callback analysis done.");
}
});
PackManager.v().getPack("wjtp").add(transform);
}
private void analyzeRechableMethods(SootClass lifecycleElement, List<MethodOrMethodContext> methods) {
ReachableMethods rm = new ReachableMethods(Scene.v().getCallGraph(), methods);
rm.update();
// Scan for listeners in the class hierarchy
Iterator<MethodOrMethodContext> reachableMethods = rm.listener();
while (reachableMethods.hasNext()) {
SootMethod method = reachableMethods.next().method();
analyzeMethodForCallbackRegistrations(lifecycleElement, method);
analyzeMethodForDynamicBroadcastReceiver(method);
}
}
/**
* Analyzes the given method and looks for callback registrations
* @param lifecycleElement The lifecycle element (activity, etc.) with which
* to associate the found callbacks
* @param method The method in which to look for callbacks
*/
private void analyzeMethodForCallbackRegistrations(SootClass lifecycleElement, SootMethod method) {
// Do not analyze system classes
if (method.getDeclaringClass().getName().startsWith("android.")
|| method.getDeclaringClass().getName().startsWith("java."))
return;
if (!method.isConcrete())
return;
ExceptionalUnitGraph graph = new ExceptionalUnitGraph(method.retrieveActiveBody());
SmartLocalDefs smd = new SmartLocalDefs(graph, new SimpleLiveLocals(graph));
// Iterate over all statement and find callback registration methods
Set<SootClass> callbackClasses = new HashSet<SootClass>();
for (Unit u : method.retrieveActiveBody().getUnits()) {
Stmt stmt = (Stmt) u;
// Callback registrations are always instance invoke expressions
if (stmt.containsInvokeExpr() && stmt.getInvokeExpr() instanceof InstanceInvokeExpr) {
InstanceInvokeExpr iinv = (InstanceInvokeExpr) stmt.getInvokeExpr();
String[] parameters = SootMethodRepresentationParser.v().getParameterTypesFromSubSignature(
iinv.getMethodRef().getSubSignature().getString());
for (int i = 0; i < parameters.length; i++) {
String param = parameters[i];
if (androidCallbacks.contains(param)) {
Value arg = iinv.getArg(i);
// We have a formal parameter type that corresponds to one of the Android
// callback interfaces. Look for definitions of the parameter to estimate
// the actual type.
if (arg.getType() instanceof RefType && arg instanceof Local)
for (Unit def : smd.getDefsOfAt((Local) arg, u)) {
assert def instanceof DefinitionStmt;
Type tp = ((DefinitionStmt) def).getRightOp().getType();
if (tp instanceof RefType) {
SootClass callbackClass = ((RefType) tp).getSootClass();
if (callbackClass.isInterface())
for (SootClass impl : Scene.v().getActiveHierarchy().getImplementersOf(callbackClass))
for (SootClass c : Scene.v().getActiveHierarchy().getSubclassesOfIncluding(impl))
callbackClasses.add(c);
else
for (SootClass c : Scene.v().getActiveHierarchy().getSubclassesOfIncluding(callbackClass))
callbackClasses.add(c);
}
}
}
}
}
}
// Analyze all found callback classes
for (SootClass callbackClass : callbackClasses)
analyzeClass(callbackClass, lifecycleElement);
}
/**
* Checks whether the given method dynamically registers a new broadcast
* receiver
* @param method The method to check
*/
private void analyzeMethodForDynamicBroadcastReceiver(SootMethod method) {
if (!method.isConcrete() || !method.hasActiveBody())
return;
// stmt.getInvokeExpr().getMethod().getDeclaringClass().getName().equals("android.content.Context")
for (Unit u : method.getActiveBody().getUnits()) {
Stmt stmt = (Stmt) u;
if (stmt.containsInvokeExpr()) {
if (stmt.getInvokeExpr().getMethod().getName().equals("registerReceiver")
&& stmt.getInvokeExpr().getArgCount() > 0
&& isInheritedMethod(stmt, "android.content.ContextWrapper",
"android.content.Context")) {
Value br = stmt.getInvokeExpr().getArg(0);
if (br.getType() instanceof RefType) {
RefType rt = (RefType) br.getType();
dynamicManifestComponents.add(rt.getClassName());
}
}
}
}
}
private boolean isInheritedMethod(Stmt stmt, String... classNames) {
Iterator<Edge> edgeIt = Scene.v().getCallGraph().edgesOutOf(stmt);
while (edgeIt.hasNext()) {
Edge edge = edgeIt.next();
String targetClass = edge.getTgt().method().getDeclaringClass().getName();
for (String className : classNames)
if (className.equals(targetClass))
return true;
}
return false;
}
/**
* Finds the mappings between classes and their respective layout files
*/
private void findClassLayoutMappings() {
Iterator<MethodOrMethodContext> rmIterator = Scene.v().getReachableMethods().listener();
while (rmIterator.hasNext()) {
SootMethod sm = rmIterator.next().method();
if (!sm.isConcrete())
continue;
for (Unit u : sm.retrieveActiveBody().getUnits())
if (u instanceof Stmt) {
Stmt stmt = (Stmt) u;
if (stmt.containsInvokeExpr()) {
InvokeExpr inv = stmt.getInvokeExpr();
if (invokesSetContentView(inv)) {
for (Value val : inv.getArgs())
if (val instanceof IntConstant) {
IntConstant constVal = (IntConstant) val;
Set<Integer> layoutIDs = this.layoutClasses.get(sm.getDeclaringClass().getName());
if (layoutIDs == null) {
layoutIDs = new HashSet<Integer>();
this.layoutClasses.put(sm.getDeclaringClass().getName(), layoutIDs);
}
layoutIDs.add(constVal.value);
}
}
}
}
}
}
/**
* Checks whether this invocation calls Android's Activity.setContentView
* method
* @param inv The invocaton to check
* @return True if this invocation calls setContentView, otherwise false
*/
private boolean invokesSetContentView(InvokeExpr inv) {
String methodName = SootMethodRepresentationParser.v().getMethodNameFromSubSignature(
inv.getMethodRef().getSubSignature().getString());
if (!methodName.equals("setContentView"))
return false;
// In some cases, the bytecode points the invocation to the current
// class even though it does not implement setContentView, instead
// of using the superclass signature
SootClass curClass = inv.getMethod().getDeclaringClass();
while (curClass != null) {
if (curClass.getName().equals("android.app.Activity")
|| curClass.getName().equals("android.support.v7.app.ActionBarActivity"))
return true;
if (curClass.declaresMethod("void setContentView(int)"))
return false;
curClass = curClass.hasSuperclass() ? curClass.getSuperclass() : null;
}
return false;
}
/**
* Analyzes the given class to find callback methods
* @param sootClass The class to analyze
* @param lifecycleElement The lifecycle element (activity, service, etc.)
* to which the callback methods belong
*/
private void analyzeClass(SootClass sootClass, SootClass lifecycleElement) {
// Do not analyze system classes
if (sootClass.getName().startsWith("android.")
|| sootClass.getName().startsWith("java."))
return;
// Check for callback handlers implemented via interfaces
analyzeClassInterfaceCallbacks(sootClass, sootClass, lifecycleElement);
}
private void analyzeMethodOverrideCallbacks(SootClass sootClass) {
if (!sootClass.isConcrete())
return;
if (sootClass.isInterface())
return;
// Do not start the search in system classes
if (sootClass.getName().startsWith("android.")
|| sootClass.getName().startsWith("java.")
|| sootClass.getName().startsWith("com.google."))
return;
// There are also some classes that implement interesting callback methods.
// We model this as follows: Whenever the user overwrites a method in an
// Android OS class, we treat it as a potential callback.
Set<String> systemMethods = new HashSet<String>(10000);
for (SootClass parentClass : Scene.v().getActiveHierarchy().getSuperclassesOf(sootClass)) {
if (parentClass.getName().startsWith("android.") || parentClass.getName().startsWith("com.google."))
for (SootMethod sm : parentClass.getMethods())
if (!sm.isConstructor())
systemMethods.add(sm.getSubSignature());
}
// Iterate over all user-implemented methods. If they are inherited
// from a system class, they are callback candidates.
for (SootClass parentClass : Scene.v().getActiveHierarchy().getSubclassesOfIncluding(sootClass)) {
if (parentClass.getName().startsWith("android."))
continue;
for (SootMethod method : parentClass.getMethods()) {
if (!systemMethods.contains(method.getSubSignature()))
continue;
// This is a real callback method
checkAndAddMethod(method, sootClass);
}
}
}
private SootMethod getMethodFromHierarchyEx(SootClass c, String methodSignature) {
if (c.declaresMethod(methodSignature))
return c.getMethod(methodSignature);
if (c.hasSuperclass())
return getMethodFromHierarchyEx(c.getSuperclass(), methodSignature);
throw new RuntimeException("Could not find method");
}
private void analyzeClassInterfaceCallbacks(SootClass baseClass, SootClass sootClass,
SootClass lifecycleElement) {
// We cannot create instances of abstract classes anyway, so there is no
// reason to look for interface implementations
if (!baseClass.isConcrete())
return;
// For a first take, we consider all classes in the android.* packages
// to be part of the operating system
if (baseClass.getName().startsWith("android."))
return;
// If we are a class, one of our superclasses might implement an Android
// interface
if (sootClass.hasSuperclass())
analyzeClassInterfaceCallbacks(baseClass, sootClass.getSuperclass(), lifecycleElement);
// Do we implement one of the well-known interfaces?
for (SootClass i : collectAllInterfaces(sootClass)) {
if (androidCallbacks.contains(i.getName()))
for (SootMethod sm : i.getMethods())
checkAndAddMethod(getMethodFromHierarchyEx(baseClass,
sm.getSubSignature()), lifecycleElement);
}
}
/**
* Checks whether the given Soot method comes from a system class. If not,
* it is added to the list of callback methods.
* @param method The method to check and add
* @param baseClass The base class (activity, service, etc.) to which this
* callback method belongs
*/
private void checkAndAddMethod(SootMethod method, SootClass baseClass) {
AndroidMethod am = new AndroidMethod(method);
// Do not call system methods
if (am.getClassName().startsWith("android.")
|| am.getClassName().startsWith("java."))
return;
// Skip empty methods
if (method.isConcrete() && isEmpty(method.retrieveActiveBody()))
return;
boolean isNew;
if (this.callbackMethods.containsKey(baseClass.getName()))
isNew = this.callbackMethods.get(baseClass.getName()).add(am);
else {
Set<SootMethodAndClass> methods = new HashSet<SootMethodAndClass>();
isNew = methods.add(am);
this.callbackMethods.put(baseClass.getName(), methods);
}
if (isNew)
if (this.callbackWorklist.containsKey(baseClass.getName()))
this.callbackWorklist.get(baseClass.getName()).add(am);
else {
Set<SootMethodAndClass> methods = new HashSet<SootMethodAndClass>();
isNew = methods.add(am);
this.callbackWorklist.put(baseClass.getName(), methods);
}
}
private boolean isEmpty(Body activeBody) {
for (Unit u : activeBody.getUnits())
if (!(u instanceof IdentityStmt || u instanceof ReturnVoidStmt))
return false;
return true;
}
private Set<SootClass> collectAllInterfaces(SootClass sootClass) {
Set<SootClass> interfaces = new HashSet<SootClass>(sootClass.getInterfaces());
for (SootClass i : sootClass.getInterfaces())
interfaces.addAll(collectAllInterfaces(i));
return interfaces;
}
public Map<String, Set<SootMethodAndClass>> getCallbackMethods() {
return this.callbackMethods;
}
public Map<String, Set<Integer>> getLayoutClasses() {
return this.layoutClasses;
}
public Set<String> getDynamicManifestComponents() {
return this.dynamicManifestComponents;
}
} |
package com.celements.web.service;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.math.BigDecimal;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.ResourceBundle;
import org.apache.commons.lang.NotImplementedException;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.xwiki.component.annotation.Component;
import org.xwiki.component.annotation.Requirement;
import org.xwiki.context.Execution;
import org.xwiki.model.EntityType;
import org.xwiki.model.reference.DocumentReference;
import org.xwiki.model.reference.EntityReference;
import org.xwiki.model.reference.EntityReferenceResolver;
import org.xwiki.model.reference.EntityReferenceSerializer;
import org.xwiki.model.reference.SpaceReference;
import org.xwiki.model.reference.WikiReference;
import com.celements.inheritor.TemplatePathTransformationConfiguration;
import com.celements.navigation.cmd.MultilingualMenuNameCommand;
import com.celements.rendering.RenderCommand;
import com.celements.rendering.XHTMLtoHTML5cleanup;
import com.celements.sajson.Builder;
import com.celements.web.comparators.BaseObjectComparator;
import com.celements.web.plugin.api.CelementsWebPluginApi;
import com.celements.web.plugin.cmd.EmptyCheckCommand;
import com.celements.web.plugin.cmd.PageLayoutCommand;
import com.xpn.xwiki.XWikiContext;
import com.xpn.xwiki.XWikiException;
import com.xpn.xwiki.api.Attachment;
import com.xpn.xwiki.api.Document;
import com.xpn.xwiki.doc.XWikiDocument;
import com.xpn.xwiki.objects.BaseObject;
import com.xpn.xwiki.render.XWikiRenderingEngine;
import com.xpn.xwiki.web.Utils;
import com.xpn.xwiki.web.XWikiMessageTool;
import com.xpn.xwiki.web.XWikiRequest;
@Component
public class WebUtilsService implements IWebUtilsService {
private static Log LOGGER = LogFactory.getFactory().getInstance(WebUtilsService.class);
@Requirement("default")
EntityReferenceSerializer<String> serializer_default;
@Requirement("local")
EntityReferenceSerializer<String> serializer_local;
@Requirement
EntityReferenceResolver<String> referenceResolver;
/**
* Used to get the template path mapping information.
*/
@Requirement
private TemplatePathTransformationConfiguration tempPathConfig;
@Requirement
Execution execution;
XWikiRenderingEngine injectedRenderingEngine;
/**
 * Returns the current XWiki request context, stored on the execution
 * context under the well-known key "xwikicontext".
 */
private XWikiContext getContext() {
  Object property = execution.getContext().getProperty("xwikicontext");
  return (XWikiContext) property;
}
/**
 * Returns the ancestor of the current document at the given level of the
 * parent chain, or null if the level is out of range.
 *
 * @param level 1-based level counted from the top of the parent chain
 * @return the parent document reference, or null if none exists at that level
 */
public DocumentReference getParentForLevel(int level) {
  LOGGER.trace("getParentForLevel: start for level " + level);
  DocumentReference parent = null;
  List<DocumentReference> parents = getDocumentParentsList(
      getContext().getDoc().getDocumentReference(), true);
  int index = parents.size() - level + 1;
  boolean indexInRange = (index > -1) && (index < parents.size());
  if (indexInRange) {
    parent = parents.get(index);
  }
  LOGGER.debug("getParentForLevel: level [" + level + "] returning [" + parent + "]");
  return parent;
}
/**
 * Walks the parent chain of the given document and collects every existing
 * ancestor, stopping at missing documents or when a cycle is detected.
 *
 * @param docRef the document whose ancestors are collected
 * @param includeDoc if true, the document itself is the first list entry
 * @return the parent chain from the document upwards (possibly empty)
 */
public List<DocumentReference> getDocumentParentsList(DocumentReference docRef,
    boolean includeDoc) {
  ArrayList<DocumentReference> docParents = new ArrayList<DocumentReference>();
  try {
    DocumentReference current = includeDoc ? docRef : getParentRef(docRef);
    // Stop on a null parent, a non-existing document, or a cycle.
    while ((current != null)
        && getContext().getWiki().exists(current, getContext())
        && !docParents.contains(current)) {
      docParents.add(current);
      current = getParentRef(current);
    }
  } catch (XWikiException e) {
    LOGGER.error("Failed to get parent reference. ", e);
  }
  return docParents;
}
/**
 * Loads the given document and returns its parent reference (may be null).
 *
 * @throws XWikiException if the document cannot be loaded
 */
private DocumentReference getParentRef(DocumentReference docRef) throws XWikiException {
  XWikiDocument doc = getContext().getWiki().getDocument(docRef, getContext());
  return doc.getParentReference();
}
/**
 * Returns the requested document section as a JSON array containing one
 * dictionary with the rendered content, the effective (clamped) section
 * number and the total number of sections.
 *
 * @param regex the separator regex used to split the document content
 * @param docRef the document to read
 * @param section the requested 1-based section number
 * @return JSON string: [{"content": ..., "section": n, "sectionNr": total}]
 * @throws XWikiException if the document cannot be loaded
 */
public String getDocSectionAsJSON(String regex, DocumentReference docRef,
    int section) throws XWikiException {
  Builder json = new Builder();
  json.openArray();
  json.openDictionary();
  json.addStringProperty("content", getDocSection(regex, docRef, section));
  int sectionCount = countSections(regex, docRef);
  json.openProperty("section");
  json.addNumber(new BigDecimal(getSectionNr(section, sectionCount)));
  json.openProperty("sectionNr");
  json.addNumber(new BigDecimal(sectionCount));
  json.closeDictionary();
  json.closeArray();
  return json.getJSON();
}
public String getDocSection(String regex, DocumentReference docRef, int section
) throws XWikiException {
LOGGER.debug("use regex '" + regex + "' on '" + docRef
+ "' and get section " + section);
XWikiDocument doc = getContext().getWiki().getDocument(docRef, getContext());
String content = doc.getTranslatedDocument(getContext()).getContent();
LOGGER.debug("content of'" + docRef + "' is: '" + content + "'");
String section_str = null;
if((content != null) && (!isEmptyRTEString(content))){
section = getSectionNr(section, countSections(regex, docRef));
for (String partStr : content.split(regex)) {
if(!isEmptyRTEString(partStr)) {
section
if(section == 0) {
section_str = partStr;
break;
}
}
}
} else {
LOGGER.debug("content ist empty");
}
if(section_str != null) {
section_str = renderText(section_str);
}
return section_str;
}
/**
 * Counts the non-empty parts of the given document's content when split by
 * the given regex.
 *
 * @param regex the separator regex used to split the content
 * @param docRef the document to read
 * @return the number of non-empty sections (0 for empty content)
 * @throws XWikiException if the document cannot be loaded
 */
public int countSections(String regex, DocumentReference docRef) throws XWikiException {
  LOGGER.debug("use regex '" + regex + "' on '" + docRef + "'");
  XWikiDocument doc = getContext().getWiki().getDocument(docRef, getContext());
  String content = doc.getTranslatedDocument(getContext()).getContent();
  LOGGER.debug("content of'" + docRef + "' is: '" + content + "'");
  if ((content == null) || isEmptyRTEString(content)) {
    LOGGER.debug("content ist empty");
    return 0;
  }
  int nonEmptyParts = 0;
  for (String part : content.split(regex)) {
    if (!isEmptyRTEString(part)) {
      nonEmptyParts++;
    }
  }
  return nonEmptyParts;
}
/**
 * Clamps the requested section number into the valid range [1, sectionNr].
 * Values below 1 map to 1, values above sectionNr map to sectionNr (the
 * upper clamp is applied last, so sectionNr wins when it is below 1).
 *
 * @param section the requested section number
 * @param sectionNr the total number of sections
 * @return the clamped section number
 */
int getSectionNr(int section, int sectionNr) {
  int clamped = Math.max(section, 1);
  return Math.min(clamped, sectionNr);
}
/**
 * Renders the given text through the wiki rendering engine, wrapped in
 * {pre}...{/pre} markers.
 */
private String renderText(String velocityText) {
  String wrapped = "{pre}" + velocityText + "{/pre}";
  return getContext().getWiki().getRenderingEngine().renderText(wrapped,
      getContext().getDoc(), getContext());
}
/**
 * Checks via EmptyCheckCommand whether the given rich-text content counts
 * as empty (e.g. only empty markup).
 */
private boolean isEmptyRTEString(String rteContent) {
  EmptyCheckCommand emptyCheck = new EmptyCheckCommand();
  return emptyCheck.isEmptyRTEString(rteContent);
}
/**
 * Returns the allowed languages configured for the current document's
 * space, or an empty list when no context/document is available.
 */
public List<String> getAllowedLanguages() {
  if ((getContext() == null) || (getContext().getDoc() == null)) {
    return Collections.emptyList();
  }
  String spaceName = getContext().getDoc().getDocumentReference(
      ).getLastSpaceReference().getName();
  return getAllowedLanguages(spaceName);
}
/**
 * Returns the allowed languages for the given space from the "languages"
 * preference (split on spaces/commas), falling back to the deprecated
 * "language" preference when "languages" is not set.
 *
 * @param spaceName the space whose preferences are read
 * @return the configured language codes
 */
public List<String> getAllowedLanguages(String spaceName) {
  String prefValue = getContext().getWiki(
      ).getSpacePreference("languages", spaceName, "", getContext());
  List<String> languages = new ArrayList<String>(Arrays.asList(prefValue.split("[ ,]")));
  languages.remove("");
  if (!languages.isEmpty()) {
    return languages;
  }
  LOGGER.warn("Deprecated usage of Preferences field 'language'."
      + " Instead use 'languages'.");
  String fallback = getContext().getWiki(
      ).getSpacePreference("language", spaceName, "", getContext());
  return Arrays.asList(fallback.split("[ ,]"));
}
/**
 * Parses the given date string using the given SimpleDateFormat pattern.
 *
 * @param date the date string to parse
 * @param format the SimpleDateFormat pattern describing the input
 * @return the parsed Date, or null when the string does not match
 */
public Date parseDate(String date, String format) {
  SimpleDateFormat parser = new SimpleDateFormat(format);
  try {
    return parser.parse(date);
  } catch (ParseException exc) {
    // Parsing failures are logged and mapped to null rather than thrown.
    LOGGER.fatal(exc);
    return null;
  }
}
/**
 * Returns a message tool for the given language. Reuses the context's
 * message tool when the language matches the current request language;
 * otherwise builds one on a cloned context with the requested language.
 *
 * @param adminLanguage the language for the message tool; null returns null
 * @return the message tool, or null when adminLanguage is null
 */
public XWikiMessageTool getMessageTool(String adminLanguage) {
  if (adminLanguage == null) {
    return null;
  }
  if ((getContext().getLanguage() != null) && getContext().getLanguage().equals(
      adminLanguage)) {
    return getContext().getMessageTool();
  }
  Locale locale = new Locale(adminLanguage);
  ResourceBundle bundle = ResourceBundle.getBundle("ApplicationResources",
      locale);
  // NOTE(review): per JavaDoc ResourceBundle.getBundle never returns null
  // (it throws MissingResourceException); the defensive check is kept to
  // preserve the original behavior.
  if (bundle == null) {
    bundle = ResourceBundle.getBundle("ApplicationResources");
  }
  XWikiContext adminContext = (XWikiContext) getContext().clone();
  adminContext.putAll(getContext());
  adminContext.setLanguage(adminLanguage);
  return new XWikiMessageTool(bundle, adminContext);
}
/**
 * Returns the message tool for the current user's admin language.
 */
public XWikiMessageTool getAdminMessageTool() {
  String adminLang = getAdminLanguage();
  return getMessageTool(adminLang);
}
/**
 * Returns the admin language configured for the current context user.
 */
public String getAdminLanguage() {
  String currentUser = getContext().getUser();
  return getAdminLanguage(currentUser);
}
/**
 * Returns the admin language configured for the given user.
 *
 * @param userFullName the user's document full name (e.g. "XWiki.JohnDoe")
 * @return the user's admin language, falling back to the default
 * @deprecated since 2.34.0 instead use getAdminLanguage(DocumentReference userRef)
 */
@Deprecated
public String getAdminLanguage(String userFullName) {
  return getAdminLanguage(resolveDocumentReference(userFullName));
}
/**
 * Returns the admin language stored on the given user's XWiki.XWikiUsers
 * profile object, falling back to the default admin language when the user
 * document cannot be read or no language is set.
 *
 * @param userRef reference to the user document
 * @return the admin language (never null or empty)
 */
public String getAdminLanguage(DocumentReference userRef) {
  String adminLanguage = null;
  try {
    // The XWikiUsers class lives in the same wiki as the user document
    DocumentReference xwikiUsersClassRef = new DocumentReference(
        userRef.getWikiReference().getName(), "XWiki", "XWikiUsers");
    BaseObject userObj = getContext().getWiki().getDocument(userRef, getContext()
        ).getXObject(xwikiUsersClassRef);
    if (userObj != null) {
      adminLanguage = userObj.getStringValue("admin_language");
    }
  } catch (XWikiException e) {
    // BUGFIX: formerly logged the context user instead of the user actually
    // being looked up, and dropped the exception (no stack trace).
    LOGGER.error("failed to get UserObject for " + userRef, e);
  }
  if ((adminLanguage == null) || ("".equals(adminLanguage))) {
    adminLanguage = getDefaultAdminLanguage();
  }
  return adminLanguage;
}
/**
 * Returns the default admin language: the "admin_language" space
 * preference, then the "celements.admin_language" configuration parameter,
 * then the hard-coded fallback "en".
 */
public String getDefaultAdminLanguage() {
  String lang = getContext().getWiki().getSpacePreference("admin_language",
      getContext().getLanguage(), getContext());
  if ((lang != null) && !"".equals(lang)) {
    return lang;
  }
  lang = getContext().getWiki().Param("celements.admin_language");
  if ((lang == null) || "".equals(lang)) {
    lang = "en";
  }
  return lang;
}
/**
 * Returns the "default_language" preference for the current context.
 * NOTE(review): the exact fallback chain (space -> wiki -> config) is
 * defined by XWiki#getSpacePreference.
 */
public String getDefaultLanguage() {
  return getContext().getWiki().getSpacePreference("default_language", getContext());
}
/**
 * Returns the "default_language" preference for the given space, with an
 * empty-string default when the preference is not set.
 *
 * @param spaceName the space whose preference is read
 */
public String getDefaultLanguage(String spaceName) {
  return getContext().getWiki().getSpacePreference("default_language", spaceName, "",
      getContext());
}
/**
 * Checks whether a non-empty "parent" space preference is configured for
 * the current context.
 */
public boolean hasParentSpace() {
  // Read the preference once instead of twice (the former code called
  // getParentSpace() for both the null check and the emptiness check).
  String parentSpace = getParentSpace();
  return (parentSpace != null) && !"".equals(parentSpace);
}
/**
 * Checks whether a non-empty "parent" space preference is configured for
 * the given space.
 *
 * @param spaceName the space whose preference is checked
 */
public boolean hasParentSpace(String spaceName) {
  // Read the preference once instead of twice (the former code called
  // getParentSpace(spaceName) for both checks).
  String parentSpace = getParentSpace(spaceName);
  return (parentSpace != null) && !"".equals(parentSpace);
}
/**
 * Returns the "parent" space preference for the current context.
 */
public String getParentSpace() {
  return getContext().getWiki().getSpacePreference("parent", getContext());
}
/**
 * Returns the "parent" space preference for the given space, with an
 * empty-string default when the preference is not set.
 *
 * @param spaceName the space whose preference is read
 */
public String getParentSpace(String spaceName) {
  return getContext().getWiki().getSpacePreference("parent", spaceName, "",
      getContext());
}
/**
 * Resolves the given document full name (e.g. "Space.Doc") relative to the
 * current wiki.
 *
 * @param fullName the document full name to resolve
 * @return the resolved document reference
 */
public DocumentReference resolveDocumentReference(String fullName) {
  return resolveDocumentReference(fullName, null);
}
/**
 * Resolves the given document full name relative to the given wiki.
 *
 * @param fullName the document full name to resolve
 * @param wikiRef wiki to resolve against; null means the current wiki
 * @return the resolved document reference
 */
public DocumentReference resolveDocumentReference(String fullName,
    WikiReference wikiRef) {
  return new DocumentReference(resolveEntityReference(fullName, EntityType.DOCUMENT,
      wikiRef));
}
/**
 * Resolves the given space name relative to the current wiki.
 *
 * @param spaceName the space name to resolve
 * @return the resolved space reference
 */
public SpaceReference resolveSpaceReference(String spaceName) {
  return resolveSpaceReference(spaceName, null);
}
/**
 * Resolves the given space name relative to the given wiki.
 *
 * @param spaceName the space name to resolve
 * @param wikiRef wiki to resolve against; null means the current wiki
 * @return the resolved space reference
 */
public SpaceReference resolveSpaceReference(String spaceName, WikiReference wikiRef) {
  return new SpaceReference(resolveEntityReference(spaceName, EntityType.SPACE,
      wikiRef));
}
/**
 * Resolves the given name into an entity reference of the given type,
 * defaulting to the current database's wiki when none is supplied.
 *
 * @param name the entity name to resolve
 * @param type the entity type to resolve to
 * @param wikiRef wiki to resolve against; null means the current wiki
 * @return the resolved reference
 */
private EntityReference resolveEntityReference(String name, EntityType type,
    WikiReference wikiRef) {
  WikiReference effectiveWiki = (wikiRef != null) ? wikiRef
      : new WikiReference(getContext().getDatabase());
  EntityReference ref = referenceResolver.resolve(name, type, effectiveWiki);
  LOGGER.debug("resolveEntityReference: for [" + name + "] got reference [" + ref + "]");
  return ref;
}
/**
 * Checks whether the current user has admin rights or is a member of
 * XWiki.XWikiAdminGroup. Returns false when the context is incomplete or
 * the rights check fails.
 */
public boolean isAdminUser() {
  try {
    boolean contextReady = (getContext().getXWikiUser() != null)
        && (getContext().getWiki().getRightService() != null)
        && (getContext().getDoc() != null);
    if (!contextReady) {
      return false;
    }
    if (getContext().getWiki().getRightService().hasAdminRights(getContext())) {
      return true;
    }
    return getContext().getXWikiUser().isUserInGroup("XWiki.XWikiAdminGroup",
        getContext());
  } catch (XWikiException e) {
    LOGGER.error("Cannot determin if user has Admin Rights therefore guess"
        + " no (false).", e);
    return false;
  }
}
/**
 * Checks whether the current user is a "super admin": an admin who is
 * either a global user (name prefixed "xwiki:") or acting on the main wiki.
 */
public boolean isSuperAdminUser() {
  String user = getContext().getUser();
  LOGGER.trace("isSuperAdminUser: user [" + user + "] db [" + getContext().getDatabase()
      + "].");
  if (!isAdminUser()) {
    return false;
  }
  return user.startsWith("xwiki:") || getContext().isMainWiki();
}
/**
 * Checks whether the current user may edit page layouts: either an
 * advanced admin or a member of XWiki.LayoutEditorsGroup.
 */
public boolean isLayoutEditor() {
  String user = getContext().getUser();
  LOGGER.trace("isLayoutEditor: user [" + user + "] db [" + getContext().getDatabase()
      + "].");
  try {
    // Short-circuit: the group lookup only runs for non-advanced-admins.
    boolean isLayoutEditor = isAdvancedAdmin();
    if (!isLayoutEditor) {
      isLayoutEditor = getContext().getXWikiUser().isUserInGroup(
          "XWiki.LayoutEditorsGroup", getContext());
    }
    LOGGER.debug("isLayoutEditor: admin [" + isAdminUser() + "] global user ["
        + user.startsWith("xwiki:") + "] returning [" + isLayoutEditor + "] db ["
        + getContext().getDatabase() + "].");
    return isLayoutEditor;
  } catch (XWikiException exp) {
    LOGGER.error("Failed to get user document for [" + user + "].", exp);
  }
  return false;
}
/**
 * Checks whether the current user is an "advanced admin": an admin who is
 * either a global user (name prefixed "xwiki:") or whose XWiki.XWikiUsers
 * profile object has usertype "Advanced". Returns false when the user
 * document cannot be loaded.
 */
public boolean isAdvancedAdmin() {
String user = getContext().getUser();
LOGGER.trace("isAdvancedAdmin: user [" + user + "] db [" + getContext().getDatabase()
+ "].");
try {
XWikiDocument userDoc = getContext().getWiki().getDocument(resolveDocumentReference(
user), getContext());
// The profile object carries the "usertype" property checked below.
BaseObject userObj = userDoc.getXObject(resolveDocumentReference(
"XWiki.XWikiUsers"));
// Global users ("xwiki:" prefix) qualify regardless of their usertype.
boolean isAdvancedAdmin = isAdminUser() && (user.startsWith("xwiki:")
|| ((userObj != null) && "Advanced".equals(userObj.getStringValue("usertype"
))));
LOGGER.debug("isAdvancedAdmin: admin [" + isAdminUser() + "] global user ["
+ user.startsWith("xwiki:") + "] usertype [" + ((userObj != null
) ? userObj.getStringValue("usertype") : "null") + "] returning ["
+ isAdvancedAdmin + "] db [" + getContext().getDatabase() + "].");
return isAdvancedAdmin;
} catch (XWikiException exp) {
LOGGER.error("Failed to get user document for [" + user + "].", exp);
}
return false;
}
/**
 * Returns the attachments of all documents in the given space, sorted with
 * the named comparator, optionally filtered to images and reduced to a
 * sub-range.
 *
 * @param spaceName the space whose documents are scanned
 * @param comparator simple class name of a comparator in
 *          com.celements.web.comparators used for sorting
 * @param imagesOnly if true, only image attachments are kept
 * @param start index of the first attachment to return (values &lt;= 0 mean 0)
 * @param nb maximum number of attachments to return (&lt;= 0 means no limit)
 * @return the sorted, filtered, reduced attachment list
 * @throws ClassNotFoundException if the comparator class does not exist
 */
@SuppressWarnings("unchecked")
public List<Attachment> getAttachmentListSortedSpace(String spaceName,
    String comparator, boolean imagesOnly, int start, int nb
    ) throws ClassNotFoundException {
  List<Attachment> attachments = new ArrayList<Attachment>();
  try {
    for (String docName : getContext().getWiki().getSpaceDocsName(spaceName,
        getContext())) {
      DocumentReference docRef = new DocumentReference(getContext().getDatabase(),
          spaceName, docName);
      XWikiDocument doc = getContext().getWiki().getDocument(docRef, getContext());
      attachments.addAll(new Document(doc, getContext()).getAttachmentList());
    }
  } catch (XWikiException xwe) {
    LOGGER.error("Could not get all documents in " + spaceName, xwe);
  }
  // CLEANUP: ClassNotFoundException is declared on this method and simply
  // propagates; the former catch-and-rethrow of it was redundant.
  try {
    Comparator<Attachment> comparatorClass =
        (Comparator<Attachment>) Class.forName(
            "com.celements.web.comparators." + comparator).newInstance();
    Collections.sort(attachments, comparatorClass);
  } catch (InstantiationException e) {
    LOGGER.error(e);
  } catch (IllegalAccessException e) {
    LOGGER.error(e);
  }
  if (imagesOnly) {
    // Iterate over a copy so removal from the live list is safe
    for (Attachment att : new ArrayList<Attachment>(attachments)) {
      if (!att.isImage()) {
        attachments.remove(att);
      }
    }
  }
  return reduceListToSize(attachments, start, nb);
}
/**
 * Returns the attachments of the given document sorted with the named
 * comparator.
 *
 * @param doc the document whose attachments are sorted
 * @param comparator simple class name of a comparator in
 *          com.celements.web.comparators used for sorting
 * @return the document's attachment list, sorted in place
 * @throws ClassNotFoundException if the comparator class does not exist
 */
@SuppressWarnings("unchecked")
public List<Attachment> getAttachmentListSorted(Document doc,
    String comparator) throws ClassNotFoundException {
  List<Attachment> attachments = doc.getAttachmentList();
  // CLEANUP: ClassNotFoundException is declared on this method and simply
  // propagates; the former catch-and-rethrow of it was redundant.
  try {
    Comparator<Attachment> comparatorClass =
        (Comparator<Attachment>) Class.forName(
            "com.celements.web.comparators." + comparator).newInstance();
    Collections.sort(attachments, comparatorClass);
  } catch (InstantiationException e) {
    LOGGER.error(e);
  } catch (IllegalAccessException e) {
    LOGGER.error(e);
  }
  return attachments;
}
/**
 * Reduces the list to the window [start, start+nb). With no offset and no
 * effective limit the original list is returned unchanged; an out-of-range
 * start yields an empty list. Negative start/nb are treated as 0.
 *
 * @param attachments the list to reduce
 * @param start index of the first element to keep
 * @param nb maximum number of elements to keep (&lt;= 0 means no limit)
 * @return the reduced list (may be a subList view of the input)
 */
List<Attachment> reduceListToSize(List<Attachment> attachments, int start, int nb) {
  boolean noLimit = (nb <= 0) || (nb >= attachments.size());
  if ((start <= 0) && noLimit) {
    return attachments;
  }
  if (start >= attachments.size()) {
    return new ArrayList<Attachment>();
  }
  int from = Math.max(0, start);
  int to = Math.min(from + Math.max(0, nb), attachments.size());
  return attachments.subList(from, to);
}
/**
 * Convenience overload returning the full sorted (and optionally
 * image-filtered) attachment list with no offset and no limit.
 *
 * @param doc the document whose attachments are sorted
 * @param comparator simple class name of the comparator used for sorting
 * @param imagesOnly if true, only image attachments are kept
 */
public List<Attachment> getAttachmentListSorted(Document doc, String comparator,
boolean imagesOnly) {
return getAttachmentListSorted(doc, comparator, imagesOnly, 0, 0);
}
/**
 * Returns the sorted attachments of the given document, optionally filtered
 * to images, reduced to the window [start, start+nb). An unknown comparator
 * class is logged and yields an empty list.
 *
 * @param doc the document whose attachments are sorted
 * @param comparator simple class name of the comparator used for sorting
 * @param imagesOnly if true, only image attachments are kept
 * @param start index of the first attachment to return
 * @param nb maximum number of attachments to return (&lt;= 0 means no limit)
 */
public List<Attachment> getAttachmentListSorted(Document doc, String comparator,
    boolean imagesOnly, int start, int nb) {
  try {
    List<Attachment> sorted = getAttachmentListSorted(doc, comparator);
    if (imagesOnly) {
      // Iterate over a snapshot so removal from the live list is safe
      List<Attachment> snapshot = new ArrayList<Attachment>(sorted);
      for (Attachment att : snapshot) {
        if (!att.isImage()) {
          sorted.remove(att);
        }
      }
    }
    return reduceListToSize(sorted, start, nb);
  } catch (ClassNotFoundException exp) {
    LOGGER.error(exp);
  }
  return Collections.emptyList();
}
/**
 * Convenience overload serializing the full sorted (and optionally
 * image-filtered) attachment list as JSON, with no offset and no limit.
 *
 * @param doc the document whose attachments are serialized
 * @param comparator simple class name of the comparator used for sorting
 * @param imagesOnly if true, only image attachments are included
 */
public String getAttachmentListSortedAsJSON(Document doc, String comparator,
boolean imagesOnly) {
return getAttachmentListSortedAsJSON(doc, comparator, imagesOnly, 0, 0);
}
/**
 * Serialises the sorted, optionally image-filtered and paged attachment list
 * of {@code doc} as a JSON array. Each entry carries filename, version,
 * author, mimeType, lastChanged (formatted "dd.MM.yyyy HH:mm:ss") and url.
 */
public String getAttachmentListSortedAsJSON(Document doc,
    String comparator, boolean imagesOnly, int start, int nb) {
  SimpleDateFormat dateFormatter = new SimpleDateFormat("dd.MM.yyyy HH:mm:ss");
  Builder jsonBuilder = new Builder();
  jsonBuilder.openArray();
  List<Attachment> attachments = getAttachmentListSorted(doc, comparator, imagesOnly,
      start, nb);
  for (Attachment att : attachments) {
    jsonBuilder.openDictionary();
    jsonBuilder.addStringProperty("filename", att.getFilename());
    jsonBuilder.addStringProperty("version", att.getVersion());
    jsonBuilder.addStringProperty("author", att.getAuthor());
    jsonBuilder.addStringProperty("mimeType", att.getMimeType());
    jsonBuilder.addStringProperty("lastChanged", dateFormatter.format(att.getDate()));
    jsonBuilder.addStringProperty("url", doc.getAttachmentURL(att.getFilename()));
    jsonBuilder.closeDictionary();
  }
  jsonBuilder.closeArray();
  return jsonBuilder.getJSON();
}
/**
 * Serialises {@code xwikiDoc} into a flat, insertion-ordered key/value map
 * (document identity, parents, authorship, timestamps, content).
 *
 * FIX: the trailing comma of "parentslist"/"parentslistmenuname" was never
 * removed because {@link String#replace(CharSequence, CharSequence)} matched
 * the literal text ",+$"; {@code replaceAll} applies it as the intended
 * regular expression. Duplicate puts of "defaultLanguage" and "creator"
 * (identical values, no effect on the map) were removed.
 *
 * @param xwikiDoc document to export
 * @param bWithObjects object export is not implemented — throws
 * @param bWithRendering adds "renderedcontent" and "celrenderedcontent"
 * @param bWithAttachmentContent currently unused (attachment export is TODO)
 * @param bWithVersions adds the document history archive under "versions"
 * @return linked map with the exported fields
 * @throws XWikiException on failing document/archive access
 */
Map<String, String> xwikiDoctoLinkedMap(XWikiDocument xwikiDoc,
    boolean bWithObjects, boolean bWithRendering,
    boolean bWithAttachmentContent, boolean bWithVersions) throws XWikiException {
  Map<String, String> docData = new LinkedHashMap<String, String>();
  DocumentReference docRef = xwikiDoc.getDocumentReference();
  docData.put("web", docRef.getLastSpaceReference().getName());
  docData.put("name", docRef.getName());
  docData.put("language", xwikiDoc.getLanguage());
  docData.put("defaultLanguage", xwikiDoc.getDefaultLanguage());
  docData.put("translation", "" + xwikiDoc.getTranslation());
  docData.put("parent", serializer_default.serialize(xwikiDoc.getParentReference()));
  String parentsListStr = "";
  String parentsListMNStr = "";
  MultilingualMenuNameCommand menuNameCmd = new MultilingualMenuNameCommand();
  for (DocumentReference parentDocRef : getDocumentParentsList(docRef, false)) {
    String parentDocFN = serializer_default.serialize(parentDocRef);
    parentsListMNStr += menuNameCmd.getMenuNameBaseObject(parentDocFN, getContext(
        ).getLanguage(), getContext()) + ",";
    parentsListStr += parentDocFN + ",";
  }
  // replaceAll (regex), not replace (literal): strip the trailing comma(s).
  docData.put("parentslist", parentsListStr.replaceAll(",+$", ""));
  docData.put("parentslistmenuname", parentsListMNStr.replaceAll(",+$", ""));
  docData.put("creator", xwikiDoc.getCreator());
  docData.put("author", xwikiDoc.getAuthor());
  docData.put("customClass", xwikiDoc.getCustomClass());
  docData.put("contentAuthor", xwikiDoc.getContentAuthor());
  docData.put("creationDate", "" + xwikiDoc.getCreationDate().getTime());
  docData.put("date", "" + xwikiDoc.getDate().getTime());
  docData.put("contentUpdateDate", "" + xwikiDoc.getContentUpdateDate().getTime());
  docData.put("version", xwikiDoc.getVersion());
  docData.put("title", xwikiDoc.getTitle());
  docData.put("template", serializer_local.serialize(
      xwikiDoc.getTemplateDocumentReference()));
  docData.put("getDefaultTemplate", xwikiDoc.getDefaultTemplate());
  docData.put("getValidationScript", xwikiDoc.getValidationScript());
  docData.put("comment", xwikiDoc.getComment());
  docData.put("minorEdit", String.valueOf(xwikiDoc.isMinorEdit()));
  docData.put("syntaxId", xwikiDoc.getSyntax().toIdString());
  docData.put("menuName", new MultilingualMenuNameCommand().getMultilingualMenuName(
      xwikiDoc.getXObject(getRef("Celements2", "MenuItem")),
      xwikiDoc.getLanguage(), getContext()));
  //docData.put("hidden", String.valueOf(xwikiDoc.isHidden()));
  /** TODO add Attachments
  for (XWikiAttachment attach : xwikiDoc.getAttachmentList()) {
    docel.add(attach.toXML(bWithAttachmentContent, bWithVersions, context));
  }**/
  if (bWithObjects) {
    // Object export (class definition + objects in a stable order) was never
    // ported from the XML exporter.
    throw new NotImplementedException();
  }
  String host = getContext().getRequest().getHeader("host");
  // Add Content
  docData.put("content", replaceInternalWithExternalLinks(xwikiDoc.getContent(), host));
  if (bWithRendering) {
    try {
      docData.put("renderedcontent", replaceInternalWithExternalLinks(
          xwikiDoc.getRenderedContent(getContext()), host));
    } catch (XWikiException exp) {
      LOGGER.error("Exception with rendering content: ", exp);
    }
    try {
      docData.put("celrenderedcontent", replaceInternalWithExternalLinks(
          getCelementsRenderCmd().renderCelementsDocument(xwikiDoc.getDocumentReference(
          ), getContext().getLanguage(), "view"), host));
    } catch (XWikiException exp) {
      LOGGER.error("Exception with rendering content: ", exp);
    }
  }
  if (bWithVersions) {
    try {
      docData.put("versions", xwikiDoc.getDocumentArchive(getContext()
          ).getArchive(getContext()));
    } catch (XWikiException exp) {
      LOGGER.error("Document [" + docRef.getName()
          + "] has malformed history", exp);
    }
  }
  return docData;
}
/** Creates a fresh RenderCommand whose page type falls back to "RichText". */
private RenderCommand getCelementsRenderCmd() {
  RenderCommand renderCmd = new RenderCommand();
  renderCmd.setDefaultPageType("RichText");
  return renderCmd;
}
String replaceInternalWithExternalLinks(String content, String host) {
String result = content.replaceAll("src=\\\"(\\.\\./)*/?download/", "src=\"http:
+ host + "/download/");
result = result.replaceAll("href=\\\"(\\.\\./)*/?download/", "href=\"http:
+ host + "/download/");
result = result.replaceAll("href=\\\"(\\.\\./)*/?skin/", "href=\"http:
+ host + "/skin/");
result = result.replaceAll("href=\\\"(\\.\\./)*/?view/", "href=\"http:
+ host + "/view/");
result = result.replaceAll("href=\\\"(\\.\\./)*/?edit/", "href=\"http:
+ host + "/edit/");
return result;
}
/**
 * Renders the translated version of {@code cdoc} into a JSON dictionary of
 * the fields produced by xwikiDoctoLinkedMap (with rendering, without
 * objects/attachments/versions). Failures are logged and yield "{}".
 */
public String getJSONContent(XWikiDocument cdoc) {
  Map<String, String> data;
  try {
    data = xwikiDoctoLinkedMap(cdoc.getTranslatedDocument(getContext()), false, true,
        false, false);
  } catch (XWikiException exp) {
    LOGGER.error(exp);
    data = Collections.emptyMap();
  }
  Builder jsonBuilder = new Builder();
  jsonBuilder.openDictionary();
  for (Map.Entry<String, String> entry : data.entrySet()) {
    jsonBuilder.addStringProperty(entry.getKey(), entry.getValue());
  }
  jsonBuilder.closeDictionary();
  return jsonBuilder.getJSON();
}
/**
 * Pretty name ("last, first") of the user stored at {@code authDocRef}, or
 * the localised "unknown author" message if the doc carries no XWikiUsers
 * object.
 *
 * @throws XWikiException if the user document cannot be loaded
 */
public String getUserNameForDocRef(DocumentReference authDocRef) throws XWikiException {
  XWikiDocument authDoc = getContext().getWiki().getDocument(authDocRef, getContext());
  BaseObject authObj = authDoc.getXObject(getRef("XWiki", "XWikiUsers"));
  if (authObj == null) {
    return getAdminMessageTool().get("cel_ml_unknown_author");
  }
  return authObj.getStringValue("last_name") + ", "
      + authObj.getStringValue("first_name");
}
/**
 * Major part of the document version: "N" for a version "N.M", the raw
 * version string if it has no dot, and "1" for a null document.
 * NOTE: if doc.getVersion() itself returns null, that null is passed through
 * unchanged (same as before).
 */
public String getMajorVersion(XWikiDocument doc) {
  if (doc == null) {
    return "1";
  }
  String revision = doc.getVersion();
  boolean hasDottedVersion = (revision != null) && (revision.trim().length() > 0)
      && revision.contains(".");
  return hasDottedVersion ? revision.split("\\.")[0] : revision;
}
/** Builds a DocumentReference in the current context wiki (database). */
private DocumentReference getRef(String spaceName, String pageName) {
  String wikiName = getContext().getDatabase();
  return new DocumentReference(wikiName, spaceName, pageName);
}
/**
 * Objects of {@code classRef} on {@code doc}, ordered by a single field.
 * Delegates to the two-field variant with no secondary sort key.
 */
public List<BaseObject> getObjectsOrdered(XWikiDocument doc, DocumentReference classRef,
    String orderField, boolean asc) {
  return getObjectsOrdered(doc, classRef, orderField, asc, null, false);
}
/**
 * Get a list of Objects for a Document sorted by one or two fields.
 *
 * @param doc The Document where the Objects are attached.
 * @param classRef The reference to the class of the Objects to return
 * @param orderField1 Field to order the objects by. First priority.
 * @param asc1 Order first priority ascending or descending.
 * @param orderField2 Field to order the objects by. Second priority.
 * @param asc2 Order second priority ascending or descending.
 * @return List of objects ordered as specified (empty for a null doc)
 */
public List<BaseObject> getObjectsOrdered(XWikiDocument doc, DocumentReference classRef,
    String orderField1, boolean asc1, String orderField2, boolean asc2) {
  List<BaseObject> sorted = new ArrayList<BaseObject>();
  if (doc == null) {
    return sorted;
  }
  List<BaseObject> allObjects = doc.getXObjects(classRef);
  if (allObjects != null) {
    // getXObjects may contain null slots for deleted objects — skip them.
    for (BaseObject obj : allObjects) {
      if (obj != null) {
        sorted.add(obj);
      }
    }
  }
  Collections.sort(sorted, new BaseObjectComparator(orderField1, asc1,
      orderField2, asc2));
  return sorted;
}
/**
 * Splits {@code inStr} into consecutive pieces of at most {@code maxLength}
 * characters. The last piece carries the remainder; an empty input yields a
 * single empty piece.
 */
public String[] splitStringByLength(String inStr, int maxLength) {
  int numPieces = 1 + ((inStr.length() - 1) / maxLength);
  String[] pieces = new String[numPieces];
  for (int i = 0; i < numPieces; i++) {
    int from = i * maxLength;
    int to = Math.min(from + maxLength, inStr.length());
    pieces[i] = inStr.substring(from, to);
  }
  return pieces;
}
/**
 * Wiki reference of {@code docRef}: the parent of its last space reference.
 * NOTE(review): casts unconditionally — presumes the parent of a space is
 * always the wiki, which holds for non-nested spaces.
 */
public WikiReference getWikiRef(DocumentReference docRef) {
  return (WikiReference) docRef.getLastSpaceReference().getParent();
}
/**
 * Resolves the "template" request parameter to an existing document
 * reference. Returns null when there is no request, the parameter is
 * missing/blank, or the referenced document does not exist.
 */
public DocumentReference getWikiTemplateDocRef() {
  if (getContext().getRequest() == null) {
    return null;
  }
  String templateFN = getContext().getRequest().get("template");
  if ((templateFN == null) || "".equals(templateFN.trim())) {
    return null;
  }
  DocumentReference templateDocRef = resolveDocumentReference(templateFN);
  if (getContext().getWiki().exists(templateDocRef, getContext())) {
    return templateDocRef;
  }
  return null;
}
/**
 * Loads the template document named by the "template" request parameter.
 * Returns null when no valid template reference is present or loading fails
 * (failure is logged).
 */
public XWikiDocument getWikiTemplateDoc() {
  DocumentReference templateDocRef = getWikiTemplateDocRef();
  if (templateDocRef == null) {
    return null;
  }
  try {
    return getContext().getWiki().getDocument(templateDocRef, getContext());
  } catch (XWikiException exp) {
    LOGGER.error("Exception while getting template doc '" + templateDocRef + "'",
        exp);
    return null;
  }
}
/** Serializer producing fully qualified reference names (including wiki). */
public EntityReferenceSerializer<String> getRefDefaultSerializer() {
  return serializer_default;
}
/** Serializer producing wiki-local reference names (wiki part omitted). */
public EntityReferenceSerializer<String> getRefLocalSerializer() {
  return serializer_local;
}
/**
 * Current request parameters converted to a Map&lt;String, String[]&gt;.
 * Single String values are wrapped in a one-element array. Returns null when
 * there is no request in the context.
 */
public Map<String, String[]> getRequestParameterMap() {
  XWikiRequest request = getContext().getRequest();
  if (request == null) {
    return null;
  }
  Map<?, ?> requestMap = request.getParameterMap();
  Map<String, String[]> convertedMap = new HashMap<String, String[]>();
  for (Map.Entry<?, ?> entry : requestMap.entrySet()) {
    String key = entry.getKey().toString();
    convertedMap.put(key, getValueAsStringArray(entry.getValue()));
  }
  return convertedMap;
}
/**
 * Normalises a request-map value to a String[]: arrays pass through, a single
 * String is wrapped; anything else is rejected.
 *
 * @throws IllegalArgumentException for unsupported value types
 */
private String[] getValueAsStringArray(Object value) {
  if (value instanceof String[]) {
    return (String[]) value;
  }
  if (value instanceof String) {
    return new String[] { (String) value };
  }
  throw new IllegalArgumentException("Invalid requestMap value type");
}
/**
 * Resolves a template reference to a render path with inheritance:
 * an existing local doc keeps its plain path; a missing local doc falls back
 * to the central "celements2web" wiki if the template exists there; otherwise
 * a ":"-prefixed path marks a disk-template fallback. The current database
 * prefix is always stripped from the result. Returns null for a null input.
 */
public String getInheritedTemplatedPath(DocumentReference localTemplateRef) {
  if (localTemplateRef != null) {
    String templatePath = getRefDefaultSerializer().serialize(localTemplateRef);
    if (!getContext().getWiki().exists(localTemplateRef, getContext())) {
      // Not local: prefer the central wiki copy unless the ref already points
      // into celements2web.
      if (!"celements2web".equals(localTemplateRef.getLastSpaceReference().getParent(
          ).getName())
          && getContext().getWiki().exists(getCentralTemplateRef(localTemplateRef),
              getContext())) {
        templatePath = "celements2web:" + templatePath;
      } else {
        // Leading ":" marks "resolve from disk templates".
        templatePath = ":" + templatePath.replaceAll("celements2web:", "");
      }
    }
    return templatePath.replaceAll(getContext().getDatabase() + ":", "");
  }
  return null;
}
/** Same space/page as {@code localTemplateRef}, but in the central "celements2web" wiki. */
private DocumentReference getCentralTemplateRef(DocumentReference localTemplateRef) {
  return new DocumentReference("celements2web",
      localTemplateRef.getLastSpaceReference().getName(), localTemplateRef.getName());
}
/**
 * Deletes {@code doc}, temporarily switching the context database to the
 * document's own wiki so cross-wiki deletes work.
 *
 * @param doc document to delete
 * @param totrash if true, move to the recycle bin instead of erasing
 * @throws XWikiException if the store-level delete fails
 */
public void deleteDocument(XWikiDocument doc, boolean totrash) throws XWikiException {
  /** deleteDocument in XWiki does NOT set context and store database to doc database
   * Thus deleting the doc fails if it is not in the current context database. Hence we
   * need to fix the context database before deleting.
   */
  String dbBefore = getContext().getDatabase();
  try {
    // The wiki name is the parent of the document's last space reference.
    getContext().setDatabase(doc.getDocumentReference().getLastSpaceReference().getParent(
        ).getName());
    LOGGER.debug("deleteDocument: doc [" + getRefDefaultSerializer().serialize(
        doc.getDocumentReference()) + "," + doc.getLanguage() + "] totrash [" + totrash
        + "] dbBefore [" + dbBefore + "] db now [" + getContext().getDatabase() + "].");
    getContext().getWiki().deleteDocument(doc, totrash, getContext());
  } finally {
    // Always restore the caller's database, even if the delete throws.
    getContext().setDatabase(dbBefore);
  }
}
/**
 * Deletes every translation of {@code doc} and finally the main document
 * itself.
 *
 * @throws XWikiException if any of the deletes fails
 */
public void deleteAllDocuments(XWikiDocument doc, boolean totrash
    ) throws XWikiException {
  for (String lang : doc.getTranslationList(getContext())) {
    XWikiDocument translation = doc.getTranslatedDocument(lang, getContext());
    deleteDocument(translation, totrash);
  }
  // The untranslated (main) document goes last.
  deleteDocument(doc, totrash);
}
/** Language-independent variant: delegates with a null language (no suffix). */
public String getTemplatePathOnDisk(String renderTemplatePath) {
  String noLang = null;
  return getTemplatePathOnDisk(renderTemplatePath, noLang);
}
/**
 * Maps a logical ":"-prefixed template path onto its on-disk location
 * "/templates/&lt;dir&gt;/&lt;name&gt;[_&lt;lang&gt;].vm" using the configured
 * path mappings. Returns the input unchanged when no mapping prefix matches.
 */
public String getTemplatePathOnDisk(String renderTemplatePath, String lang) {
  for (Map.Entry<Object, Object> entry : tempPathConfig.getMappings().entrySet()) {
    String pathName = (String) entry.getKey();
    if (renderTemplatePath.startsWith(":" + pathName)) {
      // Strip the ":<mapping>." prefix and prepend the mapped directory.
      String newRenderTemplatePath = renderTemplatePath.replaceAll("^:(" + pathName
          + "\\.)?", "/templates/" + ((String) entry.getValue()) + "/")
          + getTemplatePathLangSuffix(lang) + ".vm";
      LOGGER.debug("getTemplatePathOnDisk: for [" + renderTemplatePath + "] and lang ["
          + lang + "] returning [" + newRenderTemplatePath + "].");
      return newRenderTemplatePath;
    }
  }
  return renderTemplatePath;
}
/** File-name suffix for a language ("_de") or "" when no language is given. */
private String getTemplatePathLangSuffix(String lang) {
  return (lang != null) ? ("_" + lang) : "";
}
/** Renders an inheritable document without a default-language fallback. */
public String renderInheritableDocument(DocumentReference docRef, String lang
    ) throws XWikiException {
  String noDefLang = null;
  return renderInheritableDocument(docRef, lang, noDefLang);
}
/**
 * Renders the inheritable template resolved for {@code docRef} in the given
 * language, falling back to {@code defLang}.
 *
 * @throws XWikiException if rendering fails
 */
public String renderInheritableDocument(DocumentReference docRef, String lang,
    String defLang) throws XWikiException {
  RenderCommand renderCommand = new RenderCommand();
  // Tests may inject a rendering engine; use it when present.
  if (this.injectedRenderingEngine != null) {
    renderCommand.setRenderingEngine(this.injectedRenderingEngine);
  }
  String templatePath = getInheritedTemplatedPath(docRef);
  LOGGER.debug("renderInheritableDocument: call renderTemplatePath for ["
      + templatePath + "] and lang [" + lang + "] and defLang [" + defLang + "].");
  return renderCommand.renderTemplatePath(templatePath, lang, defLang);
}
/** Existence check without a default-language fallback. */
public boolean existsInheritableDocument(DocumentReference docRef, String lang) {
  String noDefLang = null;
  return existsInheritableDocument(docRef, lang, noDefLang);
}
/**
 * True if the inheritable document resolved for {@code docRef} is available:
 * either it exists as a wiki document (non-":" path) or its disk template has
 * non-empty content for one of the given languages.
 * NOTE(review): getInheritedTemplatedPath returns null for a null docRef,
 * which would NPE on startsWith below — presumably callers always pass a
 * non-null ref; verify.
 */
public boolean existsInheritableDocument(DocumentReference docRef, String lang,
    String defLang) {
  String templatePath = getInheritedTemplatedPath(docRef);
  LOGGER.debug("existsInheritableDocument: check content for templatePath ["
      + templatePath + "] and lang [" + lang + "] and defLang [" + defLang + "].");
  if (templatePath.startsWith(":")) {
    // ":" prefix means "not found as a wiki document" -> look on disk.
    return !StringUtils.isEmpty(getTranslatedDiscTemplateContent(templatePath, lang,
        defLang));
  } else {
    //Template must exist otherwise getInheritedTemplatedPath would have fallen back
    //on disk template path.
    return true;
  }
}
/**
 * Lazily creates and caches one PageLayoutCommand per request in the XWiki
 * context, so all callers within the request share the same instance.
 */
private PageLayoutCommand getPageLayoutCmd() {
  if (!getContext().containsKey(CelementsWebPluginApi.CELEMENTS_PAGE_LAYOUT_COMMAND)) {
    getContext().put(CelementsWebPluginApi.CELEMENTS_PAGE_LAYOUT_COMMAND,
        new PageLayoutCommand());
  }
  return (PageLayoutCommand) getContext().get(
      CelementsWebPluginApi.CELEMENTS_PAGE_LAYOUT_COMMAND);
}
/**
 * @deprecated derives the layout from the current context document; prefer
 *     the overload taking an explicit DocumentReference or SpaceReference.
 */
@Deprecated
public String cleanupXHTMLtoHTML5(String xhtml) {
  return cleanupXHTMLtoHTML5(xhtml, getContext().getDoc().getDocumentReference());
}
/**
 * @deprecated resolves the page layout for {@code docRef}; prefer the
 *     overload taking the layout SpaceReference directly.
 */
@Deprecated
public String cleanupXHTMLtoHTML5(String xhtml, DocumentReference docRef) {
  return cleanupXHTMLtoHTML5(xhtml, getPageLayoutCmd().getPageLayoutForDoc(docRef));
}
/**
 * Converts XHTML markup to HTML5 when the given layout declares the
 * "HTML 5" doctype; otherwise the input is returned unchanged.
 *
 * @deprecated marked deprecated in the original code; no replacement visible
 *     in this part of the file.
 */
@Deprecated
public String cleanupXHTMLtoHTML5(String xhtml, SpaceReference layoutRef) {
  BaseObject layoutObj = getPageLayoutCmd().getLayoutPropertyObj(layoutRef);
  if((layoutObj != null) && "HTML 5".equals(layoutObj.getStringValue("doctype"))) {
    XHTMLtoHTML5cleanup html5Cleaner = Utils.getComponent(XHTMLtoHTML5cleanup.class);
    return html5Cleaner.cleanAll(xhtml);
  }
  return xhtml;
}
/**
 * Reads the disk-template content for {@code renderTemplatePath}, trying
 * {@code lang} first, then {@code defLang}, and finally the language-less
 * default template. Returns "" when nothing is found.
 *
 * FIX: previously the language loop kept reading after a successful hit, so
 * a present defLang template silently overwrote the already-found lang
 * content (and a missing defLang file reset it to ""). The loop now stops at
 * the first language that yields content.
 */
public String getTranslatedDiscTemplateContent(String renderTemplatePath, String lang,
    String defLang) {
  List<String> langList = new ArrayList<String>();
  if (lang != null) {
    langList.add(lang);
  }
  if ((defLang != null) && !defLang.equals(lang)) {
    langList.add(defLang);
  }
  String templateContent = "";
  for (String theLang : langList) {
    String templatePath = getTemplatePathOnDisk(renderTemplatePath, theLang);
    try {
      templateContent = getContext().getWiki().getResourceContent(templatePath);
    } catch (FileNotFoundException fnfExp) {
      LOGGER.trace("FileNotFound [" + templatePath + "].");
      templateContent = "";
    } catch (IOException exp) {
      LOGGER.debug("Exception while parsing template [" + templatePath + "].", exp);
      templateContent = "";
    }
    if (!"".equals(templateContent)) {
      // First language with content wins; do not let the fallback overwrite it.
      break;
    }
  }
  if ("".equals(templateContent)) {
    // Last resort: the language-less default template.
    String templatePathDef = getTemplatePathOnDisk(renderTemplatePath);
    try {
      templateContent = getContext().getWiki().getResourceContent(templatePathDef);
    } catch (FileNotFoundException fnfExp) {
      LOGGER.trace("FileNotFound [" + templatePathDef + "].");
      return "";
    } catch (IOException exp) {
      LOGGER.debug("Exception while parsing template [" + templatePathDef + "].",
          exp);
      return "";
    }
  }
  return templateContent;
}
} |
package com.github.hi_fi.testapp;
import java.awt.FlowLayout;
import java.awt.GridLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JTextField;
/**
 * Minimal Swing window used as a UI-automation target for the Remote Sikuli
 * Library: three buttons, a text field and a status label reporting which
 * button was clicked last.
 */
public class TestSwingApp {
    private JFrame mainFrame;
    private JLabel headerLabel;
    // Shows which button was clicked last.
    private JLabel statusLabel;
    private JPanel controlPanel;
    private JPanel textPanel;
    private JTextField textField;
    public TestSwingApp() {
        prepareGUI();
    }
    public static void main(String[] args) {
        System.out.println("Starting test app");
        TestSwingApp testSwingApp = new TestSwingApp();
        testSwingApp.showEventDemo();
    }
    // Builds the frame: four stacked rows (header, buttons, status, text input).
    private void prepareGUI() {
        mainFrame = new JFrame("Test application for Remote Sikuli Library");
        mainFrame.setSize(400, 400);
        mainFrame.setLayout(new GridLayout(4, 1));
        headerLabel = new JLabel("", JLabel.CENTER);
        statusLabel = new JLabel("", JLabel.CENTER);
        statusLabel.setSize(350, 100);
        // Terminate the whole JVM when the window is closed.
        mainFrame.addWindowListener(new WindowAdapter() {
            public void windowClosing(WindowEvent windowEvent) {
                System.exit(0);
            }
        });
        controlPanel = new JPanel();
        controlPanel.setLayout(new FlowLayout());
        textPanel = new JPanel();
        textPanel.setLayout(new FlowLayout());
        mainFrame.add(headerLabel);
        mainFrame.add(controlPanel);
        mainFrame.add(statusLabel);
        mainFrame.add(textPanel);
        mainFrame.setVisible(true);
    }
    // Populates the panels with the OK/Submit/Cancel buttons and the text field.
    private void showEventDemo() {
        headerLabel.setText("Control in action: Button");
        JButton okButton = new JButton("OK");
        JButton submitButton = new JButton("Submit");
        JButton cancelButton = new JButton("Cancel");
        JLabel label = new JLabel("Test field: ", JLabel.RIGHT);
        textField = new JTextField(20);
        okButton.setActionCommand("OK");
        submitButton.setActionCommand("Submit");
        cancelButton.setActionCommand("Cancel");
        okButton.addActionListener(new ButtonClickListener());
        submitButton.addActionListener(new ButtonClickListener());
        cancelButton.addActionListener(new ButtonClickListener());
        controlPanel.add(okButton);
        controlPanel.add(submitButton);
        controlPanel.add(cancelButton);
        textPanel.add(label);
        textPanel.add(textField);
        mainFrame.setVisible(true);
    }
    // Updates the status label according to the action command of the clicked button.
    private class ButtonClickListener implements ActionListener {
        public void actionPerformed(ActionEvent e) {
            String command = e.getActionCommand();
            if (command.equals("OK")) {
                statusLabel.setText("Ok Button clicked.");
            } else if (command.equals("Submit")) {
                statusLabel.setText("Submit Button clicked.");
            } else {
                statusLabel.setText("Cancel Button clicked.");
            }
        }
    }
}
package com.codeborne.selenide;
import com.codeborne.selenide.ex.UIAssertionError;
import com.codeborne.selenide.impl.*;
import com.codeborne.selenide.logevents.SelenideLog;
import com.codeborne.selenide.logevents.SelenideLogger;
import org.openqa.selenium.WebDriverException;
import org.openqa.selenium.WebElement;
import java.util.*;
import static com.codeborne.selenide.Condition.not;
import static com.codeborne.selenide.Configuration.*;
import static com.codeborne.selenide.Selenide.$;
import static com.codeborne.selenide.Selenide.sleep;
import static com.codeborne.selenide.logevents.ErrorsCollector.validateAssertionMode;
import static com.codeborne.selenide.logevents.LogEvent.EventStatus.PASS;
import static java.util.stream.Collectors.toList;
/**
 * Lazy list of web elements matched by a collection selector. Elements are
 * (re-)fetched from the underlying {@link WebElementsCollection}; the
 * should*/shouldHave* methods poll until the given conditions hold or the
 * configured collections timeout elapses.
 */
public class ElementsCollection extends AbstractList<SelenideElement> {
  // Describes how to (re-)fetch the underlying WebElements.
  private final WebElementsCollection collection;
  // Cached snapshot from the last fetch; null until first use.
  private List<WebElement> actualElements;
  // Last WebDriverException seen while polling; reported on condition failure.
  private Exception lastError;
  public ElementsCollection(WebElementsCollection collection) {
    this.collection = collection;
  }
  /**
   * Checks is the collection is of given size
   *
   * @param expectedSize
   * @return ElementsCollection
   */
  public ElementsCollection shouldHaveSize(int expectedSize) {
    return shouldHave(CollectionCondition.size(expectedSize));
  }
  /**
   * $$(".error").shouldBe(empty)
   */
  public ElementsCollection shouldBe(CollectionCondition... conditions) {
    return should("be", conditions);
  }
  /**
   * $$(".error").shouldHave(size(3))
   * $$(".error").shouldHave(texts("Error1", "Error2"))
   */
  public ElementsCollection shouldHave(CollectionCondition... conditions) {
    return should("have", conditions);
  }
  // Waits for every condition; logs the step and honours the assertion mode
  // (SOFT swallows assertion errors, default rethrows wrapped).
  protected ElementsCollection should(String prefix, CollectionCondition... conditions) {
    validateAssertionMode();
    SelenideLog log = SelenideLogger.beginStep(collection.description(), "should " + prefix, conditions);
    try {
      for (CollectionCondition condition : conditions) {
        waitUntil(condition, collectionsTimeout);
      }
      SelenideLogger.commitStep(log, PASS);
      return this;
    }
    catch (Error error) {
      SelenideLogger.commitStep(log, error);
      switch (assertionMode) {
        case SOFT:
          return this;
        default:
          throw UIAssertionError.wrap(error, collectionsTimeout);
      }
    }
    catch (RuntimeException e) {
      SelenideLogger.commitStep(log, e);
      throw e;
    }
  }
  // Polls the collection until the condition holds on two consecutive polls
  // (guards against transient DOM states) or the timeout elapses; on timeout
  // the condition itself reports the failure.
  protected void waitUntil(CollectionCondition condition, long timeoutMs) {
    lastError = null;
    final long startTime = System.currentTimeMillis();
    boolean conditionMatched = false;
    do {
      try {
        actualElements = collection.getActualElements();
        if (condition.apply(actualElements)) {
          if (conditionMatched) {
            // Matched twice in a row -> stable, done.
            return;
          } else {
            conditionMatched = true;
            sleep(collectionsPollingInterval);
            continue;
          }
        } else {
          conditionMatched = false;
        }
      } catch (WebDriverException elementNotFound) {
        // Remember the error for the failure report; invalid selectors are fatal.
        lastError = elementNotFound;
        if (Cleanup.of.isInvalidSelectorError(elementNotFound)) {
          throw Cleanup.of.wrap(elementNotFound);
        }
      }
      sleep(collectionsPollingInterval);
    }
    while (System.currentTimeMillis() - startTime < timeoutMs);
    // Timed out: final check, then delegate failure reporting to the condition.
    if (!condition.apply(actualElements)) {
      condition.fail(collection, actualElements, lastError, timeoutMs);
    }
  }
  /**
   * Filters collection elements based on the given condition
   * @param condition
   * @return ElementsCollection
   */
  public ElementsCollection filter(Condition condition) {
    return new ElementsCollection(new FilteringCollection(collection, condition));
  }
  /**
   * Filters collection elements based on the given condition
   * @see #filter(Condition)
   * @param condition
   * @return ElementsCollection
   */
  public ElementsCollection filterBy(Condition condition) {
    return filter(condition);
  }
  /**
   * Filters elements excluding those which met the given condition
   * @param condition
   * @return ElementsCollection
   */
  public ElementsCollection exclude(Condition condition) {
    return new ElementsCollection(new FilteringCollection(collection, not(condition)));
  }
  /**
   * Filters elements excluding those which met the given condition
   * @see #exclude(Condition)
   * @param condition
   * @return ElementsCollection
   */
  public ElementsCollection excludeWith(Condition condition) {
    return exclude(condition);
  }
  /**
   * Find the first element which met the given condition
   * @param condition
   * @return SelenideElement
   */
  public SelenideElement find(Condition condition) {
    return filter(condition).get(0);
  }
  /**
   * Find the first element which met the given condition
   * @see #find(Condition)
   * @param condition
   * @return SelenideElement
   */
  public SelenideElement findBy(Condition condition) {
    return find(condition);
  }
  // Fetches the elements once and caches them for subsequent size()/toString().
  private List<WebElement> getActualElements() {
    if (actualElements == null) {
      actualElements = collection.getActualElements();
    }
    return actualElements;
  }
  /**
   * Gets all the texts in elements collection
   * @return array of texts
   */
  public List<String> texts() {
    return texts(getActualElements());
  }
  /**
   * @deprecated Use method com.codeborne.selenide.ElementsCollection#texts() that returns List instead of array
   */
  @Deprecated
  public String[] getTexts() {
    return getTexts(getActualElements());
  }
  /**
   * Fail-safe method for retrieving texts of given elements.
   * @param elements Any collection of WebElements
   * @return Array of texts (or exceptions in case of any WebDriverExceptions)
   */
  public static List<String> texts(Collection<WebElement> elements) {
    return elements.stream().map(e -> getText(e)).collect(toList());
  }
  /**
   * @deprecated Use method com.codeborne.selenide.ElementsCollection#texts(java.util.Collection)
   * that returns List instead of array
   */
  @Deprecated
  public static String[] getTexts(Collection<WebElement> elements) {
    String[] texts = new String[elements.size()];
    int i = 0;
    for (WebElement element : elements) {
      texts[i++] = getText(element);
    }
    return texts;
  }
  // Fail-safe single-element text: a disappeared element yields its exception text.
  private static String getText(WebElement element) {
    try {
      return element.getText();
    } catch (WebDriverException elementDisappeared) {
      return elementDisappeared.toString();
    }
  }
  /**
   * Outputs string presentation of the element's collection
   * @param elements
   * @return String
   */
  public static String elementsToString(Collection<WebElement> elements) {
    if (elements == null) {
      return "[not loaded yet...]";
    }
    if (elements.isEmpty()) {
      return "[]";
    }
    StringBuilder sb = new StringBuilder(256);
    sb.append("[\n\t");
    for (WebElement element : elements) {
      if (sb.length() > 4) {
        sb.append(",\n\t");
      }
      sb.append($(element));
    }
    sb.append("\n]");
    return sb.toString();
  }
  @Override
  public SelenideElement get(int index) {
    return CollectionElement.wrap(collection, index);
  }
  /**
   * return the first element of the collection
   * @return
   */
  public SelenideElement first() {
    return get(0);
  }
  /**
   * return the last element of the collection
   * @return
   */
  public SelenideElement last() {
    return get(size() - 1);
  }
  @Override
  public int size() {
    return getActualElements().size();
  }
  @Override
  public Iterator<SelenideElement> iterator() {
    return new SelenideElementIterator(collection);
  }
  @Override
  public ListIterator<SelenideElement> listIterator(int index) {
    return new SelenideElementListIterator(collection, index);
  }
  @Override
  public String toString() {
    try {
      return elementsToString(getActualElements());
    } catch (Exception e) {
      return String.format("[%s]", Cleanup.of.webdriverExceptionMessage(e));
    }
  }
}
package com.networknt.schema;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import io.undertow.Undertow;
import io.undertow.server.handlers.resource.FileResourceManager;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import java.io.File;
import java.io.InputStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import static io.undertow.Handlers.resource;
import static org.junit.Assert.assertEquals;
public class V6JsonSchemaTest {
// Shared JSON mapper used to read the test-suite files and build schemas.
protected ObjectMapper mapper = new ObjectMapper();
// Schema factory pre-configured for JSON Schema draft 6.
protected JsonSchemaFactory validatorFactory = JsonSchemaFactory.builder(JsonSchemaFactory.getInstance(SpecVersion.VersionFlag.V6)).objectMapper(mapper).build();
// Local HTTP server serving the draft6 fixture folder; shared by all tests.
protected static Undertow server = null;
// No per-instance setup needed; the server is managed in setUp/tearDown.
public V6JsonSchemaTest() {
}
@BeforeClass
public static void setUp() {
    if(server == null) {
        // Serve ./src/test/resources/draft6 on localhost:1234 so schemas with
        // remote $refs can be resolved during the tests.
        server = Undertow.builder()
                .addHttpListener(1234, "localhost")
                .setHandler(resource(new FileResourceManager(
                        new File("./src/test/resources/draft6"), 100)))
                .build();
        server.start();
    }
}
@AfterClass
public static void tearDown() throws Exception {
    if(server != null) {
        try {
            // Give in-flight requests a moment to complete before stopping.
            Thread.sleep(100);
        } catch (InterruptedException ignored) {
        }
        server.stop();
    }
}
private void runTestFile(String testCaseFile) throws Exception {
final URI testCaseFileUri = URI.create("classpath:" + testCaseFile);
InputStream in = Thread.currentThread().getContextClassLoader()
.getResourceAsStream(testCaseFile);
ArrayNode testCases = mapper.readValue(in, ArrayNode.class);
for (int j = 0; j < testCases.size(); j++) {
try {
JsonNode testCase = testCases.get(j);
SchemaValidatorsConfig config = new SchemaValidatorsConfig();
ArrayNode testNodes = (ArrayNode) testCase.get("tests");
for (int i = 0; i < testNodes.size(); i++) {
JsonNode test = testNodes.get(i);
JsonNode node = test.get("data");
JsonNode typeLooseNode = test.get("isTypeLoose");
// Configure the schemaValidator to set typeLoose's value based on the test file,
// if test file do not contains typeLoose flag, use default value: true.
config.setTypeLoose((typeLooseNode == null) ? false : typeLooseNode.asBoolean());
JsonSchema schema = validatorFactory.getSchema(testCaseFileUri, testCase.get("schema"), config);
List<ValidationMessage> errors = new ArrayList<ValidationMessage>();
errors.addAll(schema.validate(node));
if (test.get("valid").asBoolean()) {
if (!errors.isEmpty()) {
System.out.println("
System.out.println("schema: " + schema.toString());
System.out.println("data: " + test.get("data"));
}
assertEquals(0, errors.size());
} else {
if (errors.isEmpty()) {
System.out.println("
System.out.println("schema: " + schema);
System.out.println("data: " + test.get("data"));
} else {
JsonNode errorCount = test.get("errorCount");
if (errorCount != null && errorCount.isInt() && errors.size() != errorCount.asInt()) {
System.out.println("
System.out.println("schema: " + schema);
System.out.println("data: " + test.get("data"));
System.out.println("errors: " + errors);
assertEquals("expected error count", errorCount.asInt(), errors.size());
}
}
assertEquals(false, errors.isEmpty());
}
}
} catch (JsonSchemaException e) {
throw new IllegalStateException(String.format("Current schema should not be invalid: %s", testCaseFile), e);
}
}
}
// One test method per draft-6 suite file; @Ignore marks suites whose
// features are not (fully) supported by the validator.
@Test
public void testOptionalBignumValidator() throws Exception {
    runTestFile("draft6/optional/bignum.json");
}
@Test
@Ignore
public void testEcmascriptRegexValidator() throws Exception {
    runTestFile("draft6/optional/ecmascript-regex.json");
}
@Test
@Ignore
public void testZeroTerminatedFloatsValidator() throws Exception {
    runTestFile("draft6/optional/zeroTerminatedFloats.json");
}
@Test
@Ignore
public void testOptionalFormatValidator() throws Exception {
    runTestFile("draft6/optional/format.json");
}
@Test
public void testAdditionalItemsValidator() throws Exception {
    runTestFile("draft6/additionalItems.json");
}
@Test
public void testAdditionalPropertiesValidator() throws Exception {
    runTestFile("draft6/additionalProperties.json");
}
@Test
public void testAllOfValidator() throws Exception {
    runTestFile("draft6/allOf.json");
}
@Test
public void testAnyOfValidator() throws Exception {
    runTestFile("draft6/anyOf.json");
}
@Test
public void testBooleanSchemaValidator() throws Exception {
    runTestFile("draft6/boolean_schema.json");
}
@Test
public void testConstValidator() throws Exception {
    runTestFile("draft6/const.json");
}
@Test
@Ignore
public void testContainsValidator() throws Exception {
    runTestFile("draft6/contains.json");
}
@Test
public void testDefaultValidator() throws Exception {
    runTestFile("draft6/default.json");
}
@Test
public void testDefinitionsValidator() throws Exception {
    runTestFile("draft6/definitions.json");
}
@Test
@Ignore
public void testDependenciesValidator() throws Exception {
    runTestFile("draft6/dependencies.json");
}
@Test
public void testEnumValidator() throws Exception {
    runTestFile("draft6/enum.json");
}
@Test
public void testExclusiveMaximumValidator() throws Exception {
    runTestFile("draft6/exclusiveMaximum.json");
}
@Test
public void testExclusiveMinimumValidator() throws Exception {
    runTestFile("draft6/exclusiveMinimum.json");
}
@Test
public void testFormatValidator() throws Exception {
    runTestFile("draft6/format.json");
}
@Test
public void testItemsValidator() throws Exception {
    runTestFile("draft6/items.json");
}
@Test
public void testMaximumValidator() throws Exception {
    runTestFile("draft6/maximum.json");
}
@Test
public void testMaxItemsValidator() throws Exception {
    runTestFile("draft6/maxItems.json");
}
@Test
public void testMaxLengthValidator() throws Exception {
    runTestFile("draft6/maxLength.json");
}
@Test
public void testMaxPropertiesValidator() throws Exception {
    runTestFile("draft6/maxProperties.json");
}
@Test
public void testMinimumValidator() throws Exception {
    runTestFile("draft6/minimum.json");
}
@Test
public void testMinItemsValidator() throws Exception {
    runTestFile("draft6/minItems.json");
}
@Test
public void testMinLengthValidator() throws Exception {
    runTestFile("draft6/minLength.json");
}
@Test
public void testMinPropertiesValidator() throws Exception {
    runTestFile("draft6/minProperties.json");
}
@Test
public void testMultipleOfValidator() throws Exception {
    runTestFile("draft6/multipleOf.json");
}
@Test
public void testNotValidator() throws Exception {
    runTestFile("draft6/not.json");
}
@Test
public void testOneOfValidator() throws Exception {
    runTestFile("draft6/oneOf.json");
}
@Test
public void testPatternValidator() throws Exception {
    runTestFile("draft6/pattern.json");
}
@Test
public void testPatternPropertiesValidator() throws Exception {
    runTestFile("draft6/patternProperties.json");
}
@Test
public void testPropertiesValidator() throws Exception {
    runTestFile("draft6/properties.json");
}
@Test
@Ignore
public void testPropertyNamesValidator() throws Exception {
    runTestFile("draft6/propertyNames.json");
}
@Test
@Ignore
public void testRefValidator() throws Exception {
runTestFile("draft6/ref.json");
}
@Test
@Ignore
public void testRefRemoteValidator() throws Exception {
runTestFile("draft6/refRemote.json");
}
@Test
public void testRequiredValidator() throws Exception {
runTestFile("draft6/required.json");
}
@Test
public void testTypeValidator() throws Exception {
runTestFile("draft6/type.json");
}
@Test
public void testUniqueItemsValidator() throws Exception {
runTestFile("draft6/uniqueItems.json");
}
} |
package com.commafeed.backend.model;
import java.io.Serializable;
import java.util.Objects;

import javax.persistence.Cacheable;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.Table;

import org.hibernate.annotations.Cache;
import org.hibernate.annotations.CacheConcurrencyStrategy;
@Entity
@Table(name = "FEED_FEEDENTRIES")
@SuppressWarnings("serial")
@Cacheable
@Cache(usage = CacheConcurrencyStrategy.TRANSACTIONAL)
public class FeedFeedEntry implements Serializable {
@Id
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "FEED_ID")
private Feed feed;
@Id
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "FEEDENTRY_ID")
private FeedEntry entry;
public FeedFeedEntry() {
}
public FeedFeedEntry(Feed feed, FeedEntry entry) {
this.feed = feed;
this.entry = entry;
}
public Feed getFeed() {
return feed;
}
public void setFeed(Feed feed) {
this.feed = feed;
}
public FeedEntry getEntry() {
return entry;
}
public void setEntry(FeedEntry entry) {
this.entry = entry;
}
} |
package com.epam.ta.reportportal.job;
import com.epam.ta.reportportal.database.dao.*;
import com.epam.ta.reportportal.database.entity.item.TestItem;
import com.epam.ta.reportportal.database.entity.project.KeepLogsDelay;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import java.time.Duration;
import java.time.Instant;
import java.util.Date;
import java.util.Optional;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static com.epam.ta.reportportal.database.entity.project.KeepLogsDelay.findByName;
import static com.epam.ta.reportportal.job.PageUtil.iterateOverPages;
import static java.time.Duration.ofDays;
/**
* Clean logs job in accordance with project settings
*
* @author Andrei Varabyeu
* @author Pavel Borntik
*/
@Service
public class CleanLogsJob implements Job {
public static final int DEFAULT_THREAD_COUNT = 5;
public static final long JOB_EXECUTION_TIMEOUT = 1L;
private static final Duration MIN_DELAY = Duration.ofDays(KeepLogsDelay.TWO_WEEKS.getDays() - 1);
private static final Logger LOGGER = LoggerFactory.getLogger(CleanLogsJob.class);
@Autowired
private LogRepository logRepo;
@Autowired
private LaunchRepository launchRepo;
@Autowired
private TestItemRepository testItemRepo;
@Autowired
private ProjectRepository projectRepository;
@Autowired
private ActivityRepository activityRepository;
@Autowired
@Value("${com.ta.reportportal.job.clean.logs.threads:5}")
private Integer threadsCount;
@Override
public void execute(JobExecutionContext context) {
LOGGER.debug("Cleaning outdated logs has been started");
ExecutorService executor = Executors.newFixedThreadPool(Optional.ofNullable(threadsCount).orElse(DEFAULT_THREAD_COUNT),
new ThreadFactoryBuilder().setNameFormat("clean-logs-job-thread-%d").build()
);
iterateOverPages(projectRepository::findAllIdsAndConfiguration, projects -> projects.forEach(project -> {
executor.submit(() -> {
try {
LOGGER.info("Cleaning outdated logs for project {} has been started", project.getId());
Duration period = ofDays(findByName(project.getConfiguration().getKeepLogs()).getDays());
if (!period.isZero()) {
activityRepository.deleteModifiedLaterAgo(project.getId(), period);
removeOutdatedLogs(project.getId(), period);
}
} catch (Exception e) {
LOGGER.debug("Cleaning outdated logs for project {} has been failed", project.getId(), e);
}
LOGGER.info("Cleaning outdated logs for project {} has been finished", project.getId());
});
}));
executor.shutdown();
try {
LOGGER.info("Awaiting cleaning outdated screenshot to finish");
executor.awaitTermination(JOB_EXECUTION_TIMEOUT, TimeUnit.DAYS);
} catch (InterruptedException e) {
throw new RuntimeException("Job Execution timeout exceeded", e);
}
}
private void removeOutdatedLogs(String projectId, Duration period) {
Date endDate = Date.from(Instant.now().minusSeconds(MIN_DELAY.getSeconds()));
AtomicLong countPerProject = new AtomicLong(0);
iterateOverPages(pageable -> launchRepo.findModifiedBefore(projectId, endDate, pageable), launches -> {
launches.forEach(launch -> {
try (Stream<TestItem> testItemStream = testItemRepo.streamIdsByLaunch(launch.getId())) {
long count = logRepo.deleteByPeriodAndItemsRef(period,
testItemStream.map(TestItem::getId).collect(Collectors.toList())
);
countPerProject.addAndGet(count);
} catch (Exception e) {
//do nothing
}
});
});
LOGGER.info("Removed {} logs for project {}", countPerProject.get(), projectId);
}
} |
package moe.pine.bottler;
import org.apache.commons.io.IOUtils;
import org.junit.Assert;
import org.junit.Test;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.net.*;
import java.util.List;
public class CookieStoreUtilsTest {
@Test
public void testConstructor() {
// Bean needs default constructor
new CookieStoreUtils();
}
@Test
public void testSaveTo() throws URISyntaxException, IOException {
CookieManager cookieManager =
new CookieManager(null, CookiePolicy.ACCEPT_ALL);
CookieManager newCookieManager =
new CookieManager(null, CookiePolicy.ACCEPT_ALL);
CookieStore cookieStore = cookieManager.getCookieStore();
CookieStore newCookieStore = newCookieManager.getCookieStore();
URI uri = new URI("http:
HttpCookie cookie = new HttpCookie("hello", "world");
cookieStore.add(uri, cookie);
ByteArrayOutputStream os = new ByteArrayOutputStream();
CookieStoreUtils.writeTo(cookieStore, os);
cookieStore.removeAll();
ByteArrayInputStream is = new ByteArrayInputStream(os.toByteArray());
IOUtils.closeQuietly(os);
CookieStoreUtils.readFrom(newCookieStore, is);
IOUtils.closeQuietly(is);
List<HttpCookie> saveCookie = newCookieStore.get(uri);
Assert.assertEquals("hello", saveCookie.get(0).getName());
Assert.assertEquals("world", saveCookie.get(0).getValue());
}
} |
package net.sf.gaboto.test;
import net.sf.gaboto.Gaboto;
import net.sf.gaboto.GabotoSnapshot;
import net.sf.gaboto.SPARQLQuerySolutionProcessorImpl;
import net.sf.gaboto.node.GabotoEntity;
import net.sf.gaboto.node.pool.EntityPool;
import net.sf.gaboto.node.pool.EntityPoolConfiguration;
import net.sf.gaboto.time.TimeInstant;
import net.sf.gaboto.util.GabotoPredefinedQueries;
import net.sf.gaboto.vocabulary.OxPointsVocab;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import uk.ac.ox.oucs.oxpoints.OxpointsFactory;
import uk.ac.ox.oucs.oxpoints.gaboto.entities.College;
import com.hp.hpl.jena.query.QuerySolution;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.rdf.model.StmtIterator;
import com.hp.hpl.jena.vocabulary.DC_11;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertEquals;
public class TestGabotoSnapshot {
@BeforeClass
public static void setUp() throws Exception {
}
@AfterClass
public static void tearDown() throws Exception {
}
@Test
public void testSPARQLSelect() {
Gaboto oxp = OxpointsFactory.getOxpointsFromXML();
GabotoSnapshot nowSnap = oxp.getSnapshot(TimeInstant.now());
EntityPoolConfiguration config = new EntityPoolConfiguration(nowSnap);
config.addAcceptedType(OxPointsVocab.College_URI);
config.setAddReferencedEntitiesToPool(false);
EntityPool pool = EntityPool.createFrom(config);
for(GabotoEntity e : pool.getEntities()){
College col = (College) e;
final String name = col.getName();
final String uri = col.getUri();
String query = GabotoPredefinedQueries.getStandardPrefixes();
query += "SELECT ?name WHERE { <" + uri + "> dc:title ?name . }";
nowSnap.execSPARQLSelect(query, new SPARQLQuerySolutionProcessorImpl(){
private int i = 1;
public void processSolution(QuerySolution solution) {
assertEquals(1, i);
i++;
assertEquals(name, solution.getLiteral("name").getValue());
}
});
}
}
@Test
public void testSPARQLAsk() {
Gaboto oxp = OxpointsFactory.getOxpointsFromXML();
GabotoSnapshot nowSnap = oxp.getSnapshot(TimeInstant.now());
EntityPoolConfiguration config = new EntityPoolConfiguration(nowSnap);
config.addAcceptedType(OxPointsVocab.College_URI);
config.setAddReferencedEntitiesToPool(false);
EntityPool pool = EntityPool.createFrom(config);
for(GabotoEntity e : pool.getEntities()){
College col = (College) e;
final String name = col.getName();
final String uri = col.getUri();
String query = GabotoPredefinedQueries.getStandardPrefixes();
query += "ASK { <" + uri + "> dc:title \"" + name + "\" . }";
assertTrue(nowSnap.execSPARQLAsk(query));
}
}
@Test
public void testSPARQLDescribe() {
Gaboto oxp = OxpointsFactory.getOxpointsFromXML();
GabotoSnapshot nowSnap = oxp.getSnapshot(TimeInstant.now());
EntityPoolConfiguration config = new EntityPoolConfiguration(nowSnap);
config.addAcceptedType(OxPointsVocab.College_URI);
EntityPool pool = EntityPool.createFrom(config);
assertTrue("it is " + pool.getSize(), pool.getSize() != 0);
GabotoSnapshot collegeSnap = pool.createSnapshot();
String query = GabotoPredefinedQueries.getStandardPrefixes();
query += "PREFIX oxp: <" + OxPointsVocab.NS + ">\n";
query += "DESCRIBE ?x WHERE { ?x rdf:type oxp:College }";
GabotoSnapshot describedSnap = nowSnap.execSPARQLDescribe(query);
Model m1 = describedSnap.getModel();
Model m2 = collegeSnap.getModel();
StmtIterator it = m1.listStatements();
while(it.hasNext()){
Statement stmt = it.nextStatement();
if(! m2.contains(stmt))
System.out.println(stmt);
}
assertEquals(describedSnap.size(),collegeSnap.size());
}
@Test
public void testLoadEntities(){
Gaboto oxp = OxpointsFactory.getOxpointsFromXML();
GabotoSnapshot nowSnap = oxp.getSnapshot(TimeInstant.now());
EntityPool pool = nowSnap.loadEntitiesWithProperty(DC_11.title, "Somerville College");
assertTrue("it is " + pool.getSize(), pool.getSize() == 1);
}
/**
* This test fails every time oxpoints data is changed.
*/
@Test
public void testTimeSnapshots(){
Gaboto oxp = OxpointsFactory.getOxpointsFromXML();
GabotoSnapshot nowSnap = oxp.getSnapshot(TimeInstant.now());
GabotoSnapshot whenGreenWasExtant = oxp.getSnapshot(new TimeInstant(new Integer(1980), new Integer(11), new Integer(2)));
GabotoSnapshot beforeGreenExisted = oxp.getSnapshot(new TimeInstant(new Integer(1978), new Integer(11), new Integer(2)));
assertTrue("Input data has changed? Snapshot now contains " + nowSnap.size(), nowSnap.size() == 14906);
assertFalse(nowSnap.containsResource("http://m.ox.ac.uk/oxpoints/id/23232362"));
assertTrue(nowSnap.containsResource("http://m.ox.ac.uk/oxpoints/id/23232369"));
assertTrue("Input data has changed? Snapshot(1980) now contains " + whenGreenWasExtant.size(), whenGreenWasExtant.size() == 14905);
assertTrue(whenGreenWasExtant.containsResource("http://m.ox.ac.uk/oxpoints/id/23232362"));
assertTrue(whenGreenWasExtant.containsResource("http://m.ox.ac.uk/oxpoints/id/23232369"));
assertTrue("Input data has changed? Snapshot(1978) now contains " + beforeGreenExisted.size(), beforeGreenExisted.size() == 14883);
assertFalse(beforeGreenExisted.containsResource("http://m.ox.ac.uk/oxpoints/id/23232362"));
assertTrue(beforeGreenExisted.containsResource("http://m.ox.ac.uk/oxpoints/id/23232369"));
}
} |
package notpure.antlr4.macro;
import notpure.antlr4.macro.model.lang.ExpressionValue;
import notpure.antlr4.macro.model.lang.ExpressionValueType;
import notpure.antlr4.macro.model.parser.ParserExceptionListener;
import notpure.antlr4.macro.model.lang.Expression;
import notpure.antlr4.macro.model.lang.ExpressionType;
import notpure.antlr4.macro.model.lexer.token.Token;
import notpure.antlr4.macro.processor.lexer.SimpleLexer;
import notpure.antlr4.macro.processor.parser.SimpleParser;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
import static notpure.antlr4.macro.model.lang.ExpressionValueType.*;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
* A set of tests for {@link SimpleParser}. These tests rely on {@link SimpleLexerTest} passing.
*/
public final class SimpleParserTest {
/**
* Parses the input value into a list of {@link Expression} and ensures that the size is 1 and the only element
* matches the expectedStatement.
*/
private static void assertSingleStatement(String input, Expression expectedExpression) {
// Generate statements
List<Expression> output = statements(input);
// Assert values
assertEquals(1, output.size());
assertEquals(expectedExpression, output.get(0));
}
/**
* Parses the input value into a list of {@link Expression} and ensures that the size is 1 and the only element
* matches the expectedStatement.
*/
private static void assertDoubleStatement(String input, Expression expectedExpr1, Expression expectedExpr2) {
// Generate statements
List<Expression> output = statements(input);
// Assert values
assertEquals(2, output.size());
assertEquals(expectedExpr1, output.get(0));
assertEquals(expectedExpr2, output.get(1));
}
/**
* Parses the input value into a list of {@link Expression} and ensures that {@link SimpleParser#isErrorOccurred()}
* is true.
*/
private static void assertParsingError(String input) {
SimpleParser sp = (SimpleParser)new SimpleParser(new ParserExceptionListener.ParserExceptionNop())
.parse(tokens("grammar myGrammar"));
assertTrue(sp.isErrorOccurred());
}
/**
* Generates a list of {@link Expression} from the given input.
*/
private static List<Expression> statements(String input) {
List<Token> tokens = tokens(input);
return new SimpleParser().parse(tokens).getExpressions();
}
/**
* Generates a list of {@link Token} from the given input.
*/
private static List<Token> tokens(String input) {
return new SimpleLexer().tokenize(input).getTokens();
}
@Test
public void parserTestOfMacroRuleDefinitions() {
final ExpressionType type = ExpressionType.MACRO_RULE;
final ExpressionValue helloString = new ExpressionValue(STRING, "HELLO");
final Expression expr1 = new Expression(type, "HELLO_WORLD", helloString);
final Expression expr2 = new Expression(type, "hELLO290woRld", helloString);
final Expression expr3 = new Expression(type, "HELLO290woRld", helloString);
assertSingleStatement("#P:w;", new Expression(type, "P", new ExpressionValue(RULE_REFERENCE, "w")));
assertSingleStatement("#HELLO290woRld:'HELLO';", new Expression(type, "HELLO290woRld", helloString));
assertSingleStatement("#HELLO:HELLO;",
new Expression(type, "HELLO", new ExpressionValue(RULE_REFERENCE, "HELLO")));
assertSingleStatement("#HELLO_WORLD :'HELLO';", expr1);
assertSingleStatement("#HELLO_WORLD : 'HELLO' ;", expr1);
assertSingleStatement("#HELLO_WORLD : 'HELLO' ;", expr1);
assertSingleStatement("#HELLO_WORLD: 'HELLO' ;", expr1);
assertSingleStatement("#hELLO290woRld : 'HELLO' ;", expr2);
assertSingleStatement("#hELLO290woRld: 'HELLO' ;", expr2);
assertSingleStatement("#HELLO290woRld\r\n: \r\n'HELLO' ;", expr3);
assertSingleStatement("#HELLO290woRld : 'HELLO' ;", expr3);
assertSingleStatement("#HELLO290woRld :'HELLO\r\n|WORLD';",
new Expression(type, "HELLO290woRld", new ExpressionValue(STRING, "HELLO\r\n|WORLD")));
// UTF-8 in string
assertSingleStatement("
new Expression(type, "HELLO", new ExpressionValue(STRING, "HǺLLO")));
// Compound values
final List<ExpressionValue> expressionValues = new ArrayList<>();
expressionValues.add(new ExpressionValue(RULE_REFERENCE, "HELLO"));
expressionValues.add(new ExpressionValue(ALTERNATOR, "|"));
expressionValues.add(new ExpressionValue(RULE_REFERENCE, "WORLD"));
assertSingleStatement("#HELLO290woRld : \r\nHELLO\r\n|WORLD;",
new Expression(type, "HELLO290woRld", expressionValues));
}
@Test
public void parserTestOfParserRuleDefinitions() {
final ExpressionType type = ExpressionType.PARSER_RULE;
final ExpressionValue helloString = new ExpressionValue(STRING, "HELLO");
final Expression expr1 = new Expression(type, "helloWorld", new ExpressionValue(STRING, "HELLO"));
assertSingleStatement("p:w;", new Expression(type, "p", new ExpressionValue(RULE_REFERENCE, "w")));
assertSingleStatement("hello2903:'HELLO';", new Expression(type, "hello2903", helloString));
assertSingleStatement("hello:HELLO;",
new Expression(type, "hello", new ExpressionValue(RULE_REFERENCE, "HELLO")));
assertSingleStatement("helloWorld :'HELLO';", expr1);
assertSingleStatement("helloWorld : 'HELLO' ;", expr1);
assertSingleStatement("helloWorld : 'HELLO' ;", expr1);
assertSingleStatement("helloWorld: 'HELLO' ;", expr1);
assertSingleStatement("helloWorld\r\n: \r\n'HELLO' ;", expr1);
assertSingleStatement("helloWorld : 'HELLO' ;", expr1);
assertSingleStatement("helloWorld :'HELLO\r\n|WORLD';",
new Expression(type, "helloWorld", new ExpressionValue(STRING, "HELLO\r\n|WORLD")));
// UTF-8 in string
assertSingleStatement("hello:'HǺLLO';",
new Expression(type, "hello", new ExpressionValue(STRING, "HǺLLO")));
// Compound values
final List<ExpressionValue> expressionValues = new ArrayList<>();
expressionValues.add(new ExpressionValue(RULE_REFERENCE, "HELLO"));
expressionValues.add(new ExpressionValue(ALTERNATOR, "|"));
expressionValues.add(new ExpressionValue(RULE_REFERENCE, "WORLD"));
assertSingleStatement("helloWorld :HELLO\r\n|WORLD;", new Expression(type, "helloWorld", expressionValues));
}
@Test
public void parserTestOfLexerRuleDefinitions() {
final ExpressionType type = ExpressionType.LEXER_RULE;
final ExpressionValue helloString = new ExpressionValue(STRING, "HELLO");
final Expression expr1 = new Expression(type, "HELLOWORLD", helloString);
assertSingleStatement("P:w;", new Expression(type, "P", new ExpressionValue(RULE_REFERENCE, "w")));
assertSingleStatement("HELLO290woRld:'HELLO';", new Expression(type, "HELLO290woRld", helloString));
assertSingleStatement("HELLO:HELLO;",
new Expression(type, "HELLO", new ExpressionValue(RULE_REFERENCE, "HELLO")));
assertSingleStatement("HELLOWORLD :'HELLO';", expr1);
assertSingleStatement("HELLOWORLD : 'HELLO' ;", expr1);
assertSingleStatement("HELLOWORLD : 'HELLO' ;", expr1);
assertSingleStatement("HELLOWORLD: 'HELLO' ;", expr1);
assertSingleStatement("HELLOWORLD\r\n: \r\n'HELLO' ;", expr1);
assertSingleStatement("HELLOWORLD : 'HELLO' ;", expr1);
assertSingleStatement("HELLOWORLD :'HELLO\r\n|WORLD';",
new Expression(type, "HELLOWORLD", new ExpressionValue(STRING, "HELLO\r\n|WORLD")));
// UTF-8 in string
assertSingleStatement("HELLO:'HǺLLO';",
new Expression(type, "HELLO", new ExpressionValue(STRING, "HǺLLO")));
// Compound values
final List<ExpressionValue> expressionValues = new ArrayList<>();
expressionValues.add(new ExpressionValue(RULE_REFERENCE, "HELLO"));
expressionValues.add(new ExpressionValue(ALTERNATOR, "|"));
expressionValues.add(new ExpressionValue(RULE_REFERENCE, "WORLD"));
assertSingleStatement("HELLOWORLD :HELLO\r\n|WORLD;", new Expression(type, "HELLOWORLD", expressionValues));
}
@Test
public void parserTestOfFileHeaderDefinitions() {
final ExpressionType type = ExpressionType.GRAMMAR_NAME;
assertSingleStatement("grammar myGrammar;", new Expression(type, new ExpressionValue(RAW, "myGrammar")));
assertSingleStatement("grammar myGrammar2;", new Expression(type, new ExpressionValue(RAW, "myGrammar2")));
assertSingleStatement("grammar 2;", new Expression(type, new ExpressionValue(RAW, "2")));
assertSingleStatement("grammar m;", new Expression(type, new ExpressionValue(RAW, "m")));
assertSingleStatement("grammar\r\nmyGrammar;", new Expression(type, new ExpressionValue(RAW, "myGrammar")));
assertSingleStatement("grammar\nmyGrammar;", new Expression(type, new ExpressionValue(RAW, "myGrammar")));
assertSingleStatement("grammar \r\n m;", new Expression(type, new ExpressionValue(RAW, "m")));
assertSingleStatement("grammar \n m;", new Expression(type, new ExpressionValue(RAW, "m")));
}
@Test
public void parserTestOfMultiLineComment() {
final ExpressionType type = ExpressionType.MULTI_LINE_COMMENT;
assertSingleStatement("/*my comment*/", new Expression(type, new ExpressionValue(RAW, "my comment")));
assertSingleStatement("/* my comment */", new Expression(type, new ExpressionValue(RAW, " my comment ")));
assertSingleStatement("/*/* my comment */", new Expression(type, new ExpressionValue(RAW, "/* my comment ")));
assertSingleStatement("/*\r\nmy\r\ncomment\r\n*/", new Expression(type, new ExpressionValue(RAW, "\r\nmy\r\ncomment\r\n")));
assertSingleStatement("", new Expression(type, new ExpressionValue(RAW, "")));
}
@Test
public void parserTestOfInlineElements() {
// Statements
final Expression slComExpr1 = new Expression(ExpressionType.SINGLE_LINE_COMMENT, new ExpressionValue(RAW, "comment"));
final Expression slComExpr2 = new Expression(ExpressionType.SINGLE_LINE_COMMENT, new ExpressionValue(RAW, " comment"));
final Expression mlComExpr1 = new Expression(ExpressionType.MULTI_LINE_COMMENT, new ExpressionValue(RAW, "comment"));
final Expression mlComExpr2 = new Expression(ExpressionType.MULTI_LINE_COMMENT, new ExpressionValue(RAW, " comment"));
final Expression mlComExpr3 = new Expression(ExpressionType.MULTI_LINE_COMMENT, new ExpressionValue(RAW, "comment "));
final Expression mlComExpr4 = new Expression(ExpressionType.MULTI_LINE_COMMENT, new ExpressionValue(RAW, " comment "));
final Expression grmExpr = new Expression(ExpressionType.GRAMMAR_NAME, new ExpressionValue(RAW, "HelloWorld"));
final Expression prsrExpr = new Expression(ExpressionType.PARSER_RULE, "hello", new ExpressionValue(RULE_REFERENCE, "WORLD"));
final Expression lxrExpr = new Expression(ExpressionType.LEXER_RULE, "HELLO", new ExpressionValue(RULE_REFERENCE, "WORLD"));
// Tests
assertDoubleStatement("grammar HelloWorld;//comment", grmExpr, slComExpr1);
assertDoubleStatement("grammar HelloWorld;// comment", grmExpr, slComExpr2);
assertDoubleStatement("grammar HelloWorld; //comment", grmExpr, slComExpr1);
assertDoubleStatement("grammar HelloWorld; // comment", grmExpr, slComExpr2);
assertDoubleStatement("grammar HelloWorld;/*comment*/", grmExpr, mlComExpr1);
assertDoubleStatement("grammar HelloWorld;/* comment*/", grmExpr, mlComExpr2);
assertDoubleStatement("grammar HelloWorld;/*comment */", grmExpr, mlComExpr3);
assertDoubleStatement("grammar HelloWorld;/* comment */", grmExpr, mlComExpr4);
assertDoubleStatement("grammar HelloWorld; /*comment*/", grmExpr, mlComExpr1);
assertDoubleStatement("grammar HelloWorld; /* comment*/", grmExpr, mlComExpr2);
assertDoubleStatement("grammar HelloWorld; /*comment */", grmExpr, mlComExpr3);
assertDoubleStatement("grammar HelloWorld; /* comment */", grmExpr, mlComExpr4);
assertDoubleStatement("hello: WORLD;//comment", prsrExpr, slComExpr1);
assertDoubleStatement("hello: WORLD;// comment", prsrExpr, slComExpr2);
assertDoubleStatement("hello: WORLD; //comment", prsrExpr, slComExpr1);
assertDoubleStatement("hello: WORLD; // comment", prsrExpr, slComExpr2);
assertDoubleStatement("hello: WORLD;/*comment*/", prsrExpr, mlComExpr1);
assertDoubleStatement("hello: WORLD;/* comment*/", prsrExpr, mlComExpr2);
assertDoubleStatement("hello: WORLD;/*comment */", prsrExpr, mlComExpr3);
assertDoubleStatement("hello: WORLD;/* comment */", prsrExpr, mlComExpr4);
assertDoubleStatement("hello: WORLD; /*comment*/", prsrExpr, mlComExpr1);
assertDoubleStatement("hello: WORLD; /* comment*/", prsrExpr, mlComExpr2);
assertDoubleStatement("hello: WORLD; /*comment */", prsrExpr, mlComExpr3);
assertDoubleStatement("hello: WORLD; /* comment */", prsrExpr, mlComExpr4);
assertDoubleStatement("/*comment*/hello: WORLD;", mlComExpr1, prsrExpr);
assertDoubleStatement("/* comment*/hello: WORLD;", mlComExpr2, prsrExpr);
assertDoubleStatement("/*comment */hello: WORLD;", mlComExpr3, prsrExpr);
assertDoubleStatement("/* comment */hello: WORLD;", mlComExpr4, prsrExpr);
assertDoubleStatement("/*comment*/ hello: WORLD;", mlComExpr1, prsrExpr);
assertDoubleStatement("/* comment*/ hello: WORLD;", mlComExpr2, prsrExpr);
assertDoubleStatement("/*comment */ hello: WORLD;", mlComExpr3, prsrExpr);
assertDoubleStatement("/* comment */ hello: WORLD;", mlComExpr4, prsrExpr);
assertDoubleStatement("HELLO: WORLD;//comment", lxrExpr, slComExpr1);
assertDoubleStatement("HELLO: WORLD;// comment", lxrExpr, slComExpr2);
assertDoubleStatement("HELLO: WORLD; //comment", lxrExpr, slComExpr1);
assertDoubleStatement("HELLO: WORLD; // comment", lxrExpr, slComExpr2);
assertDoubleStatement("HELLO: WORLD;/*comment*/", lxrExpr, mlComExpr1);
assertDoubleStatement("HELLO: WORLD;/* comment*/", lxrExpr, mlComExpr2);
assertDoubleStatement("HELLO: WORLD;/*comment */", lxrExpr, mlComExpr3);
assertDoubleStatement("HELLO: WORLD;/* comment */", lxrExpr, mlComExpr4);
assertDoubleStatement("HELLO: WORLD; /*comment*/", lxrExpr, mlComExpr1);
assertDoubleStatement("HELLO: WORLD; /* comment*/", lxrExpr, mlComExpr2);
assertDoubleStatement("HELLO: WORLD; /*comment */", lxrExpr, mlComExpr3);
assertDoubleStatement("HELLO: WORLD; /* comment */", lxrExpr, mlComExpr4);
assertDoubleStatement("HELLO: WORLD; HELLO: WORLD;", lxrExpr, lxrExpr);
assertDoubleStatement("HELLO:WORLD; HELLO: WORLD;", lxrExpr, lxrExpr);
assertDoubleStatement("HELLO: WORLD; HELLO:WORLD;", lxrExpr, lxrExpr);
assertDoubleStatement("HELLO:WORLD; HELLO:WORLD;", lxrExpr, lxrExpr);
assertDoubleStatement("HELLO:WORLD;HELLO:WORLD;", lxrExpr, lxrExpr);
assertDoubleStatement("HELLO: WORLD; hello: WORLD;", lxrExpr, prsrExpr);
assertDoubleStatement("HELLO:WORLD; hello: WORLD;", lxrExpr, prsrExpr);
assertDoubleStatement("HELLO: WORLD; hello:WORLD;", lxrExpr, prsrExpr);
assertDoubleStatement("HELLO:WORLD;hello: WORLD;", lxrExpr, prsrExpr);
assertDoubleStatement("HELLO: WORLD;hello:WORLD;", lxrExpr, prsrExpr);
assertDoubleStatement("HELLO:WORLD;hello:WORLD;", lxrExpr, prsrExpr);
assertDoubleStatement("hello: WORLD; HELLO: WORLD;", prsrExpr, lxrExpr);
assertDoubleStatement("hello:WORLD; HELLO: WORLD;", prsrExpr, lxrExpr);
assertDoubleStatement("hello: WORLD; HELLO:WORLD;", prsrExpr, lxrExpr);
assertDoubleStatement("hello:WORLD;HELLO: WORLD;", prsrExpr, lxrExpr);
assertDoubleStatement("hello: WORLD;HELLO:WORLD;", prsrExpr, lxrExpr);
assertDoubleStatement("hello:WORLD;HELLO:WORLD;", prsrExpr, lxrExpr);
assertDoubleStatement("/* comment *//* comment */", mlComExpr4, mlComExpr4);
assertDoubleStatement("/* comment*//*comment*/", mlComExpr2, mlComExpr1);
assertDoubleStatement("/*comment *//*comment*/", mlComExpr3, mlComExpr1);
assertDoubleStatement("/*comment*//* comment*/", mlComExpr1, mlComExpr2);
assertDoubleStatement("/*comment*//*comment */", mlComExpr1, mlComExpr3);
assertDoubleStatement("/*comment*/ /*comment*/", mlComExpr1, mlComExpr1);
assertDoubleStatement("/* comment */ /* comment */", mlComExpr4, mlComExpr4);
assertDoubleStatement("/* comment*/ /*comment*/", mlComExpr2, mlComExpr1);
assertDoubleStatement("/*comment */ /*comment*/", mlComExpr3, mlComExpr1);
assertDoubleStatement("/*comment*/ /* comment*/", mlComExpr1, mlComExpr2);
assertDoubleStatement("/*comment*/ /*comment */", mlComExpr1, mlComExpr3);
}
@Test(expected = IllegalArgumentException.class)
public void parserTestOfNullInput() {
new SimpleParser().parse(null);
}
@Test
public void parserTestOfInvalidGrammarNameParsing() {
assertParsingError("grammar myGrammar");
assertParsingError("grammarmyGrammar;");
assertParsingError("grammar ;");
}
@Test
public void parserTestOfInvalidLexerRuleParsing() {
assertParsingError("HELLO WORLD;");
assertParsingError("HELLO:WORLD");
assertParsingError("HELLO:;");
}
@Test
public void parserTestOfInvalidMacroRuleParsing() {
assertParsingError("#HELLO WORLD;");
assertParsingError("#HELLO:WORLD");
assertParsingError("#HELLO:;");
}
@Test
public void parserTestOfInvalidParserRuleParsing() {
assertParsingError("hello world;");
assertParsingError("hello:world");
assertParsingError("hello:;");
}
} |
package org.jgroups.ping.kube.test;
import static org.junit.Assert.assertEquals;
import java.util.List;
import org.jgroups.protocols.kubernetes.Client;
import org.jgroups.protocols.kubernetes.Pod;
import org.junit.Assert;
import org.junit.Test;
/**
* @author <a href="mailto:ulrich.romahn@gmail.com">Ulrich Romahn</a>
*/
public class StatusTest {
@Test
public void testPodsRunning() throws Exception {
Client client = new TestClient("/complex_pods.json");
List<Pod> pods = client.getPods(null, null, false);
Assert.assertNotNull(pods);
assertEquals(4, pods.size());
String pod = pods.get(0).getIp();
Assert.assertNotNull(pod);
}
@Test
public void testOnePodNotRunning() throws Exception {
final String jsonFile = "/unknown_pods.json";
Client client = new TestClient(jsonFile);
List<Pod> pods = client.getPods(null, null, false);
Assert.assertNotNull(pods);
assertEquals(2, pods.size());
String pod = pods.get(0).getIp();
Assert.assertNotNull(pod);
}
} |
package com.jomofisher.cmakeify;
import com.jomofisher.cmakeify.CMakeify.OSType;
import com.jomofisher.cmakeify.model.*;
import java.io.*;
import java.util.*;
public class BashScriptBuilder extends ScriptBuilder {
    // Shell snippet appended after risky commands: aborts the script,
    // propagating the failed command's exit code (negated).
    final private static String ABORT_LAST_FAILED = "rc=$?; if [[ $rc != 0 ]]; then exit -$rc; fi";
    // Relative folders the generated script uses for unpacked tools and raw downloads.
    final private static String TOOLS_FOLDER = ".cmakeify/tools";
    final private static String DOWNLOADS_FOLDER = ".cmakeify/downloads";
    // Accumulates the bash script text; flushed to disk by writeToShellScript().
    final private StringBuilder body = new StringBuilder();
    // Maps absolute zip path -> path of the redist folder that gets zipped into it.
    final private Map<String, String> zips = new HashMap<>();
    final private OSType hostOS;
    final private File workingFolder;
    final private File rootBuildFolder;
    final private File zipsFolder;
    // cdep-manifest.yml (or cdep-manifest-<os>.yml) that cdep() appends YAML lines to.
    final private File cdepFile;
    final private File androidFolder;
    // Maven-style coordinate of the artifact being built.
    final private String targetGroupId;
    final private String targetArtifactId;
    final private String targetVersion;
    // Every output path is recorded here so a double-write is caught early.
    final private Set<File> outputLocations = new HashSet<>();
    final private PrintStream out;
    // Non-null when this script run targets exactly one OS (multi-run CI setups).
    final private OS specificTargetOS;
    /**
     * Creates a builder that accumulates a bash build script for the artifact
     * identified by the given group/artifact/version coordinate.
     *
     * @param out              sink for progress messages (not the script itself)
     * @param hostOS           OS the script will run on (selects tool archives)
     * @param workingFolder    project root; build output goes under it
     * @param specificTargetOS when non-null, the manifest file name gains an
     *                         OS suffix so parallel CI runs don't collide
     */
    BashScriptBuilder(PrintStream out,
        OSType hostOS,
        File workingFolder,
        String targetGroupId,
        String targetArtifactId,
        String targetVersion,
        OS specificTargetOS) {
        this.out = out;
        this.hostOS = hostOS;
        this.workingFolder = workingFolder;
        this.rootBuildFolder = new File(workingFolder, "build");
        this.zipsFolder = new File(rootBuildFolder, "zips");
        if (specificTargetOS == null) {
            this.cdepFile = new File(zipsFolder, "cdep-manifest.yml");
        } else {
            // Per-OS manifest; merged into cdep-manifest.yml in deployRedistFiles().
            this.cdepFile = new File(zipsFolder, String.format("cdep-manifest-%s.yml", specificTargetOS));
        }
        this.androidFolder = new File(rootBuildFolder, "Android");
        this.targetGroupId = targetGroupId;
        this.targetArtifactId = targetArtifactId;
        this.targetVersion = targetVersion;
        this.specificTargetOS = specificTargetOS;
    }
private BashScriptBuilder body(String format, Object... args) {
String write = String.format(format + "\n", args);
if (write.contains(">")) {
throw new RuntimeException(write);
}
if (write.contains("<")) {
throw new RuntimeException(write);
}
if (write.contains("&")) {
throw new RuntimeException(write);
}
body.append(write);
return this;
}
private BashScriptBuilder bodyWithRedirect(String format, Object... args) {
String write = String.format(format + "\n", args);
if (!write.contains(">")) {
throw new RuntimeException(write);
}
if (write.contains("<")) {
throw new RuntimeException(write);
}
body.append(write);
return this;
}
    /**
     * Appends a script line that echoes one YAML line into the cdep manifest
     * file. The "%%s" survives the outer String.format as printf's "%s";
     * "\\r\\n" becomes a literal \r\n in the script for printf to interpret.
     */
    private BashScriptBuilder cdep(String format, Object... args) {
        String embed = String.format(format, args);
        body.append(String.format("printf \"%%s\\r\\n\" \"%s\" >> %s \n", embed, cdepFile));
        return this;
    }
private void recordOutputLocation(File folder) {
out.printf("Writing to %s\n", folder);
if (this.outputLocations.contains(folder)) {
throw new RuntimeException(String.format("Output location %s written twice", folder));
}
try {
File canonical = folder.getCanonicalFile();
if (this.outputLocations.contains(canonical)) {
throw new RuntimeException(String.format("Output location %s written twice", folder));
}
this.outputLocations.add(folder);
this.outputLocations.add(canonical);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
    /**
     * Starts the script: wipes the build folder, recreates the tool/download
     * folders, and writes the manifest header (coordinate plus any hard-named
     * dependencies).
     */
    @Override
    ScriptBuilder createEmptyBuildFolder(HardNameDependency dependencies[]) {
        // Capture the cdep launcher path so later ${cdep} invocations work from any cwd.
        body("cdep=$(pwd)/cdep");
        body("echo Using cdep at ${cdep}");
        body("rm -rf %s", rootBuildFolder);
        body("mkdir -p %s", zipsFolder);
        body("mkdir -p %s/", TOOLS_FOLDER);
        body("mkdir -p %s/", DOWNLOADS_FOLDER);
        cdep("# Generated by CMakeify");
        cdep("coordinate:");
        cdep("  groupId: %s", targetGroupId);
        cdep("  artifactId: %s", targetArtifactId);
        cdep("  version: %s", targetVersion);
        if (dependencies != null && dependencies.length > 0) {
            cdep("dependencies:");
            for (HardNameDependency dependency : dependencies) {
                cdep("  - compile: %s", dependency.compile);
                cdep("    sha256: %s", dependency.sha256);
            }
        }
        return this;
    }
private ArchiveUrl getHostArchive(RemoteArchive remote) {
switch (hostOS) {
case Linux:
return remote.linux;
case MacOS:
return remote.darwin;
}
throw new RuntimeException(hostOS.toString());
}
    /**
     * Emits the script lines that download the host-appropriate archive into
     * DOWNLOADS_FOLDER and uncompress it into TOOLS_FOLDER. Both helper
     * commands redirect output, hence bodyWithRedirect.
     */
    @Override
    ScriptBuilder download(RemoteArchive remote) {
        ArchiveInfo archive = new ArchiveInfo(getHostArchive(remote));
        return bodyWithRedirect(archive.downloadToFolder(DOWNLOADS_FOLDER)).bodyWithRedirect(archive.uncompressToFolder(
            DOWNLOADS_FOLDER,
            TOOLS_FOLDER));
    }
@Override
File writeToShellScript() {
BufferedWriter writer = null;
File file = new File(".cmakeify/build.sh");
file.getAbsoluteFile().mkdirs();
file.delete();
try {
writer = new BufferedWriter(new FileWriter(file));
writer.write(body.toString());
} catch (Exception e) {
e.printStackTrace();
} finally {
try {
// Close the writer regardless of what happens...
writer.close();
} catch (Exception e) {
}
}
return file;
}
@Override
ScriptBuilder checkForCompilers(Collection<String> compilers) {
for (String compiler : compilers) {
body("if [[ -z \"$(which %s)\" ]]; then", compiler);
body(" echo CMAKEIFY ERROR: Missing %s. Please install.", compiler);
body(" exit -110");
body("fi");
}
return this;
}
    /**
     * Emits the script section that configures, builds, stages and packages
     * one Android CMake build (one combination of flavor/NDK/compiler/runtime/
     * platform/abi), then appends the matching archive entry to the manifest.
     * The multiple* flags control which dimensions are folded into the output
     * folder and zip names so parallel variants don't collide.
     */
    @Override
    ScriptBuilder cmakeAndroid(String cmakeVersion,
        RemoteArchive cmakeRemote,
        String target,
        String cmakeFlags,
        String flavor,
        String flavorFlags,
        String ndkVersion,
        RemoteArchive ndkRemote,
        String includes[],
        String lib,
        String compiler,
        String runtime,
        String platform,
        String abi,
        boolean multipleFlavors,
        boolean multipleCMake,
        boolean multipleNDK,
        boolean multipleCompiler,
        boolean multipleRuntime,
        boolean multiplePlatforms,
        boolean multipleAbi) {
        body("echo Executing script for %s %s %s %s %s %s %s", flavor, ndkVersion, platform, compiler, runtime, target, abi);
        if (lib != null && lib.length() > 0) {
            throw new RuntimeException("lib is no longer supported, use buildTarget");
        }
        // NOTE(review): unreachable — a non-empty lib already threw above.
        if (target != null && target.length() > 0 && lib != null && lib.length() > 0) {
            throw new RuntimeException("cmakify.yml has both lib and target, only one is allowed");
        }
        // Derive the expected static-library name from the build target.
        if (target != null && target.length() > 0 && (lib == null || lib.length() == 0)) {
            lib = String.format("lib%s.a", target);
        }
        if (cmakeFlags == null) {
            cmakeFlags = "";
        }
        String cmakeExe = String.format("%s/%s/bin/cmake", TOOLS_FOLDER, getHostArchive(cmakeRemote).unpackroot);
        // Fold each varying build dimension into the folder and zip name.
        File outputFolder = androidFolder;
        String zipName = targetArtifactId + "-android";
        if (multipleCMake) {
            outputFolder = new File(outputFolder, "cmake-" + cmakeVersion);
            zipName += "-cmake-" + cmakeVersion;
        }
        if (multipleNDK) {
            outputFolder = new File(outputFolder, ndkVersion);
            zipName += "-" + ndkVersion;
        }
        if (multipleCompiler) {
            outputFolder = new File(outputFolder, compiler);
            zipName += "-" + compiler;
        }
        if (multipleRuntime) {
            // '+' (as in c++) is not filename/zip friendly; use 'x' instead.
            String fixedRuntime = runtime.replace('+', 'x');
            outputFolder = new File(outputFolder, fixedRuntime);
            zipName += "-" + fixedRuntime;
        }
        if (multiplePlatforms) {
            outputFolder = new File(outputFolder, "android-" + platform);
            zipName += "-platform-" + platform;
        }
        if (multipleFlavors) {
            outputFolder = new File(outputFolder, "flavor-" + flavor);
            zipName += "-" + flavor;
        }
        if (multipleAbi) {
            outputFolder = new File(outputFolder, "abi-" + abi);
            zipName += "-" + abi;
        }
        zipName += ".zip";
        File zip = new File(zipsFolder, zipName).getAbsoluteFile();
        File headers = new File(zipsFolder, "headers.zip").getAbsoluteFile();
        recordOutputLocation(zip);
        File buildFolder = new File(outputFolder, "cmake-generated-files");
        String ndkFolder = String.format("%s/%s", TOOLS_FOLDER, getHostArchive(ndkRemote).unpackroot);
        File redistFolder = new File(outputFolder, "redist").getAbsoluteFile();
        File headerFolder = new File(outputFolder, "header").getAbsoluteFile();
        File stagingFolder = new File(outputFolder, "staging").getAbsoluteFile();
        File abiBuildFolder = new File(buildFolder, abi);
        File archFolder = new File(String.format("%s/platforms/android-%s/arch-%s",
            new File(ndkFolder).getAbsolutePath(),
            platform,
            Abi.getByName(abi).getArchitecture()));
        // Only build if the NDK actually ships this platform/arch combination.
        body("if [ -d '%s' ]; then", archFolder);
        body(" echo Creating make project in %s", abiBuildFolder);
        File stagingAbiFolder = new File(String.format("%s/lib/%s", stagingFolder, abi));
        recordOutputLocation(stagingAbiFolder);
        String command = String.format("%s \\\n" +
            " -H%s \\\n" +
            " -B%s \\\n" +
            " -DCMAKE_ANDROID_NDK_TOOLCHAIN_VERSION=%s \\\n" +
            " -DCMAKE_ANDROID_NDK_TOOLCHAIN_DEBUG=1 \\\n" +
            " -DCMAKE_SYSTEM_NAME=Android \\\n" +
            " -DCMAKE_SYSTEM_VERSION=%s \\\n" +
            " -DCMAKEIFY_REDIST_INCLUDE_DIRECTORY=%s/include \\\n" +
            " -DCMAKE_LIBRARY_OUTPUT_DIRECTORY=%s \\\n" +
            " -DCMAKE_ARCHIVE_OUTPUT_DIRECTORY=%s \\\n" +
            " -DCMAKE_ANDROID_STL_TYPE=%s_static \\\n" +
            " -DCMAKE_ANDROID_NDK=%s \\\n" +
            " -DCMAKE_ANDROID_ARCH_ABI=%s %s %s\n",
            cmakeExe,
            workingFolder,
            abiBuildFolder,
            compiler,
            platform,
            headerFolder,
            stagingAbiFolder,
            stagingAbiFolder,
            runtime,
            new File(ndkFolder).getAbsolutePath(),
            abi,
            flavorFlags,
            cmakeFlags);
        body(" echo Executing %s", command);
        body(" " + command);
        body(" " + ABORT_LAST_FAILED);
        if (target != null && target.length() > 0) {
            body(String.format(" %s --build %s --target %s -- -j8", cmakeExe, abiBuildFolder, target));
        } else {
            body(String.format(" %s --build %s -- -j8", cmakeExe, abiBuildFolder));
        }
        body(" " + ABORT_LAST_FAILED);
        String stagingLib = String.format("%s/%s", stagingAbiFolder, lib);
        File redistAbiFolder = new File(String.format("%s/lib/%s", redistFolder, abi));
        recordOutputLocation(redistAbiFolder);
        // Copy the built library from staging into the redist layout, or fail loudly.
        if (lib != null && lib.length() > 0) {
            body(" if [ -f '%s' ]; then", stagingLib);
            body(" mkdir -p %s", redistAbiFolder);
            body(" cp %s %s/%s", stagingLib, redistAbiFolder, lib);
            body(" " + ABORT_LAST_FAILED);
            body(" else");
            body(" echo CMAKEIFY ERROR: CMake build did not produce %s", stagingLib);
            body(" exit -100");
            body(" fi");
        } else {
            body(" echo cmakeify.yml did not specify lib or target. No output library expected.");
        }
        body("else");
        body(" echo Build skipped ABI %s because arch folder didnt exist: %s", abi, archFolder);
        body("fi");
        zips.put(zip.getAbsolutePath(), redistFolder.getPath());
        body("if [ -d '%s' ]; then", stagingFolder);
        // Create a folder with something in it so there'e always something to zip
        body(" mkdir -p %s", redistFolder);
        bodyWithRedirect(" echo Android %s %s %s %s %s %s > %s/cmakeify.txt",
            cmakeVersion,
            flavor,
            ndkVersion,
            platform,
            compiler,
            runtime,
            redistFolder);
        writeExtraIncludesToBody(includes, headerFolder);
        writeCreateZipFromRedistFolderToBody(zip, redistFolder);
        writeCreateHeaderZip(headers, headerFolder);
        writeZipFileStatisticsToBody(zip);
        // Manifest entry; $SHASUM256/$ARCHIVESIZE are set by writeZipFileStatisticsToBody.
        cdep("  - lib: %s", lib);
        cdep("    file: %s", zip.getName());
        cdep("    sha256: $SHASUM256");
        cdep("    size: $ARCHIVESIZE");
        if (multipleFlavors) {
            cdep("    flavor: %s", flavor);
        }
        cdep("    runtime: %s", runtime);
        cdep("    platform: %s", platform);
        cdep("    ndk: %s", ndkVersion);
        cdep("    abi: %s", abi);
        if (multipleCompiler) {
            cdep("    compiler: %s", compiler);
        }
        if (multipleCMake) {
            cdep("    builder: cmake-%s", cmakeVersion);
        }
        body("fi");
        return this;
    }
    /**
     * Zips the header folder into the headers archive, aborting the script
     * with -699 if the header folder was never produced.
     */
    private void writeCreateHeaderZip(File headers, File headerFolder) {
        body(" if [ -d '%s' ]; then", headerFolder);
        writeCreateZipFromRedistFolderToBody(headers, headerFolder);
        body(" else");
        body(" echo CMAKEIFY ERROR: Header folder %s was not found", headerFolder);
        body(" exit -699");
        body(" fi");
    }
    /**
     * Emits script lines that capture the zip's sha256 and byte size into the
     * shell variables $SHASUM256 and $ARCHIVESIZE (column 5 of `ls -l`),
     * which later cdep() manifest lines reference.
     */
    private void writeZipFileStatisticsToBody(File zip) {
        body(" SHASUM256=$(shasum -a 256 %s | awk '{print $1}')", zip);
        body(" " + ABORT_LAST_FAILED);
        body(" ARCHIVESIZE=$(ls -l %s | awk '{print $5}')", zip);
        body(" " + ABORT_LAST_FAILED);
    }
    /**
     * Emits the script section for one Linux CMake build: configure, build,
     * zip the redist folder and append the archive entry to the manifest.
     * multipleCMake/multipleCompiler fold those dimensions into the output
     * folder and zip names.
     */
    @Override
    ScriptBuilder cmakeLinux(String cmakeVersion,
        RemoteArchive cmakeRemote,
        String target,
        String cmakeFlags,
        Toolset toolset,
        String lib,
        boolean multipleCMake,
        boolean multipleCompiler) {
        if (target != null && target.length() > 0 && lib != null && lib.length() > 0) {
            throw new RuntimeException("cmakify.yml has both lib and target, only one is allowed");
        }
        // Derive the expected static-library name from the build target.
        if (target != null && target.length() > 0 && (lib == null || lib.length() == 0)) {
            lib = String.format("lib%s.a", target);
        }
        if (cmakeFlags == null) {
            cmakeFlags = "";
        }
        String cmakeExe = String.format("%s/%s/bin/cmake", TOOLS_FOLDER, getHostArchive(cmakeRemote).unpackroot);
        File outputFolder = new File(rootBuildFolder, "Linux");
        String zipName = targetArtifactId + "-linux";
        if (multipleCMake) {
            outputFolder = new File(outputFolder, "cmake-" + cmakeVersion);
            zipName += "-cmake-" + cmakeVersion;
        }
        if (multipleCompiler) {
            outputFolder = new File(outputFolder, toolset.c);
            zipName += "-" + toolset.c;
        }
        zipName += ".zip";
        File zip = new File(zipsFolder, zipName).getAbsoluteFile();
        File headers = new File(zipsFolder, "headers.zip").getAbsoluteFile();
        File buildFolder = new File(outputFolder, "cmake-generated-files");
        File headerFolder = new File(outputFolder, "header").getAbsoluteFile();
        File redistFolder = new File(outputFolder, "redist").getAbsoluteFile();
        body("echo Building to %s", outputFolder);
        body("mkdir -p %s/include", redistFolder);
        recordOutputLocation(zip);
        recordOutputLocation(outputFolder);
        recordOutputLocation(redistFolder);
        body(String.format("%s \\\n" +
            " -H%s \\\n" +
            " -B%s \\\n" +
            " -DCMAKEIFY_REDIST_INCLUDE_DIRECTORY=%s/include \\\n" +
            " -DCMAKE_LIBRARY_OUTPUT_DIRECTORY=%s/lib \\\n" +
            " -DCMAKE_ARCHIVE_OUTPUT_DIRECTORY=%s/lib \\\n" +
            " -DCMAKE_SYSTEM_NAME=Linux \\\n" +
            " -DCMAKE_C_COMPILER=%s \\\n" +
            " -DCMAKE_CXX_COMPILER=%s %s",
            cmakeExe,
            workingFolder,
            buildFolder,
            headerFolder,
            redistFolder,
            redistFolder,
            toolset.c,
            toolset.cxx,
            cmakeFlags));
        if (target != null && target.length() > 0) {
            body(String.format("%s --build %s --target %s -- -j8", cmakeExe, buildFolder, target));
        } else {
            body(String.format("%s --build %s -- -j8", cmakeExe, buildFolder));
        }
        body(ABORT_LAST_FAILED);
        zips.put(zip.getAbsolutePath(), redistFolder.getPath());
        body("# Zip Linux redist if folder was created in %s", redistFolder);
        body("if [ -d '%s' ]; then", redistFolder);
        // Never silently clobber an existing zip.
        body(" if [ -f '%s' ]; then", zip);
        body(" echo CMAKEIFY ERROR: Linux zip %s would be overwritten", zip);
        body(" exit -500");
        body(" fi");
        writeCreateZipFromRedistFolderToBody(zip, redistFolder);
        writeCreateHeaderZip(headers, headerFolder);
        writeZipFileStatisticsToBody(zip);
        body(" " + ABORT_LAST_FAILED);
        // Manifest entry; $SHASUM256/$ARCHIVESIZE were set just above.
        cdep("  - lib: %s", lib);
        cdep("    file: %s", zip.getName());
        cdep("    sha256: $SHASUM256");
        cdep("    size: $ARCHIVESIZE");
        body("else");
        body(" echo CMAKEIFY ERROR: Did not create %s", redistFolder);
        body(" exit -520");
        body("fi");
        return this;
    }
    /**
     * Emits the script section for one iOS CMake build (one combination of
     * flavor/platform/architecture/sdk). On non-macOS hosts only an
     * informational line is emitted, but the manifest entry is still written
     * so the combined manifest stays complete.
     */
    @Override
    ScriptBuilder cmakeiOS(String cmakeVersion,
        RemoteArchive cmakeRemote,
        String target,
        String cmakeFlags,
        String flavor,
        String flavorFlags,
        String includes[],
        String lib,
        iOSPlatform platform,
        iOSArchitecture architecture,
        String sdk,
        boolean multipleFlavor,
        boolean multipleCMake,
        boolean multiplePlatform,
        boolean multipleArchitecture,
        boolean multipleSdk) {
        if (target != null && target.length() > 0 && lib != null && lib.length() > 0) {
            throw new RuntimeException("cmakify.yml has both lib and target, only one is allowed");
        }
        // Derive the expected static-library name from the build target.
        if (target != null && target.length() > 0 && (lib == null || lib.length() == 0)) {
            lib = String.format("lib%s.a", target);
        }
        if (cmakeFlags == null) {
            cmakeFlags = "";
        }
        // Skip platform/architecture pairs XCode can't build.
        if (!isSupportediOSPlatformArchitecture(platform, architecture)) {
            out.printf("Skipping iOS %s %s because it isn't supported by XCode\n", platform, architecture);
            return this;
        }
        String cmakeExe = String.format("%s/%s/bin/cmake", TOOLS_FOLDER, getHostArchive(cmakeRemote).unpackroot);
        // Fold each varying build dimension into the folder and zip name.
        File outputFolder = new File(rootBuildFolder, "iOS");
        String zipName = targetArtifactId + "-ios";
        if (multipleCMake) {
            outputFolder = new File(outputFolder, "cmake-" + cmakeVersion);
            zipName += "-cmake-" + cmakeVersion;
        }
        if (multipleFlavor) {
            outputFolder = new File(outputFolder, "flavor-" + flavor);
            zipName += "-" + flavor;
        }
        if (multiplePlatform) {
            outputFolder = new File(outputFolder, "platform-" + platform.toString());
            zipName += "-platform-" + platform.toString();
        }
        if (multipleArchitecture) {
            outputFolder = new File(outputFolder, "architecture-" + architecture.toString());
            zipName += "-architecture-" + architecture.toString();
        }
        if (multipleSdk) {
            outputFolder = new File(outputFolder, "sdk-" + sdk);
            zipName += "-sdk-" + sdk;
        }
        zipName += ".zip";
        File zip = new File(zipsFolder, zipName).getAbsoluteFile();
        File headers = new File(zipsFolder, "headers.zip").getAbsoluteFile();
        File buildFolder = new File(outputFolder, "cmake-generated-files");
        File headerFolder = new File(outputFolder, "header").getAbsoluteFile();
        File redistFolder = new File(outputFolder, "redist").getAbsoluteFile();
        File stagingFolder = new File(outputFolder, "staging").getAbsoluteFile();
        if (hostOS != OSType.MacOS) {
            body("echo No XCode available. NOT building to %s", outputFolder);
        } else {
            // Resolve toolchain and SDK paths via xcrun/xcode-select at script runtime.
            body("CDEP_IOS_CLANG=$(xcrun -sdk iphoneos -find clang)");
            body("CDEP_IOS_AR=$(xcrun -sdk iphoneos -find ar)");
            body("CDEP_XCODE_DEVELOPER_DIR=$(xcode-select -print-path)");
            body("CDEP_IOS_DEVELOPER_ROOT=${CDEP_XCODE_DEVELOPER_DIR}/Platforms/%s.platform/Developer", platform);
            body("CDEP_IOS_SDK_ROOT=${CDEP_IOS_DEVELOPER_ROOT}/SDKs/%s%s.sdk", platform, sdk);
            body("if [ ! -d \"${CDEP_IOS_SDK_ROOT}\" ]; then");
            body(" echo Not building for non-existent SDK root ${CDEP_IOS_SDK_ROOT}. Listing available:");
            body(" ls ${CDEP_IOS_DEVELOPER_ROOT}/SDKs");
            body("else");
            body(" echo Building to %s", outputFolder);
            body(" mkdir -p %s/include", redistFolder);
        }
        recordOutputLocation(zip);
        recordOutputLocation(outputFolder);
        recordOutputLocation(redistFolder);
        recordOutputLocation(stagingFolder);
        String command = String.format("%s \\\n" +
            " -H%s \\\n" +
            " -B%s \\\n" +
            " -DCMAKE_C_COMPILER=${CDEP_IOS_CLANG}\\\n" +
            " -DCMAKE_CXX_COMPILER=${CDEP_IOS_CLANG} \\\n" +
            " -DCMAKE_C_COMPILER_WORKS=1 \\\n" +
            " -DCMAKE_CXX_COMPILER_WORKS=1 \\\n" +
            " -DCMAKE_AR=${CDEP_IOS_AR}\\\n" +
            " -DCMAKE_OSX_SYSROOT=${CDEP_IOS_SDK_ROOT} \\\n" +
            " -DCMAKE_OSX_ARCHITECTURES=%s \\\n" +
            " -DCMAKEIFY_REDIST_INCLUDE_DIRECTORY=%s/include \\\n" +
            " -DCMAKE_LIBRARY_OUTPUT_DIRECTORY=%s/lib \\\n" +
            " -DCMAKE_ARCHIVE_OUTPUT_DIRECTORY=%s/lib %s %s \\\n",
            cmakeExe,
            workingFolder,
            buildFolder,
            architecture,
            headerFolder,
            stagingFolder,
            stagingFolder,
            cmakeFlags,
            flavorFlags);
        if (hostOS == OSType.MacOS) {
            body(" echo Executing %s", command);
            body(" " + command);
            if (target != null && target.length() > 0) {
                body(String.format("echo %s --build %s --target %s -- -j8", cmakeExe, buildFolder, target));
                body(String.format("%s --build %s --target %s -- -j8", cmakeExe, buildFolder, target));
            } else {
                body(String.format("echo %s --build %s -- -j8", cmakeExe, buildFolder));
                body(String.format("%s --build %s -- -j8", cmakeExe, buildFolder));
            }
            body(" " + ABORT_LAST_FAILED);
            // Copy the built library from staging into the redist layout, or fail loudly.
            if (lib != null && lib.length() > 0) {
                String stagingLib = String.format("%s/lib/%s", stagingFolder, lib);
                body(" if [ -f '%s' ]; then", stagingLib);
                body(" mkdir -p %s/lib", redistFolder);
                body(" cp %s %s/lib/%s", stagingLib, redistFolder, lib);
                body(" " + ABORT_LAST_FAILED);
                body(" else");
                body(" echo CMAKEIFY ERROR: CMake build did not produce %s", stagingLib);
                body(" exit -100");
                body(" fi");
            }
            zips.put(zip.getAbsolutePath(), redistFolder.getPath());
            body(" if [ -d '%s' ]; then", stagingFolder);
            // Create a folder with something in it so there'e always something to zip
            body(" mkdir -p %s", redistFolder);
            bodyWithRedirect(" echo iOS %s %s > %s/cmakeify.txt", cmakeVersion, platform, redistFolder);
            writeExtraIncludesToBody(includes, headerFolder);
            writeCreateZipFromRedistFolderToBody(zip, redistFolder);
            writeCreateHeaderZip(headers, headerFolder);
            writeZipFileStatisticsToBody(zip);
            // NOTE(review): this condition is true unless lib == "" — it looks like it
            // may have been meant as (lib != null && lib.length() > 0); confirm intent
            // before changing, since it controls whether the error branch is emitted.
            if (lib == null || lib.length() > 0) {
                body(" else");
                body(" echo CMAKEIFY ERROR: Build did not produce an output in %s", stagingFolder);
                body(" exit -200");
            }
            body(" fi");
            // Still create the manifest for what would have been built.
            cdep("  - lib: %s", lib);
            cdep("    file: %s", zip.getName());
            cdep("    sha256: $SHASUM256");
            cdep("    size: $ARCHIVESIZE");
            if (multipleFlavor) {
                cdep("    flavor: %s", flavor);
            }
            cdep("    platform: %s", platform);
            cdep("    architecture: %s", architecture);
            cdep("    sdk: %s", sdk);
            if (multipleCMake) {
                cdep("    builder: cmake-%s", cmakeVersion);
            }
            body("fi");
        }
        return this;
    }
private boolean isSupportediOSPlatformArchitecture(iOSPlatform platform, iOSArchitecture architecture) {
if (platform.equals(iOSPlatform.iPhoneOS)) {
if (architecture.equals(iOSArchitecture.arm64)) {
return true;
}
if (architecture.equals(iOSArchitecture.armv7)) {
return true;
}
return architecture.equals(iOSArchitecture.armv7s);
}
if (platform.equals(iOSPlatform.iPhoneSimulator)) {
if (architecture.equals(iOSArchitecture.i386)) {
return true;
}
return architecture.equals(iOSArchitecture.x86_64);
}
throw new RuntimeException(platform.toString());
}
    /**
     * Emits script lines that zip the contents of {@code folder} (recursively,
     * from inside it so paths are relative) into {@code zip}, then verify the
     * zip exists, aborting with -402 if it doesn't.
     */
    private void writeCreateZipFromRedistFolderToBody(File zip, File folder) {
        body(" pushd %s", folder);
        body(" " + ABORT_LAST_FAILED);
        body(" zip %s . -r", zip);
        body(" " + ABORT_LAST_FAILED);
        body(" if [ -f '%s' ]; then", zip);
        body(" echo Zip %s was created", zip);
        body(" else");
        body(" echo CMAKEIFY ERROR: Zip %s was not created", zip);
        body(" exit -402");
        body(" fi");
        body(" popd");
        body(" " + ABORT_LAST_FAILED);
    }
    /**
     * Emits script lines that copy .h/.hpp files from each extra include
     * folder into the redist headers folder, preserving directory structure
     * via cpio -pdm. Folders already named "include..." are copied as-is;
     * others go under an extra include/ level. Missing folders abort with -600.
     * ("{pipe}" appears only in echoed text because body() rejects raw '|'-free
     * lines containing shell metacharacters like '&', '<', '>'.)
     */
    private void writeExtraIncludesToBody(String[] includes, File includesRedistFolder) {
        if (includes != null) {
            for (String include : includes) {
                body(" if [ ! -d '%s/%s' ]; then", workingFolder, include);
                body(" echo CMAKEIFY ERROR: Extra include folder '%s/%s' does not exist", workingFolder, include);
                body(" exit -600");
                body(" fi");
                body(" pushd %s", workingFolder);
                if (include.startsWith("include")) {
                    body(" echo find %s -name '*.h' {pipe} cpio -pdm %s", include, includesRedistFolder);
                    body(" find %s -name '*.h' | cpio -pdm %s", include, includesRedistFolder);
                    body(" echo find %s -name '*.hpp' {pipe} cpio -pdm %s", include, includesRedistFolder);
                    body(" find %s -name '*.hpp' | cpio -pdm %s", include, includesRedistFolder);
                } else {
                    body(" find %s -name '*.h' | cpio -pdm %s/include", include, includesRedistFolder);
                    body(" find %s -name '*.hpp' | cpio -pdm %s/include", include, includesRedistFolder);
                }
                body(" popd");
                body(" " + ABORT_LAST_FAILED);
            }
        }
    }
@Override
ScriptBuilder startBuilding(OS target) {
switch (target) {
case android:
cdep("android:");
cdep(" archives:");
return this;
case linux:
cdep("linux:");
cdep(" archives:");
return this;
case windows:
cdep("windows:");
cdep(" archives:");
return this;
case iOS:
cdep("iOS:");
cdep(" archives:");
return this;
}
throw new RuntimeException(target.toString());
}
    /**
     * Finishes the manifest (embedding the optional usage example as a YAML
     * literal block) and emits script lines that print the manifest and list
     * each produced zip that actually exists on disk.
     */
    @Override
    ScriptBuilder buildRedistFiles(File workingFolder, String[] includes, String example) {
        if (example != null && example.length() > 0) {
            cdep("example: |");
            // Split on \n or \r\n so each example line is indented into the YAML block.
            String lines[] = example.split("\\r?\\n");
            for (String line : lines) {
                cdep("  %s", line);
            }
        }
        body("cat %s", cdepFile);
        body("echo - %s", new File(cdepFile.getParentFile(), "cdep-manifest.yml"));
        for (String zip : zips.keySet()) {
            // Report zips relative to the current directory.
            String relativeZip = new File(".").toURI().relativize(new File(zip).toURI()).getPath();
            body("if [ -f '%s' ]; then", relativeZip);
            body(" echo - %s", relativeZip);
            body("fi");
        }
        return this;
    }
    /**
     * Emits the script lines that merge per-OS manifests into the combined
     * cdep-manifest.yml and upload manifests, headers and zips as GitHub
     * release assets. Uploading is skipped entirely for placeholder versions
     * (empty or "0.0.0"), in which case only the combined manifest is staged
     * locally for tests.
     */
    @Override
    ScriptBuilder deployRedistFiles(
        RemoteArchive githubRelease,
        OS[] allTargets,
        boolean uploadBadges) {
        File combinedManifest = new File(cdepFile.getParentFile(), "cdep-manifest.yml");
        File headers = new File(cdepFile.getParentFile(), "headers.zip");
        // Fold the headers zip into the manifest before any upload decisions.
        body("echo ${cdep} merge headers %s %s include %s", cdepFile, headers, cdepFile);
        body("${cdep} merge headers %s %s include %s", cdepFile, headers, cdepFile);
        body(ABORT_LAST_FAILED);
        if (targetVersion == null || targetVersion.length() == 0 || targetVersion.equals("0.0.0")) {
            body("echo Skipping upload because targetVersion='%s' %s", targetVersion, targetVersion.length());
            if (!combinedManifest.equals(cdepFile)) {
                body("# cdep-manifest.yml tracking: %s to %s", cdepFile, combinedManifest);
                body("cp %s %s", cdepFile, combinedManifest);
                body(ABORT_LAST_FAILED);
            } else {
                body("# cdep-manifest.yml tracking: not copying because it has the same name as combined");
                body("echo not copying %s to %s because it was already there. Still merge head", combinedManifest, cdepFile);
                body("ls %s", combinedManifest.getParent());
                body(ABORT_LAST_FAILED);
            }
            return this;
        }
        body("echo Not skipping upload because targetVersion='%s' %s", targetVersion, targetVersion.length());
        // Merging manifests from multiple travis runs is a PITA.
        // All runs need to upload cdep-manifest-[targetOS].yml.
        // The final run needs to figure out that it is the final run and also upload a merged
        // cdep-manifest.yml.
        // None of this needs to happen if specificTargetOS is null because that means there aren't
        // multiple travis runs.
        if (specificTargetOS != null) {
            assert !cdepFile.toString().endsWith("cdep-manifest.yml");
            if (allTargets.length == 1) {
                // There is a specificTargetOS specified but it is the only one.
                // We can combine the file locally.
                body("cp %s %s", cdepFile, combinedManifest);
                body(ABORT_LAST_FAILED);
                upload(headers, githubRelease);
                body(ABORT_LAST_FAILED);
                upload(combinedManifest, githubRelease);
                body(ABORT_LAST_FAILED);
            } else {
                // Accumulate a list of all targets to merge except for this one
                String otherCoordinates = "";
                for (OS os : allTargets) {
                    if (os != specificTargetOS) {
                        otherCoordinates += String.format("%s:%s/%s:%s ", targetGroupId, targetArtifactId, os, targetVersion);
                    }
                }
                // Now add this file
                String coordinates = otherCoordinates + cdepFile.toString();
                // Merge any existing manifest with the currently generated one.
                body("echo ${cdep} merge %s %s", coordinates, combinedManifest);
                body("${cdep} merge %s %s", coordinates, combinedManifest);
                body(ABORT_LAST_FAILED);
                // If the merge succeeded, that means we got all of the coordinates.
                // We can upload. Also need to fetch any partial dependencies so that
                // downstream calls to ./cdep for tests will have assets all ready.
                body("if [ -f '%s' ]; then", combinedManifest);
                body(" echo Fetching partial dependencies");
                body(" echo ${cdep} fetch %s", coordinates);
                body(" ${cdep} fetch %s", coordinates);
                body(" " + ABORT_LAST_FAILED);
                body(" echo Uploading %s", combinedManifest);
                upload(headers, githubRelease);
                body(ABORT_LAST_FAILED);
                upload(combinedManifest, githubRelease);
                body(ABORT_LAST_FAILED);
                if (uploadBadges) {
                    uploadBadges();
                }
                body("else");
                // If the merged failed then we still have to create a combined manifest for test
                // purposes but it won't be uploaded. Do the header merge at the same time as the
                // copy.
                body(" echo ${cdep} merge headers %s %s include %s", cdepFile, headers, combinedManifest);
                body(" ${cdep} merge headers %s %s include %s", cdepFile, headers, combinedManifest);
                body(" " + ABORT_LAST_FAILED);
                body("fi");
                // Upload the uncombined manifest
                upload(cdepFile, githubRelease);
            }
        } else {
            // There is not a specificTargetOS so there aren't multiple travis runs.
            // Just upload cdep-manifest.yml.
            assert cdepFile.toString().endsWith("cdep-manifest.yml");
            upload(headers, githubRelease);
            body(ABORT_LAST_FAILED);
            upload(cdepFile, githubRelease);
            body(ABORT_LAST_FAILED);
            if (uploadBadges) {
                uploadBadges();
            }
        }
        // Upload every produced zip that actually exists on disk.
        for (String zip : zips.keySet()) {
            String relativeZip = new File(".").toURI().relativize(new File(zip).toURI()).getPath();
            body("if [ -f '%s' ]; then", relativeZip);
            body(" echo Uploading %s", relativeZip);
            upload(new File(relativeZip), githubRelease);
            body("fi");
        }
        return this;
    }
private void upload(File file, RemoteArchive githubRelease) {
String user = targetGroupId.substring(targetGroupId.lastIndexOf(".") + 1);
body(" echo %s/%s/github-release upload --user %s --repo %s --tag %s --name %s --file %s",
TOOLS_FOLDER,
getHostArchive(githubRelease).unpackroot,
user,
targetArtifactId,
targetVersion, file.getName(), file.getAbsolutePath());
body(" %s/%s/github-release upload --user %s --repo %s --tag %s --name %s --file %s",
TOOLS_FOLDER,
getHostArchive(githubRelease).unpackroot,
user,
targetArtifactId,
targetVersion, file.getName(), file.getAbsolutePath());
body(ABORT_LAST_FAILED);
}
private ScriptBuilder uploadBadges() {
// Record build information
String badgeUrl = String.format("%s:%s:%s", targetGroupId, targetArtifactId, targetVersion);
badgeUrl = badgeUrl.replace(":", "%3A");
badgeUrl = badgeUrl.replace("-", "
badgeUrl = String.format("https://img.shields.io/badge/cdep-%s-brightgreen.svg", badgeUrl);
String badgeFolder = String.format("%s/%s", targetGroupId, targetArtifactId);
body("if [ -n \"$TRAVIS_TAG\" ]; then");
body(" if [ -n \"$CDEP_BADGES_API_KEY\" ]; then");
body(" echo git clone https://github.com/cdep-io/cdep-io.github.io.git");
body(" git clone https://github.com/cdep-io/cdep-io.github.io.git");
body(" " + ABORT_LAST_FAILED);
body(" pushd cdep-io.github.io");
body(" mkdir -p %s/latest", badgeFolder);
bodyWithRedirect(" echo curl %s > %s/latest/latest.svg ", badgeUrl, badgeFolder);
bodyWithRedirect(" curl %s > %s/latest/latest.svg ", badgeUrl, badgeFolder);
body(" " + ABORT_LAST_FAILED);
body(" echo git add %s/latest/latest.svg", badgeFolder);
body(" git add %s/latest/latest.svg", badgeFolder);
body(" " + ABORT_LAST_FAILED);
body(" echo git -c user.name='cmakeify' -c user.email='cmakeify' commit -m init");
body(" git -c user.name='cmakeify' -c user.email='cmakeify' commit -m init");
body(" " + ABORT_LAST_FAILED);
body(" echo git push -f -q https://cdep-io:$CDEP_BADGES_API_KEY@github.com/cdep-io/cdep-io.github.io");
body(" git push -f -q https://cdep-io:$CDEP_BADGES_API_KEY@github.com/cdep-io/cdep-io.github.io");
body(" " + ABORT_LAST_FAILED);
body(" popd");
body(" else");
body(" echo Add CDEP_BADGES_API_KEY to Travis settings to get badges!");
body(" fi");
body("fi");
return this;
}
    /** Returns the accumulated bash script text. */
    @Override
    public String toString() {
        return body.toString();
    }
}
package org.purl.wf4ever.robundle;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.nio.charset.Charset;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.junit.Test;
public class TestBundles {
    /**
     * Asserts that the zip begins with the RO Bundle mimetype entry stored
     * uncompressed at the fixed offset the UCF packaging convention requires,
     * immediately followed by the next local-file-header magic "PK".
     */
    protected void checkSignature(Path zip) throws IOException {
        String MEDIATYPE = "application/vnd.wf4ever.robundle+zip";
        // Check position 30++ according to RO Bundle specification
        // http://purl.org/wf4ever/ro-bundle#ucf
        byte[] expected = ("mimetype" + MEDIATYPE + "PK").getBytes("ASCII");
        try (InputStream in = Files.newInputStream(zip)) {
            byte[] signature = new byte[expected.length];
            int MIME_OFFSET = 30;
            // NOTE(review): a single skip() is not guaranteed to skip all 30 bytes;
            // the assert surfaces (rather than works around) a short skip.
            assertEquals(MIME_OFFSET, in.skip(MIME_OFFSET));
            assertEquals(expected.length, in.read(signature));
            assertArrayEquals(expected, signature);
        }
    }
    // Closing a bundle deletes its temporary backing file and closes its filesystem.
    @Test
    public void close() throws Exception {
        Bundle bundle = Bundles.createBundle();
        assertTrue(Files.exists(bundle.getSource()));
        assertTrue(bundle.getFileSystem().isOpen());
        bundle.close();
        assertFalse(Files.exists(bundle.getSource()));
        assertFalse(bundle.getFileSystem().isOpen());
    }
    // A closed bundle's zip can be reopened without error.
    @Test
    public void closeAndOpenBundle() throws Exception {
        Bundle bundle = Bundles.createBundle();
        Path zip = Bundles.closeBundle(bundle);
        Bundles.openBundle(zip);
    }
    // Content written before closing survives a close/reopen round trip.
    @Test
    public void closeAndOpenBundleWithStringValue() throws Exception {
        Bundle bundle = Bundles.createBundle();
        Path hello = bundle.getRoot().resolve("hello.txt");
        Bundles.setStringValue(hello, "Hello");
        Path zip = Bundles.closeBundle(bundle);
        Bundle newBundle = Bundles.openBundle(zip);
        Path newHello = newBundle.getRoot().resolve("hello.txt");
        assertEquals("Hello", Bundles.getStringValue(newHello));
    }
    // closeAndSaveBundle writes the bundle to the given (non-existent) destination.
    @Test
    public void closeAndSaveBundle() throws Exception {
        Bundle bundle = Bundles.createBundle();
        Path destination = Files.createTempFile("test", ".zip");
        // Only the path is needed; the file itself must not pre-exist.
        Files.delete(destination);
        assertFalse(Files.exists(destination));
        Bundles.closeAndSaveBundle(bundle, destination);
        assertTrue(Files.exists(destination));
    }
    // closeBundle returns a readable zip at the bundle's source path with a
    // valid RO Bundle signature.
    @Test
    public void closeBundle() throws Exception {
        Bundle bundle = Bundles.createBundle();
        Path zip = Bundles.closeBundle(bundle);
        assertTrue(Files.isReadable(zip));
        assertEquals(zip, bundle.getSource());
        checkSignature(zip);
    }
    // A bundle created without an explicit path uses a temporary file that is
    // removed when the bundle closes.
    @Test
    public void createBundle() throws Exception {
        Path source = null;
        try (Bundle bundle = Bundles.createBundle()) {
            assertTrue(Files.isDirectory(bundle.getRoot()));
            source = bundle.getSource();
            assertTrue(Files.exists(source));
        }
        // As it was temporary file it should be deleted on close
        assertFalse(Files.exists(source));
    }
@Test
public void createBundlePath() throws Exception {
    // A bundle created at a caller-supplied path keeps that path as its
    // source and must NOT delete it on close (caller owns the file).
    Path source = Files.createTempFile("test", ".zip");
    try (Bundle bundle = Bundles.createBundle(source)) {
        assertTrue(Files.isDirectory(bundle.getRoot()));
        assertEquals(source, bundle.getSource());
        assertTrue(Files.exists(source));
    }
    // As it was a specific path, it should NOT be deleted on close
    assertTrue(Files.exists(source));
}
@Test
public void getReference() throws Exception {
    // A URI stored with setReference is returned verbatim by getReference.
    Bundle bundle = Bundles.createBundle();
    Path hello = bundle.getRoot().resolve("hello");
    Bundles.setReference(hello, URI.create("http://example.org/test"));
    URI uri = Bundles.getReference(hello);
    assertEquals("http://example.org/test", uri.toASCIIString());
}
@Test
public void getReferenceFromWin8() throws Exception {
    // A reference stored as a Windows-8-style InternetShortcut ("win8.url",
    // loaded from test resources) is found when asking for the sibling name
    // without the ".url" extension.
    Bundle bundle = Bundles.createBundle();
    Path win8 = bundle.getRoot().resolve("win8");
    Path win8Url = bundle.getRoot().resolve("win8.url");
    Files.copy(getClass().getResourceAsStream("/win8.url"), win8Url);
    URI uri = Bundles.getReference(win8);
    assertEquals("http://example.com/made-in-windows-8", uri.toASCIIString());
}
@Test
public void getStringValue() throws Exception {
    // getStringValue round-trips a stored string, and is null-safe:
    // a null path yields null rather than throwing.
    Bundle bundle = Bundles.createBundle();
    Path hello = bundle.getRoot().resolve("hello");
    String string = "A string";
    Bundles.setStringValue(hello, string);
    assertEquals(string, Bundles.getStringValue(hello));
    assertEquals(null, Bundles.getStringValue(null));
}
/**
 * Returns true if the given directory contains no entries.
 *
 * @param path directory to inspect
 * @throws IOException if the directory cannot be read
 */
protected boolean isEmpty(Path path) throws IOException {
    // A directory is empty exactly when its stream yields no first entry.
    try (DirectoryStream<Path> entries = Files.newDirectoryStream(path)) {
        for (Path ignored : entries) {
            return false;
        }
        return true;
    }
}
@Test
public void isMissing() throws Exception {
    // A path that was never written is "missing": not a value, not a reference.
    Bundle bundle = Bundles.createBundle();
    Path missing = bundle.getRoot().resolve("missing");
    assertFalse(Bundles.isValue(missing));
    assertTrue(Bundles.isMissing(missing));
    assertFalse(Bundles.isReference(missing));
}
@Test
public void isReference() throws Exception {
    // After setReference, the path classifies as a reference only —
    // the three predicates are mutually exclusive.
    Bundle bundle = Bundles.createBundle();
    Path ref = bundle.getRoot().resolve("ref");
    Bundles.setReference(ref, URI.create("http://example.org/test"));
    assertTrue(Bundles.isReference(ref));
    assertFalse(Bundles.isMissing(ref));
    assertFalse(Bundles.isValue(ref));
}
@Test
public void isValue() throws Exception {
    // After setStringValue, the path classifies as a value, not a reference.
    Bundle bundle = Bundles.createBundle();
    Path hello = bundle.getRoot().resolve("hello");
    Bundles.setStringValue(hello, "Hello");
    assertTrue(Bundles.isValue(hello));
    assertFalse(Bundles.isReference(hello));
}
/**
 * Lists the file names directly under the given directory, sorted
 * lexicographically.
 *
 * @param path directory to list
 * @return sorted list of entry names (a null file name becomes "null")
 * @throws IOException if the directory cannot be read
 */
protected List<String> ls(Path path) throws IOException {
    List<String> names = new ArrayList<>();
    try (DirectoryStream<Path> entries = Files.newDirectoryStream(path)) {
        for (Path entry : entries) {
            // String.valueOf matches the original `getFileName() + ""` behavior
            // (a null file name is rendered as "null").
            names.add(String.valueOf(entry.getFileName()));
        }
    }
    Collections.sort(names);
    return names;
}
@Test
public void safeMove() throws Exception {
    // safeMove transfers a file from the ordinary filesystem into the
    // bundle: the source directory ends up empty and the bundle root
    // contains the moved file alongside the mandatory "mimetype" entry.
    Path tmp = Files.createTempDirectory("test");
    Path f1 = tmp.resolve("f1");
    Files.createFile(f1);
    assertFalse(isEmpty(tmp));
    Bundle db = Bundles.createBundle();
    Path f2 = db.getRoot().resolve("f2");
    Bundles.safeMove(f1, f2);
    assertTrue(isEmpty(tmp));
    assertEquals(Arrays.asList("f2", "mimetype"), ls(db.getRoot()));
}
@Test(expected = IOException.class)
public void safeMoveFails() throws Exception {
    // Moving onto an existing directory must fail with IOException AND must
    // leave the source intact (the "safe" part) — the finally block verifies
    // nothing was destroyed before the exception propagates.
    Path tmp = Files.createTempDirectory("test");
    Path f1 = tmp.resolve("f1");
    Path d1 = tmp.resolve("d1");
    Files.createFile(f1);
    Files.createDirectory(d1);
    try {
        Bundles.safeMove(f1, d1);
    } finally {
        assertTrue(Files.exists(f1));
        assertEquals(Arrays.asList("d1", "f1"), ls(tmp));
    }
}
@Test
public void setReference() throws Exception {
    // setReference stores the URI as a sibling "<name>.url" file in
    // InternetShortcut format; the plain name itself does not exist on disk.
    // NOTE(review): setReference is called twice with the same URI — possibly
    // intended to test overwriting an existing reference; confirm, or drop
    // the first call if it is accidental duplication.
    Bundle bundle = Bundles.createBundle();
    Path ref = bundle.getRoot().resolve("ref");
    Bundles.setReference(ref, URI.create("http://example.org/test"));
    URI uri = URI.create("http://example.org/test");
    Path f = Bundles.setReference(ref, uri);
    assertEquals("ref.url", f.getFileName().toString());
    assertEquals(bundle.getRoot(), f.getParent());
    assertFalse(Files.exists(ref));
    List<String> uriLines = Files.readAllLines(f, Charset.forName("ASCII"));
    assertEquals(3, uriLines.size());
    assertEquals("[InternetShortcut]", uriLines.get(0));
    assertEquals("URL=http://example.org/test", uriLines.get(1));
    assertEquals("", uriLines.get(2));
}
@Test
public void setReferenceIri() throws Exception {
    // Non-ASCII IRI components (punycode host stays as-is; path and fragment
    // characters are percent-encoded as UTF-8) must be escaped so the .url
    // file remains pure ASCII.
    Bundle bundle = Bundles.createBundle();
    Path ref = bundle.getRoot().resolve("ref");
    URI uri = new URI("http", "xn--bcher-kva.example.com", "/s\u00F8iland/\u2603snowman", "\u2605star");
    Path f = Bundles.setReference(ref, uri);
    List<String> uriLines = Files.readAllLines(f, Charset.forName("ASCII"));
    // TODO: Double-check that this is actually correct escaping :)
    assertEquals("URL=http://xn--bcher-kva.example.com/s%C3%B8iland/%E2%98%83snowman#%E2%98%85star",
        uriLines.get(1));
}
@Test
public void setStringValue() throws Exception {
    // setStringValue writes the string to the path as UTF-8 text.
    Bundle bundle = Bundles.createBundle();
    Path file = bundle.getRoot().resolve("file");
    String string = "A string";
    Bundles.setStringValue(file, string);
    assertEquals(string, Files.readAllLines(file, Charset.forName("UTF-8")).get(0));
}
@Test
public void withExtension() throws Exception {
    // withExtension replaces only the LAST extension of the file name:
    // "" strips it, "." leaves a trailing dot, and a multi-dot suffix like
    // ".test.many.pdf" is appended whole — but a later replacement on that
    // result again swaps only the final ".pdf".
    Path testDir = Files.createTempDirectory("test");
    Path fileTxt = testDir.resolve("file.txt");
    assertEquals("file.txt", fileTxt.getFileName().toString()); // better be!
    Path fileHtml = Bundles.withExtension(fileTxt, ".html");
    assertEquals(fileTxt.getParent(), fileHtml.getParent());
    assertEquals("file.html", fileHtml.getFileName().toString());
    Path fileDot = Bundles.withExtension(fileTxt, ".");
    assertEquals("file.", fileDot.getFileName().toString());
    Path fileEmpty = Bundles.withExtension(fileTxt, "");
    assertEquals("file", fileEmpty.getFileName().toString());
    Path fileDoc = Bundles.withExtension(fileEmpty, ".doc");
    assertEquals("file.doc", fileDoc.getFileName().toString());
    Path fileManyPdf = Bundles.withExtension(fileTxt, ".test.many.pdf");
    assertEquals("file.test.many.pdf", fileManyPdf.getFileName().toString());
    Path fileManyTxt = Bundles.withExtension(fileManyPdf, ".txt");
    assertEquals("file.test.many.txt", fileManyTxt.getFileName().toString());
}
} |
package com.kpelykh.docker.client.model;
import java.util.List;
import org.codehaus.jackson.annotate.JsonIgnoreProperties;
import org.codehaus.jackson.annotate.JsonProperty;
/**
*
* @author Konstantin Pelykh (kpelykh@gmail.com)
*
*/
@JsonIgnoreProperties(ignoreUnknown = true)
public class Container {
@JsonProperty("Id")
private String id;
@JsonProperty("Command")
private String command;
@JsonProperty("Image")
private String image;
@JsonProperty("Created")
private long created;
@JsonProperty("Status")
private String status;
/*
* Example: "Ports": { "22/tcp": [ { "HostIp": "0.0.0.0", "HostPort": "8022" } ] }
*/
@JsonProperty("Ports")
public List<Container.Port> ports;
@JsonProperty("SizeRw")
private int size;
@JsonProperty("SizeRootFs")
private int sizeRootFs;
public String getId() {
return id;
}
public String getCommand() {
return command;
}
public String getImage() {
return image;
}
public long getCreated() {
return created;
}
public String getStatus() {
return status;
}
public List<Container.Port> getPorts() {
return ports;
}
public void setPorts(List<Container.Port> ports) {
this.ports = ports;
}
public int getSize() {
return size;
}
public int getSizeRootFs() {
return sizeRootFs;
}
@Override
public String toString() {
return "Container{" + "id='" + id + '\'' + ", command='" + command + '\'' + ", image='" + image + '\''
+ ", created=" + created + ", status='" + status + '\'' + ", ports=" + ports + ", size=" + size
+ ", sizeRootFs=" + sizeRootFs + '}';
}
@JsonIgnoreProperties(ignoreUnknown = true)
public static class Port {
@JsonProperty("PrivatePort")
private int privatePort;
@JsonProperty("IP")
private String ip;
@JsonProperty("PublicPort")
private int publicPort;
@JsonProperty("Type")
private String type;
public void setPrivatePort(int privatePort) {
this.privatePort = privatePort;
}
public void setIp(String ip) {
this.ip = ip;
}
public void setPublicPort(int publicPort) {
this.publicPort = publicPort;
}
public void setType(String type) {
this.type = type;
}
public int getPrivatePort() {
return privatePort;
}
public String getIp() {
return ip;
}
public int getPublicPort() {
return publicPort;
}
public String getType() {
return type;
}
}
} |
package seedu.taskell.model.task;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;

import org.junit.Test;

import seedu.taskell.commons.exceptions.IllegalValueException;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
//@@author A0139257X
public class TaskDateTest {
@Test
public void assertValidFormatBehaviourForDate() {
//Valid Day of the Week
assertTrue(TaskDate.isValidDate("mon"));
assertTrue(TaskDate.isValidDate("tue"));
assertTrue(TaskDate.isValidDate("WED"));
assertTrue(TaskDate.isValidDate("thurs"));
assertTrue(TaskDate.isValidDate("fRi"));
assertTrue(TaskDate.isValidDate("saturday"));
assertTrue(TaskDate.isValidDate("sun"));
//Valid Month
assertTrue(TaskDate.isValidDate("jan"));
assertTrue(TaskDate.isValidDate("fEb"));
assertTrue(TaskDate.isValidDate("march"));
assertTrue(TaskDate.isValidDate("APRIL"));
assertTrue(TaskDate.isValidDate("mAy"));
assertTrue(TaskDate.isValidDate("junE"));
assertTrue(TaskDate.isValidDate("jul"));
assertTrue(TaskDate.isValidDate("aug"));
assertTrue(TaskDate.isValidDate("sept"));
assertTrue(TaskDate.isValidDate("oct"));
assertTrue(TaskDate.isValidDate("November"));
assertTrue(TaskDate.isValidDate("December"));
//Valid Month and Year
assertTrue(TaskDate.isValidDate("may 2016"));
assertTrue(TaskDate.isValidDate("may-2016"));
assertTrue(TaskDate.isValidDate("may.2016"));
assertTrue(TaskDate.isValidDate("may/2016"));
//Valid Day and Month
assertTrue(TaskDate.isValidDate("1 jan"));
assertTrue(TaskDate.isValidDate("1-jan"));
assertTrue(TaskDate.isValidDate("1.jan"));
assertTrue(TaskDate.isValidDate("1/jan"));
//Valid full Date
assertTrue(TaskDate.isValidDate(TaskDate.DEFAULT_DATE));
assertTrue(TaskDate.isValidDate("1 1 2016"));
assertTrue(TaskDate.isValidDate("1 jan 2016"));
assertTrue(TaskDate.isValidDate("1-1-2016"));
assertTrue(TaskDate.isValidDate("1-jan-2016"));
assertTrue(TaskDate.isValidDate("1.1.2016"));
assertTrue(TaskDate.isValidDate("8.DeCeMbEr.2016"));
assertTrue(TaskDate.isValidDate("8/8/2016"));
assertTrue(TaskDate.isValidDate("8/jan/2016"));
assertTrue(TaskDate.isValidDate("1-1/2016"));
assertTrue(TaskDate.isValidDate("1-jan/2016"));
assertTrue(TaskDate.isValidDate("1-1.2016"));
assertTrue(TaskDate.isValidDate("1-jan.2016"));
assertTrue(TaskDate.isValidDate("1/1-2016"));
assertTrue(TaskDate.isValidDate("1/jan-2016"));
assertTrue(TaskDate.isValidDate("1/1.2016"));
assertTrue(TaskDate.isValidDate("1/jan.2016"));
assertTrue(TaskDate.isValidDate("1.1/2016"));
assertTrue(TaskDate.isValidDate("1.jan/2016"));
assertTrue(TaskDate.isValidDate("1.1-2016"));
assertTrue(TaskDate.isValidDate("1.jan-2016"));
//Valid Today
assertTrue(TaskDate.isValidDate("Today"));
assertTrue(TaskDate.isValidDate("tdy"));
//Valid Tomorrow
assertTrue(TaskDate.isValidDate("Tomorrow"));
assertTrue(TaskDate.isValidDate("tmr"));
}
@Test
public void assertInvalidFormatBehaviourForDate() {
assertFalse(TaskDate.isValidDate(""));
assertFalse(TaskDate.isValidDate(null));
assertFalse(TaskDate.isValidDate("1st January"));
assertFalse(TaskDate.isValidDate("1/2"));
assertFalse(TaskDate.isValidDate("01022016"));
assertFalse(TaskDate.isValidDate("2016"));
assertFalse(TaskDate.isValidDate("NotAValidDate"));
}
@Test
public void assertNewTaskDateBehaviour() throws IllegalValueException {
TaskDate today = TaskDate.getTodayDate();
TaskDate validDayOfWeek = new TaskDate(today.getDayNameInWeek());
assertEquals(today.getNextWeek(), validDayOfWeek);
TaskDate validMonth = new TaskDate("september");
assertEquals("1-9-2016", validMonth.toString());
TaskDate validMonthAndYear = new TaskDate("dec-2016");
assertEquals("1-12-2016", validMonthAndYear.toString());
TaskDate validDayAndMonth = new TaskDate("1-jan");
assertEquals("1-1-2016", validDayAndMonth.toString());
TaskDate validFullDate = new TaskDate("1-1-2011");
assertEquals("1-1-2011", validFullDate.toString());
TaskDate validToday = new TaskDate("today");
DateTimeFormatter standardFormat = DateTimeFormatter.ofPattern("d-MM-yyyy");
assertEquals(LocalDate.now().format(standardFormat), validToday.toString());
TaskDate validTomorrow = new TaskDate("tmr");
standardFormat = DateTimeFormatter.ofPattern("d-MM-yyyy");
assertEquals(LocalDate.now().plusDays(1).format(standardFormat), validTomorrow.toString());
try {
TaskDate invalidDate = new TaskDate("NOT-A-VALID-DATE");
} catch (IllegalValueException ive) {
assertEquals(TaskDate.MESSAGE_TASK_DATE_CONSTRAINTS, ive.getMessage());
}
}
@Test
public void assertCorrectTodayDate() {
DateTimeFormatter standardFormat = DateTimeFormatter.ofPattern("d-MM-yyyy");
assertEquals(LocalDate.now().format(standardFormat), TaskDate.getTodayDate().toString());
}
@Test
public void assertCorrectTomorrowDate() {
DateTimeFormatter standardFormat = DateTimeFormatter.ofPattern("d-MM-yyyy");
assertEquals(LocalDate.now().plusDays(1).format(standardFormat), TaskDate.getTomorrowDate().toString());
}
@Test
public void assertCorrectThisYear() {
assertEquals(LocalDate.now().getYear() + "", TaskDate.getThisYear());
}
@Test
public void assertCorrectGetNextDay() throws IllegalValueException {
TaskDate today = new TaskDate("1-1-2016");
TaskDate nextDay = new TaskDate("2-1-2016");
assertEquals(nextDay, today.getNextDay());
}
@Test
public void assertCorrectGetNextWeek() throws IllegalValueException {
TaskDate today = new TaskDate("1-1-2016");
TaskDate nextWeek = new TaskDate("8-1-2016");
assertEquals(nextWeek, today.getNextWeek());
}
@Test
public void assertCorrectLocalDate() throws IllegalValueException {
TaskDate date = new TaskDate("1-1-2100");
LocalDate actual = date.getLocalDate();
LocalDate expected = LocalDate.of(2100, 1, 1);
assertEquals(expected, actual);
}
@Test
public void assertDateisBeforeBehaviour() throws IllegalValueException {
TaskDate startDate = new TaskDate("1-1-2100");
TaskDate endDateDiffDaySameMonthSameYear = new TaskDate("10-1-2100");
TaskDate endDateSameDayDiffMonthSameYear = new TaskDate("1-2-2100");
TaskDate endDateSameDaySameMonthDiffYear = new TaskDate("1-1-2200");
assertTrue(startDate.isBefore(endDateDiffDaySameMonthSameYear));
assertTrue(startDate.isBefore(endDateSameDayDiffMonthSameYear));
assertTrue(startDate.isBefore(endDateSameDaySameMonthDiffYear));
assertFalse(endDateDiffDaySameMonthSameYear.isBefore(startDate));
assertFalse(endDateSameDayDiffMonthSameYear.isBefore(startDate));
assertFalse(endDateSameDaySameMonthDiffYear.isBefore(startDate));
}
@Test
public void assertDateisAfterBehaviour() throws IllegalValueException {
TaskDate startDate = new TaskDate("1-1-2100");
TaskDate endDateDiffDaySameMonthSameYear = new TaskDate("10-1-2100");
TaskDate endDateSameDayDiffMonthSameYear = new TaskDate("1-2-2100");
TaskDate endDateSameDaySameMonthDiffYear = new TaskDate("1-1-2200");
assertTrue(endDateDiffDaySameMonthSameYear.isAfter(startDate));
assertTrue(endDateSameDayDiffMonthSameYear.isAfter(startDate));
assertTrue(endDateSameDaySameMonthDiffYear.isAfter(startDate));
assertFalse(startDate.isAfter(endDateDiffDaySameMonthSameYear));
assertFalse(startDate.isAfter(endDateSameDayDiffMonthSameYear));
assertFalse(startDate.isAfter(endDateSameDaySameMonthDiffYear));
}
@Test
public void assertCorrectDisplayDate() throws IllegalValueException {
TaskDate date = new TaskDate("22-10-2016");
assertEquals("Saturday, 22 October 2016", date.getDisplayDate());
}
@Test
public void assertCorrectToString() throws IllegalValueException {
TaskDate date = new TaskDate("1-1-2015");
assertEquals("1-1-2015", date.toString());
}
@Test
public void assertEqualsBehaviour() throws IllegalValueException {
TaskDate date = new TaskDate("1-1-2015");
TaskDate sameDate = new TaskDate("1-1-2015");
TaskDate differentDate = new TaskDate("2-2-2016");
assertEquals(date, date);
assertEquals(date, sameDate);
assertNotSame(date, differentDate);
assertNotSame(date, "1-1-2015");
assertNotSame(date, "NOT A DATE");
assertNotSame(date, null);
}
} |
package com.laytonsmith.core.constructs;
import com.laytonsmith.PureUtilities.Version;
import com.laytonsmith.annotations.typeof;
import com.laytonsmith.core.MSVersion;
import com.laytonsmith.core.natives.interfaces.Mixed;
import java.util.concurrent.atomic.AtomicLong;
/**
* A resource is a large or mutable data structure that is kept in memory with external resource management. This makes
* certain things more efficient, like string builders, xml parser, streams, etc, at the cost of making user code
* slightly more complicated. Therefore, this is a stopgap measure that WILL be removed at some point, once Objects are
* created.
*/
@typeof("ms.lang.Resource")
public class CResource<T> extends Construct {

    @SuppressWarnings("FieldNameHidesFieldInSuperclass")
    public static final CClassType TYPE = CClassType.get(CResource.class);

    // Global monotonically increasing id source; each CResource instance
    // takes the next value, so ids are unique per JVM run.
    private static final AtomicLong RESOURCE_POOL = new AtomicLong(0);
    // Unique id of this resource, assigned from RESOURCE_POOL at construction.
    private final long id;
    // The wrapped native object; never exposed to user code directly.
    private final T resource;
    // Strategy used to render this resource in val()/toString(); never null.
    private final ResourceToString toString;

    /**
     * Constructs a new CResource, given some underlying object.
     *
     * Uses a default ResourceToString that renders
     * "Resource@<id>:[<ClassName@hash>:]<resource.toString()>" — the
     * ClassName@hash addendum is only included when the resource overrides
     * toString() (i.e. its toString differs from Object's default).
     *
     * @param resource
     * @param t
     */
    public CResource(final T resource, Target t) {
        this(resource, new ResourceToString() {
            @Override
            public String getString(CResource id) {
                // This is the original implementation of Object.toString()
                String original = id.getResource().getClass().getName() + "@"
                    + Integer.toHexString(id.getResource().hashCode());
                String addendum = "";
                if(!original.equals(id.getResource().toString())) {
                    addendum = original + ":";
                }
                return "Resource@" + id.getId() + ":"
                    + addendum
                    + id.getResource().toString();
            }
        }, t);
    }

    /**
     * Constructs a new CResource, given some underlying object. The ResourceToString object allows you to override how
     * this object is toString'd.
     *
     * @param resource the native object to wrap (may itself be null)
     * @param toString rendering strategy; must not be null
     * @param t code target for error reporting
     * @throws NullPointerException if toString is null
     */
    public CResource(T resource, ResourceToString toString, Target t) {
        super("", ConstructType.RESOURCE, t);
        this.resource = resource;
        // Fail fast: val() would otherwise NPE much later, far from the cause.
        if(toString == null) {
            throw new NullPointerException();
        }
        this.toString = toString;
        id = RESOURCE_POOL.incrementAndGet();
    }

    /** Returns the JVM-unique id assigned to this resource. */
    public long getId() {
        return id;
    }

    /** Returns the wrapped native object. */
    public T getResource() {
        return resource;
    }

    @Override
    public String val() {
        // Delegates rendering to the (possibly caller-supplied) strategy.
        return toString.getString(this);
    }

    @Override
    public String toString() {
        return val();
    }

    @Override
    public boolean isDynamic() {
        // Resources wrap mutable native state, so they are never constant.
        return true;
    }

    @Override
    public String docs() {
        return "A resource is a value that represents an underlying native object. The object cannot be accessed directly.";
    }

    @Override
    public Version since() {
        return MSVersion.V3_3_1;
    }

    /** Strategy interface for rendering a CResource as a string. */
    public static interface ResourceToString {

        /**
         * Returns a toString for the underlying object.
         *
         * @param self The actual resource being toString'd.
         * @return
         */
        String getString(CResource self);
    }

    @Override
    public CClassType[] getSuperclasses() {
        return new CClassType[]{Mixed.TYPE};
    }

    @Override
    public CClassType[] getInterfaces() {
        return CClassType.EMPTY_CLASS_ARRAY;
    }
}
package tests.tickets;
import org.testng.Assert;
import org.testng.annotations.Test;
import pages.HomePage;
import pages.TicketsPage;
import tests.BaseTest;
public class SearchInfantTicketsTest extends BaseTest {
@Test(dataProvider = "tickets", dataProviderClass = TicketsData.class)
public void searchInfantTickets(int adults, int children, int infants) {
TicketsPage ticketsPage = new HomePage().openAirTicketsPage();
ticketsPage.searchTickets(adults, children, infants);
Assert.assertTrue(ticketsPage.isErrorMessageDisplayed(), "Error pop-up is not displayed.");
}
} |
package com.lesserhydra.secondchance;
import java.util.Arrays;
import java.util.Deque;
import java.util.Iterator;
import java.util.UUID;
import java.util.stream.Stream;
import org.bukkit.Bukkit;
import org.bukkit.Chunk;
import org.bukkit.Location;
import org.bukkit.World;
import org.bukkit.entity.ArmorStand;
import org.bukkit.entity.Entity;
import org.bukkit.entity.EntityType;
import org.bukkit.entity.Player;
import org.bukkit.metadata.FixedMetadataValue;
import org.bukkit.scheduler.BukkitTask;
import com.lesserhydra.secondchance.configuration.ConfigOptions;
/**
 * Per-world manager of deathpoints: owns the world's deathpoint collection,
 * spawns/despawns their hitboxes around chunk and world lifecycle events,
 * and runs the repeating particle/sound/expiry tasks.
 */
public class WorldHandler {

    private final SecondChance plugin;
    private final ConfigOptions options;
    private final World world;

    // All deathpoints in this world; null while the handler is deinitialized.
    private Deque<Deathpoint> worldDeathpoints;
    // Repeating tasks: particleTask always runs after init(); the other two
    // are only scheduled when their config options enable them.
    private BukkitTask particleTask;
    private BukkitTask ambientSoundTask;
    private BukkitTask timeCheckTask;

    WorldHandler(SecondChance plugin, ConfigOptions options, World world) {
        this.plugin = plugin;
        this.options = options;
        this.world = world;
    }

    /** Streams all deathpoints currently tracked in this world. */
    public Stream<Deathpoint> deathpoints() {
        return worldDeathpoints.stream();
    }

    /** Destroys a deathpoint and removes it from tracking. */
    public void destroyDeathpoint(Deathpoint deathpoint) {
        deathpoint.destroy();
        worldDeathpoints.remove(deathpoint);
    }

    /**
     * Loads saved deathpoints, cleans up leftover hitbox armorstands, seeds
     * online players with a "safe position", and starts the repeating tasks.
     */
    void init() {
        //Remove residual hitboxes in world
        world.getEntities().stream()
            .filter(e -> e.getType() == EntityType.ARMOR_STAND)
            .map(e -> (ArmorStand) e)
            .filter(SecondChance.compat()::armorstandIsHitbox)
            .peek(e -> plugin.getLogger().warning("Removing residual armorstand."))
            .forEach(Entity::remove);
        //Initiate all deathpoints in world
        this.worldDeathpoints = plugin.getSaveHandler().load(world);
        worldDeathpoints.forEach(Deathpoint::spawnHitbox);
        //Add initial "safe" positions to all online players in world
        world.getPlayers().stream()
            .filter(player -> !player.hasMetadata("lastSafePosition"))
            .forEach(player -> player.setMetadata("lastSafePosition", new FixedMetadataValue(plugin, player.getLocation().add(0, 1, 0))));
        //Start particle timer for world
        particleTask = Bukkit.getScheduler().runTaskTimer(plugin, () -> worldDeathpoints.forEach(this::runParticles), 0, options.particleDelay);
        //Start ambient sound timer for world
        if (options.ambientSoundDelay > 0 && options.ambientSound.isEnabled()) {
            ambientSoundTask = Bukkit.getScheduler().runTaskTimer(plugin, () -> worldDeathpoints.forEach(this::runAmbientSound), 0, options.ambientSoundDelay);
        }
        //Start time check timer for world
        if (options.timeCheckDelay > 0 && options.ticksTillForget >= 0) {
            timeCheckTask = Bukkit.getScheduler().runTaskTimer(plugin, this::updateTicksTillForget, 0, options.timeCheckDelay);
        }
    }

    /**
     * Reverse of init(): cancels tasks, despawns hitboxes, persists the
     * deathpoints, and nulls the collection.
     */
    void deinit() {
        //Cancel tasks (ambient/time tasks may never have been scheduled)
        particleTask.cancel();
        if (ambientSoundTask != null) ambientSoundTask.cancel();
        if (timeCheckTask != null) timeCheckTask.cancel();
        //Despawn hitboxes
        worldDeathpoints.stream()
            .forEach(Deathpoint::despawnHitbox);
        //Save
        plugin.getSaveHandler().save(world, worldDeathpoints);
        //Clear members
        worldDeathpoints = null;
    }

    /** Registers a new deathpoint and spawns its hitbox immediately. */
    void addDeathpoint(Deathpoint deathpoint) {
        deathpoint.spawnHitbox();
        worldDeathpoints.add(deathpoint);
    }

    /**
     * Chunk load hook: strips leftover hitbox armorstands from the chunk,
     * then spawns hitboxes for deathpoints located in it.
     */
    void onChunkLoad(Chunk chunk) {
        //Remove residual hitboxes
        Arrays.stream(chunk.getEntities())
            .filter(e -> e.getType() == EntityType.ARMOR_STAND)
            .map(e -> (ArmorStand) e)
            .filter(SecondChance.compat()::armorstandIsHitbox)
            .peek(e -> plugin.getLogger().warning("Removing residual armorstand."))
            .forEach(Entity::remove);
        //Spawn deathpoint hitboxes
        worldDeathpoints.stream()
            .filter((point) -> chunk.equals(point.getLocation().getChunk()))
            .forEach(Deathpoint::spawnHitbox);
    }

    /** Chunk unload hook: despawns hitboxes for deathpoints in the chunk. */
    void onChunkUnload(Chunk chunk) {
        worldDeathpoints.stream()
            .filter((point) -> chunk.equals(point.getLocation().getChunk()))
            .forEach(Deathpoint::despawnHitbox);
    }

    /**
     * World save hook: persists deathpoints, then despawns hitboxes for the
     * save and re-spawns them one tick later (guarding against the world
     * having been unloaded in the meantime).
     */
    void onWorldSave() {
        //Save
        plugin.getSaveHandler().save(world, worldDeathpoints);
        //Despawn hitboxes
        worldDeathpoints.stream()
            .forEachOrdered(Deathpoint::despawnHitbox);
        //Schedule hitbox respawn
        final UUID worldUUID = world.getUID();
        Bukkit.getScheduler().runTaskLater(plugin, () -> {
            if (Bukkit.getWorld(worldUUID) == null) return;
            worldDeathpoints.stream()
                .forEach(Deathpoint::spawnHitbox);
        }, 1);
    }

    /**
     * Counts down the death-based expiry for all of this player's
     * deathpoints, forgetting any that reach their limit.
     */
    void updateDeathsTillForget(Player player) {
        for (Iterator<Deathpoint> it = worldDeathpoints.iterator(); it.hasNext();) {
            Deathpoint deathpoint = it.next();
            if (!deathpoint.getOwnerUniqueId().equals(player.getUniqueId())) continue;
            if (deathpoint.updateDeathsTillForget()) forgetDeathpoint(deathpoint, it);
        }
    }

    /**
     * Counts down the time-based expiry for all deathpoints, forgetting any
     * that reach their limit. Runs every options.timeCheckDelay ticks.
     */
    void updateTicksTillForget() {
        for (Iterator<Deathpoint> it = worldDeathpoints.iterator(); it.hasNext();) {
            Deathpoint deathpoint = it.next();
            if (deathpoint.updateTicksTillForget(options.timeCheckDelay)) forgetDeathpoint(deathpoint, it);
        }
    }

    /**
     * Expires a deathpoint: notifies the owner (if online), optionally drops
     * its items/experience, destroys it, and removes it via the iterator
     * (safe removal during iteration).
     */
    void forgetDeathpoint(Deathpoint deathpoint, Iterator<Deathpoint> it) {
        //Play sound and message for owner, if online
        Player owner = Bukkit.getPlayer(deathpoint.getOwnerUniqueId());
        if (owner != null) {
            options.forgetSound.run(deathpoint.getLocation(), owner);
            options.forgetMessage.sendMessage(owner, deathpoint);
        }
        //Forget deathpoint
        if (options.dropItemsOnForget) deathpoint.dropItems();
        if (options.dropExpOnForget) deathpoint.dropExperience();
        deathpoint.destroy();
        it.remove();
    }

    public World getWorld() {
        return world;
    }

    /** Emits the configured particles at a deathpoint; skipped when its chunk is unloaded. */
    private void runParticles(Deathpoint deathpoint) {
        Location location = deathpoint.getLocation();
        if (!location.getChunk().isLoaded()) return;
        Player owner = Bukkit.getPlayer(deathpoint.getOwnerUniqueId());
        options.particlePrimary.run(location, owner);
        options.particleSecondary.run(location, owner);
    }

    /** Plays the configured ambient sound at a deathpoint; skipped when its chunk is unloaded. */
    private void runAmbientSound(Deathpoint deathpoint) {
        Location location = deathpoint.getLocation();
        if (!location.getChunk().isLoaded()) return;
        Player owner = Bukkit.getPlayer(deathpoint.getOwnerUniqueId());
        options.ambientSound.run(location, owner);
    }
}
package trinity.tests;
import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.tree.ParseTree;
import org.junit.Test;
import trinity.ErrorReporter;
import trinity.StandardErrorReporter;
import trinity.TrinityLexer;
import trinity.TrinityParser;
import trinity.customExceptions.ParseException;
import trinity.visitors.ReachabilityVisitor;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
/**
 * Tests for ReachabilityVisitor: each snippet is parsed and visited, and the
 * helper reports true iff the visitor produced no errors (i.e. every code
 * path of every function reaches a return).
 */
public class ReachabilityVisitorTest {

    /**
     * Parses the given Trinity source and runs the reachability visitor.
     *
     * @param str Trinity source code
     * @return true if the visitor reported zero errors
     * @throws ParseException if the snippet itself fails to parse
     */
    private boolean reachabilityTest(String str) throws Exception {
        ErrorReporter er = new StandardErrorReporter(false, str);
        // NOTE(review): local variable shadows its own type name — renaming
        // it (e.g. to "visitor") would read better.
        ReachabilityVisitor ReachabilityVisitor = new ReachabilityVisitor(er);
        ANTLRInputStream input = new ANTLRInputStream(str);
        TrinityLexer lexer = new TrinityLexer(input);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        TrinityParser parser = new TrinityParser(tokens);
        ParseTree tree = parser.prog();
        // Guard: a syntax error would make the reachability result meaningless.
        if (parser.getNumberOfSyntaxErrors() != 0) {
            throw new ParseException("Invalid reachability test.");
        }
        tree.accept(ReachabilityVisitor);
        return er.getErrorAmount() == 0;
    }

    // A function body with no return (even with statements) is an error.
    @Test
    public void testSimpleFunctionFalseOnly() throws Exception {
        assertFalse(reachabilityTest("Scalar s () do\n" +
            "end"));
        assertFalse(reachabilityTest("Scalar k () do\n" +
            " 1 + 1;\n" +
            "end"));
    }

    // A direct return satisfies reachability.
    @Test
    public void testSimpleFunctionTrueOnly() throws Exception {
        assertTrue(reachabilityTest("Scalar s () do\n" +
            " return 1;\n" +
            "end"));
    }

    // Nested empty blocks do not count as returning.
    @Test
    public void testBlockHell() throws Exception {
        assertFalse(reachabilityTest("Scalar s () do\n" +
            " do\n" +
            " end\n" +
            "end"));
        assertFalse(reachabilityTest("Scalar s () do\n" +
            " do\n" +
            " do\n" +
            " end\n" +
            " end\n" +
            "end"));
    }

    // A return anywhere on the (single) path through nested blocks suffices.
    @Test
    public void testBlockHellNestedTrueOnly() throws Exception {
        assertTrue(reachabilityTest("Scalar s () do\n" +
            " do\n" +
            " do\n" +
            " return 1;" +
            " end\n" +
            " end\n" +
            "end"));
        assertTrue(reachabilityTest("Scalar s () do\n" +
            " do\n" +
            " do\n" +
            " end\n" +
            " return 1;" +
            " end\n" +
            "end"));
        assertTrue(reachabilityTest("Scalar s () do\n" +
            " do\n" +
            " do\n" +
            " end\n" +
            " end\n" +
            " return 1;" +
            "end"));
    }

    // Sibling blocks where no trailing return exists.
    // NOTE(review): the final assertTrue case here belongs in the
    // "...TrueOnly" test by naming convention — consider moving it.
    @Test
    public void testBlockHellContinuedFalseOnly() throws Exception {
        assertFalse(reachabilityTest("Scalar s () do\n" +
            " do\n" +
            " end\n" +
            " do\n" +
            " end\n" +
            "end"));
        assertFalse(reachabilityTest("Scalar s () do\n" +
            " do\n" +
            " do\n" +
            " end\n" +
            " end\n" +
            " do\n" +
            " end\n" +
            "end"));
        assertTrue(reachabilityTest("Scalar s () do\n" +
            " do\n" +
            " do\n" +
            " return 1;\n" +
            " end\n" +
            " end\n" +
            " do\n" +
            " end\n" +
            "end"));
    }

    // Sibling blocks where one path ends in a return.
    @Test
    public void testBlockHellContinuedTrueOnly() throws Exception {
        assertTrue(reachabilityTest("Scalar s () do\n" +
            " do\n" +
            " end\n" +
            " do\n" +
            " return 1;\n" +
            " end\n" +
            "end"));
        assertTrue(reachabilityTest("Scalar s () do\n" +
            " do\n" +
            " do\n" +
            " end\n" +
            " end\n" +
            " do\n" +
            " return 1;\n" +
            " end\n" +
            "end"));
    }

    // If/else: BOTH branches must return (or a return must follow).
    @Test
    public void testFunctionsWithIfStatementFalseOnly() throws Exception {
        assertFalse(reachabilityTest("Boolean b () do\n" +
            " if true then\n" +
            " return true;\n" +
            " else\n" +
            " 1 + 1;\n" +
            " end\n" +
            "end"));
        assertFalse(reachabilityTest("Boolean b () do\n" +
            " if true then\n" +
            " 1 + 1;\n" +
            " else\n" +
            " return false;\n" +
            " end\n" +
            "end"));
        assertFalse(reachabilityTest("Boolean b () do\n" +
            " if true then\n" +
            " 1 + 1;\n" +
            " else\n" +
            " 1 + 1;\n" +
            " end\n" +
            "end"));
    }

    // If/else accepted when both branches return, or when a return follows.
    @Test
    public void testFunctionsWithIfStatementTrueOnly() throws Exception {
        assertTrue(reachabilityTest("Boolean b () do\n" +
            " if c then\n" +
            " return true;\n" +
            " else\n" +
            " return false;\n" +
            " end\n" +
            "end"));
        assertTrue(reachabilityTest("Scalar s () do\n" +
            " do\n" +
            " if cf then\n" +
            " return true;\n" +
            " else\n" +
            " return false;\n" +
            " end\n" +
            " end\n" +
            "end"));
        assertTrue(reachabilityTest("Boolean f () do\n" +
            " if a>b then\n" +
            " return true;\n" +
            " else\n" +
            " 1 + 1;\n" +
            " end\n" +
            " return true;\n" +
            "end"));
    }

    // A return inside an if inside a for loop is NOT guaranteed to execute.
    @Test
    public void testForloop1() throws Exception {
        assertFalse(reachabilityTest("Scalar f () do\n" +
            " for Scalar a in [1 .. 10] do\n" +
            " if a + 2 == 0 then\n" +
            " return 4;\n" +
            " end\n" +
            " end\n" +
            "end"));
    }

    // A loop body with no return at all is an error.
    @Test
    public void testForloop2() throws Exception {
        assertFalse(reachabilityTest("Scalar f () do\n" +
            " for Scalar a in [1 .. 10] do\n" +
            " if a + 2 == 0 then\n" +
            " 1 + 4;\n" +
            " end\n" +
            " end\n" +
            "end"));
    }

    // A return after the loop always satisfies reachability.
    @Test
    public void testForloop3() throws Exception {
        assertTrue(reachabilityTest("Scalar f () do\n" +
            " for Scalar a in [1 .. 10] do\n" +
            " if a + 2 == 0 then\n" +
            " return 4;\n" +
            " end\n" +
            " end\n" +
            " return 2;\n" +
            "end"));
    }

    @Test
    public void testForloop4() throws Exception {
        assertTrue(reachabilityTest("Scalar f () do\n" +
            " for Scalar a in [1 .. 10] do\n" +
            " if a + 2 == 0 then\n" +
            " 1 + 4;\n" +
            " end\n" +
            " end\n" +
            " return 12;\n" +
            "end"));
    }

    // An unconditional return as the loop body counts as returning.
    @Test
    public void testForloop5() throws Exception {
        assertTrue(reachabilityTest("Scalar f () do\n" +
            " for Scalar a in [1 .. 10] do\n" +
            " return 4;\n" +
            " end\n" +
            "end"));
    }

    // ...even when followed by further (unreachable) statements.
    @Test
    public void testForloop6() throws Exception {
        assertTrue(reachabilityTest("Scalar f () do\n" +
            " for Scalar a in [1 .. 10] do\n" +
            " return 4;\n" +
            " end\n" +
            " 1+1;\n" +
            "end"));
    }

    // Regression test: function followed by top-level statements.
    // NOTE(review): "fail" is a misleading name for a passing regression
    // test — consider renaming (e.g. testFunctionWithTopLevelStatements).
    @Test
    public void fail() throws Exception {
        assertTrue(reachabilityTest("Scalar dotp(Vector[3] a, Vector[3] b) do" +
            " Scalar x = 0;" +
            " return a*b;" +
            "end" +
            "Vector[3] v1 = [1,2,3];" +
            "print dotp(v1,[4,5,6]);"));
    }
}
package com.lothrazar.cyclic.util;
import com.lothrazar.cyclic.ModCyclic;
import net.minecraft.block.Block;
import net.minecraft.block.BlockState;
import net.minecraft.block.Blocks;
import net.minecraft.entity.player.PlayerEntity;
import net.minecraft.item.BlockItem;
import net.minecraft.item.BlockItemUseContext;
import net.minecraft.item.DirectionalPlaceContext;
import net.minecraft.item.Items;
import net.minecraft.state.Property;
import net.minecraft.tags.BlockTags;
import net.minecraft.util.Direction;
import net.minecraft.util.Rotation;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.World;
public class UtilPlaceBlocks {
/**
 * Rotates the block at {@code pos} to a new valid state and writes it back.
 *
 * Slabs are special-cased: their "type" property is cycled, skipping the
 * "double" value so rotation only toggles top/bottom. All other blocks are
 * rotated via Block.rotate() with a Rotation chosen from the clicked side.
 *
 * @param worldObj world containing the block
 * @param pos position of the block to rotate
 * @param side face the player interacted with; selects the rotation amount
 * @return true if a new state was computed and setBlockState succeeded
 */
public static boolean rotateBlockValidState(World worldObj, BlockPos pos, Direction side) {
    BlockState clicked = worldObj.getBlockState(pos);
    // NOTE(review): getBlock() is not expected to return null in vanilla
    // (air is a block) — this guard looks defensive/dead; confirm.
    if (clicked.getBlock() == null) {
        return false;
    }
    Block clickedBlock = clicked.getBlock();
    BlockState newState = null;
    if (clickedBlock.isIn(BlockTags.SLABS)) {
        final String key = "type"; //top or bottom
        final String valueDupe = "double"; //actually theres 3 but dont worry about it
        // clicked.get(property)
        for (Property<?> prop : clicked.getProperties()) {
            //yes
            if (prop.getName().equals(key)) {
                //then cycle me
                // func_235896_a_ is the obfuscated name for cycling a state
                // property to its next value (per the existing comment).
                newState = clicked.func_235896_a_(prop); // cycle
                if (newState.get(prop).toString().equals(valueDupe)) {
                    //haha just hack and skip. turns into length 2. dont worry about it
                    newState = newState.func_235896_a_(prop);
                }
            }
        }
    }
    else {
        //default whatever
        // Rotation per clicked face. NOTE(review): the mapping is asymmetric
        // (EAST/SOUTH clockwise, NORTH/WEST counter-clockwise, UP/DOWN 180°)
        // — presumably intended to mirror by side; confirm against gameplay.
        switch (side) {
            case DOWN:
                newState = clickedBlock.rotate(clicked, worldObj, pos, Rotation.CLOCKWISE_180);
                break;
            case EAST:
                newState = clickedBlock.rotate(clicked, worldObj, pos, Rotation.CLOCKWISE_90);
                break;
            case NORTH:
                newState = clickedBlock.rotate(clicked, worldObj, pos, Rotation.COUNTERCLOCKWISE_90);
                break;
            case SOUTH:
                newState = clickedBlock.rotate(clicked, worldObj, pos, Rotation.CLOCKWISE_90);
                break;
            case UP:
                newState = clickedBlock.rotate(clicked, worldObj, pos, Rotation.CLOCKWISE_180);
                break;
            case WEST:
                newState = clickedBlock.rotate(clicked, worldObj, pos, Rotation.COUNTERCLOCKWISE_90);
                break;
            default:
                break;
        }
    }
    if (newState != null) {
        return worldObj.setBlockState(pos, newState);
    }
    return false;
}
public static boolean placeStateSafe(World world, PlayerEntity player,
BlockPos placePos, BlockState placeState) {
return placeStateSafe(world, player, placePos, placeState, false);
}
/**
* This will return true only if world.setBlockState(..) returns true or if the block here is already identical
*
* @param world
* @param player
* @param placePos
* @param placeState
* @param playSound
* @return
*/
public static boolean placeStateSafe(World world, PlayerEntity player, BlockPos placePos, BlockState placeState, boolean playSound) {
if (placePos == null) {
return false;
}
BlockState stateHere = null;
// return false;
if (world.isAirBlock(placePos) == false) {
// if there is a block here, we might have to stop
stateHere = world.getBlockState(placePos);
if (stateHere != null) {
// Block blockHere = stateHere.getBlock();
// if (blockHere.isReplaceable(world, placePos) == false) {
// // for example, torches, and the top half of a slab if you click
// // in the empty space
// return false;
// ok its a soft (isReplaceable == true) block so try to break it first try to destroy it
// unless it is liquid, don't try to destroy liquid
//blockHere.getMaterial(stateHere)
if (stateHere.getMaterial().isLiquid() == false) {
boolean dropBlock = true;
if (world.isRemote == false) {
world.destroyBlock(placePos, dropBlock);
}
}
}
}
// if (placeState.getBlock() instanceof BlockLeaves) { //dont let them decay
// placeState = placeState.withProperty(BlockLeaves.DECAYABLE, false);
boolean success = false;
try {
// flags specifies what to update, '3' means notify client & neighbors
// isRemote to make sure we are in a server thread
if (world.isRemote == false) {
success = world.setBlockState(placePos, placeState, 3); // returns false when placement failed
}
}
catch (Exception e) {
// show exception from above, possibly failed placement
ModCyclic.LOGGER.error("Error attempting to place block ", e);
}
// play sound to area when placement is a success
if (success && playSound) {
// SoundType type = UtilSound.getSoundFromBlockstate(placeState, world, placePos);
// if (type != null && type.getPlaceSound() != null) {
// UtilSound.playSoundFromServer(type.getPlaceSound(), SoundCategory.BLOCKS, placePos, world.provider.getDimension(), UtilSound.RANGE_DEFAULT);
}
return success;
}
public static boolean destroyBlock(World world, BlockPos pos) {
world.removeTileEntity(pos);
return world.setBlockState(pos, Blocks.AIR.getDefaultState()); // world.destroyBlock(pos, false);
}
public static boolean placeTorchSafely(World world, BlockPos blockPos) {
BlockItem torch = (BlockItem) Items.TORCH;
BlockItemUseContext context = new DirectionalPlaceContext(world, blockPos, Direction.DOWN, Items.TORCH.getDefaultInstance(), Direction.DOWN);
return torch.tryPlace(context).isSuccessOrConsume();
}
} |
package com.nincraft.modpackdownloader;
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Iterator;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;
import com.nincraft.modpackdownloader.util.Reference;
public class ModUpdater {
static Logger logger = LogManager.getRootLogger();
private static final String[] formats = { "yyyy-MM-dd'T'HH:mm:ss", "yyyy-MM-dd'T'HH:mm:ss'Z'",
"yyyy-MM-dd'T'HH:mm:ssZ", "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'", "yyyy-MM-dd'T'HH:mm:ss.SSSZ",
"yyyy-MM-dd HH:mm:ss", "MM/dd/yyyy HH:mm:ss", "MM/dd/yyyy'T'HH:mm:ss.SSS'Z'", "MM/dd/yyyy'T'HH:mm:ss.SSSZ",
"MM/dd/yyyy'T'HH:mm:ss.SSS", "MM/dd/yyyy'T'HH:mm:ssZ", "MM/dd/yyyy'T'HH:mm:ss", "yyyy:MM:dd HH:mm:ss", };
public static void updateCurseMods(String manifestFile, String mcVersion, String releaseType) {
try {
Long projectID;
Long fileID;
JSONParser parser = new JSONParser();
JSONObject jsons = (JSONObject) parser.parse(new FileReader(manifestFile));
JSONArray fileList = (JSONArray) jsons.get("curseFiles");
if (fileList == null) {
fileList = (JSONArray) jsons.get("files");
}
if (fileList != null) {
Iterator iterator = fileList.iterator();
logger.info("Checking for updates from " + fileList.size() + " mods");
while (iterator.hasNext()) {
JSONObject modJson = (JSONObject) iterator.next();
projectID = (Long) modJson.get("projectID");
fileID = (Long) modJson.get("fileID");
String url = Reference.CURSEFORGE_BASE_URL + projectID + Reference.COOKIE_TEST_1;
HttpURLConnection con = (HttpURLConnection) (new URL(url).openConnection());
con.setInstanceFollowRedirects(false);
con.connect();
String location = con.getHeaderField("Location");
String projectName = location.split("/")[2];
JSONParser projectParser = new JSONParser();
JSONObject projectJson = getCurseProjectJson(projectID, projectName, projectParser);
JSONObject fileListJson = (JSONObject) projectJson.get("files");
Date lastDate = null;
Long mostRecent = fileID;
String mostRecentFile = null;
String currentFile = null;
for (Object thing : fileListJson.keySet()) {
JSONObject file = (JSONObject) fileListJson.get(thing);
Date date = parseDate((String) file.get("created_at"));
if (lastDate == null) {
lastDate = date;
}
if (lastDate.before(date) && file.get("type").equals(releaseType)
&& file.get("version").equals(mcVersion)) {
mostRecent = (Long) file.get("id");
mostRecentFile = (String) file.get("name");
lastDate = date;
}
if (fileID.equals((Long) file.get("id"))) {
currentFile = (String) file.get("name");
}
}
if (!mostRecent.equals(fileID)) {
logger.info("Update found for " + projectName + ". Most recent version is " + mostRecentFile
+ ". Old version was " + currentFile);
modJson.remove("fileID");
modJson.put("fileID", mostRecent);
}
if (!modJson.containsKey("name")) {
modJson.put("name", projectName);
}
}
}
FileWriter file = new FileWriter(manifestFile);
try {
file.write(jsons.toJSONString());
} finally {
file.flush();
file.close();
}
} catch (FileNotFoundException e) {
logger.error(e.getMessage());
} catch (IOException e) {
logger.error(e.getMessage());
} catch (ParseException e) {
logger.error(e.getMessage());
}
}
private static JSONObject getCurseProjectJson(Long projectID, String projectName, JSONParser projectParser)
throws ParseException, IOException {
try {
return (JSONObject) projectParser.parse(new BufferedReader(new InputStreamReader(
new URL("http://widget.mcf.li/mc-mods/minecraft/" + projectName + ".json").openStream())));
} catch (FileNotFoundException e) {
return (JSONObject) projectParser.parse(new BufferedReader(new InputStreamReader(
new URL("http://widget.mcf.li/mc-mods/minecraft/" + projectID + "-" + projectName + ".json")
.openStream())));
}
}
private static Date parseDate(String date) {
Date d = null;
if (date != null) {
for (String parse : formats) {
SimpleDateFormat sdf = new SimpleDateFormat(parse);
try {
d = sdf.parse(date);
} catch (java.text.ParseException e) {
}
}
}
return d;
}
} |
package com.orangebot.pitch.strats;
import com.orangebot.pitch.CardGame.Card;
import com.orangebot.pitch.CardGame.Rank;
import com.orangebot.pitch.PitchGame.PlayedCard;
import com.orangebot.pitch.PitchGame.Player;
import com.orangebot.pitch.PitchGame.PlayerStrategy;
public class SimpleStrategy implements PlayerStrategy {
@Override
public Card playCard(Player p) {
Card myHighCard = p.getMyHighestCard(true, true, true);
if (p.isLead()) {
// If I have high card, play high card
if (p.isHighCard(myHighCard)) {
return myHighCard;
}
// Try to find a non-point card
Card card = p.getMyLowestCard(false, true, false);
if (card != null) {
return card;
}
// Try to avoid the three
card = p.getMyLowestCard(true, true, false);
if (card != null) {
return card;
}
// Return the lowest card we have
return p.getMyLowestCard(true, true, true);
}
if (p.isHighCard(myHighCard)) {
return myHighCard;
}
PlayedCard highCard = p.getHighestPlayedCard();
if (highCard.getPlayerId().getTeam() == p.getId().getTeam() &&
p.isHighCard(highCard.getCard())) {
// High card is from my partner
// Try to play the three
if (p.hasCard(Rank.THREE)) {
return p.getCard(Rank.THREE);
}
// Try to find a point card
Card card = p.getMyLowestCard(true, false, true);
if (card != null) {
return card;
}
} else {
// High card is not from my partner
// Try to find a non-point card
Card card = p.getMyLowestCard(false, true, false);
if (card != null) {
return card;
}
// Try to avoid the three
card = p.getMyLowestCard(true, true, false);
if (card != null) {
return card;
}
}
// Return the lowest card we have
return p.getMyLowestCard(true, true, true);
}
} |
package com.owz.furry.module.enums;
import com.owz.furry.common.mybatis.EnumTrait;
public enum UserStatusType implements EnumTrait {
INACTIVE(1, ""),
ACTIVATED(2, "");
private int code;
private String text;
private UserStatusType(int code, String text) {
this.code = code;
this.text = text;
}
@Override
public int getCode() {
return 0;
}
public String getText() {
return text;
}
} |
package com.redhat.ukiservices.jdg;
import com.redhat.ukiservices.common.CommonConstants;
import com.redhat.ukiservices.jdg.model.HEElementModel;
import io.vertx.core.Context;
import io.vertx.core.Vertx;
import io.vertx.core.eventbus.Message;
import io.vertx.core.eventbus.MessageConsumer;
import io.vertx.core.json.JsonArray;
import io.vertx.core.json.JsonObject;
import io.vertx.core.logging.Logger;
import io.vertx.core.logging.LoggerFactory;
public class JDGPutVerticle extends AbstractJDGVerticle {
private static final Logger log = LoggerFactory.getLogger("JDGPutVerticle");
@Override
public void init(Vertx vertx, Context context) {
super.init(vertx, context);
}
@Override
public void start() throws Exception {
super.start();
MessageConsumer<JsonArray> ebConsumer = vertx.eventBus()
.consumer(CommonConstants.VERTX_EVENT_BUS_HE_RSS_JDG_PUT);
ebConsumer.handler(this::processEntries);
}
private void processEntries(Message<JsonArray> message) {
JsonArray entries = message.body();
for (Object obj : entries.getList()) {
JsonObject jobj = (JsonObject) obj;
HEElementModel model = gson.fromJson(jobj.toString(), HEElementModel.class);
remoteCache.put(model.getGuid(), model);
}
}
} |
package com.sandwell.JavaSimulation3D;
import static com.sandwell.JavaSimulation.Util.formatNumber;
import java.util.ArrayList;
import java.util.HashMap;
import com.jaamsim.input.InputAgent;
import com.jaamsim.math.Color4d;
import com.sandwell.JavaSimulation.BooleanInput;
import com.sandwell.JavaSimulation.BooleanListInput;
import com.sandwell.JavaSimulation.BooleanVector;
import com.sandwell.JavaSimulation.ColourInput;
import com.sandwell.JavaSimulation.DoubleInput;
import com.sandwell.JavaSimulation.DoubleListInput;
import com.sandwell.JavaSimulation.DoubleVector;
import com.sandwell.JavaSimulation.EntityInput;
import com.sandwell.JavaSimulation.EntityListInput;
import com.sandwell.JavaSimulation.ErrorException;
import com.sandwell.JavaSimulation.FileEntity;
import com.sandwell.JavaSimulation.Input;
import com.sandwell.JavaSimulation.InputErrorException;
import com.sandwell.JavaSimulation.IntegerVector;
import com.sandwell.JavaSimulation.Keyword;
import com.sandwell.JavaSimulation.ProbabilityDistribution;
import com.sandwell.JavaSimulation.Process;
import com.sandwell.JavaSimulation.Tester;
import com.sandwell.JavaSimulation.Vector;
/**
* Class ModelEntity - JavaSimulation3D
*/
public class ModelEntity extends DisplayEntity {
// Breakdowns
@Keyword(desc = "Reliability is defined as:\n" +
" 100% - (plant breakdown time / total operation time)\n " +
"or\n " +
"(Operational Time)/(Breakdown + Operational Time)",
example = "Object1 Reliability { 0.95 }")
private final DoubleInput availability;
protected double hoursForNextFailure; // The number of working hours required before the next breakdown
protected double iATFailure; // inter arrival time between failures
protected boolean breakdownPending; // true when a breakdown is to occur
protected boolean brokendown; // true => entity is presently broken down
protected boolean maintenance; // true => entity is presently in maintenance
protected boolean associatedBreakdown; // true => entity is presently in Associated Breakdown
protected boolean associatedMaintenance; // true => entity is presently in Associated Maintenance
protected double breakdownStartTime; // Start time of the most recent breakdown
protected double breakdownEndTime; // End time of the most recent breakdown
// Breakdown Probability Distributions
@Keyword(desc = "A ProbabilityDistribution object that governs the duration of breakdowns (in hours).",
example = "Object1 DowntimeDurationDistribution { BreakdownProbDist1 }")
private final EntityInput<ProbabilityDistribution> downtimeDurationDistribution;
@Keyword(desc = "A ProbabilityDistribution object that governs when breakdowns occur (in hours).",
example = "Object1 DowntimeIATDistribution { BreakdownProbDist1 }")
private final EntityInput<ProbabilityDistribution> downtimeIATDistribution;
// Maintenance
@Keyword(desc = "The simulation time for the start of the first maintenance for each maintenance cycle.",
example = "Object1 FirstMaintenanceTime { 24 h }")
protected DoubleListInput firstMaintenanceTimes;
@Keyword(desc = "The time between maintenance activities for each maintenance cycle",
example = "Object1 MaintenanceInterval { 168 h }")
protected DoubleListInput maintenanceIntervals;
@Keyword(desc = "The durations of a single maintenance event for each maintenance cycle.",
example = "Object1 MaintenanceDuration { 336 h }")
protected DoubleListInput maintenanceDurations;
protected IntegerVector maintenancePendings; // Number of maintenance periods that are due
@Keyword(desc = "A Boolean value. Allows scheduled maintenances to be skipped if it overlaps " +
"with another planned maintenance event.",
example = "Object1 SkipMaintenanceIfOverlap { TRUE }")
protected BooleanListInput skipMaintenanceIfOverlap;
@Keyword(desc = "A list of objects that share the maintenance schedule with this object. " +
"In order for the maintenance to start, all objects on this list must be available." +
"This keyword is for Handlers and Signal Blocks only.",
example = "Block1 SharedMaintenance { Block2 Block2 }")
private final EntityListInput<ModelEntity> sharedMaintenanceList;
protected ModelEntity masterMaintenanceEntity; // The entity that has maintenance information
protected boolean performMaintenanceAfterShipDelayPending; // maintenance needs to be done after shipDelay
// Maintenance based on hours of operations
@Keyword(desc = "Working time for the start of the first maintenance for each maintenance cycle",
example = "Object1 FirstMaintenanceOperatingHours { 1000 2500 h }")
private final DoubleListInput firstMaintenanceOperatingHours;
@Keyword(desc = "Working time between one maintenance event and the next for each maintenance cycle",
example = "Object1 MaintenanceOperatingHoursIntervals { 2000 5000 h }")
private final DoubleListInput maintenanceOperatingHoursIntervals;
@Keyword(desc = "Duration of maintenance events based on working hours for each maintenance cycle",
example = "Ship1 MaintenanceOperatingHoursDurations { 24 48 h }")
private final DoubleListInput maintenanceOperatingHoursDurations;
protected IntegerVector maintenanceOperatingHoursPendings; // Number of maintenance periods that are due
protected DoubleVector hoursForNextMaintenanceOperatingHours;
protected double maintenanceStartTime; // Start time of the most recent maintenance
protected double maintenanceEndTime; // End time of the most recent maintenance
protected DoubleVector nextMaintenanceTimes; // next start time for each maintenance
protected double nextMaintenanceDuration; // duration for next maintenance
protected DoubleVector lastScheduledMaintenanceTimes;
@Keyword(desc = "If maintenance has been deferred by the DeferMaintenanceLookAhead keyword " +
"for longer than this time, the maintenance will start even if " +
"there is an object within the lookahead. There must be one entry for each " +
"defined maintenance schedule if DeferMaintenanceLookAhead is used. This" +
"keyword is only used for signal blocks.",
example = "Object1 DeferMaintenanceLimit { 50 50 h }")
private final DoubleListInput deferMaintenanceLimit;
@Keyword(desc = "If the duration of the downtime is longer than this time, equipment will be released",
example = "Object1 DowntimeToReleaseEquipment { 1.0 h }")
protected final DoubleInput downtimeToReleaseEquipment;
@Keyword(desc = "A list of Boolean values corresponding to the maintenance cycles. If a value is TRUE, " +
"then routes/tasks are released before performing the maintenance in the cycle.",
example = "Object1 ReleaseEquipment { TRUE FALSE FALSE }")
protected final BooleanListInput releaseEquipment;
@Keyword(desc = "A list of Boolean values corresponding to the maintenance cycles. If a value is " +
"TRUE, then maintenance in the cycle can start even if the equipment is presently " +
"working.",
example = "Object1 ForceMaintenance { TRUE FALSE FALSE }")
protected final BooleanListInput forceMaintenance;
// Statistics
@Keyword(desc = "If TRUE, then statistics for this object are " +
"included in the main output report.",
example = "Object1 PrintToReport { TRUE }")
private final BooleanInput printToReport;
// States
private static Vector stateList = new Vector( 11, 1 ); // List of valid states
private final HashMap<String, StateRecord> stateMap;
protected double workingHours; // Accumulated working time spent in working states
private double timeOfLastStateChange;
private int numberOfCompletedCycles;
protected double lastHistogramUpdateTime; // Last time at which a histogram was updated for this entity
protected double secondToLastHistogramUpdateTime; // Second to last time at which a histogram was updated for this entity
private StateRecord presentState; // The present state of the entity
protected FileEntity stateReportFile; // The file to store the state information
private String finalLastState = ""; // The final state of the entity (in a sequence of transitional states)
private double timeOfLastPrintedState = 0; // The time that the last state printed in the trace state file
// Graphics
protected final static Color4d breakdownColor = ColourInput.DARK_RED; // Color of the entity in breaking down
protected final static Color4d maintenanceColor = ColourInput.RED; // Color of the entity in maintenance
static {
    // Register the states a ModelEntity can report.
    for (String state : new String[] { "Idle", "Working", "Breakdown", "Maintenance" }) {
        stateList.addElement(state);
    }
}
{
    // Instance initializer: constructs every keyword Input object and registers
    // it with the entity so the input parser can populate it from the config
    // file. Registration order is preserved deliberately — it presumably drives
    // keyword ordering in the input editor/documentation (TODO confirm).

    // --- Scheduled maintenance (per maintenance cycle) ---
    maintenanceDurations = new DoubleListInput("MaintenanceDurations", "Maintenance", new DoubleVector());
    maintenanceDurations.setValidRange(0.0d, Double.POSITIVE_INFINITY);
    maintenanceDurations.setUnits("h");
    this.addInput(maintenanceDurations, true, "MaintenanceDuration");
    maintenanceIntervals = new DoubleListInput("MaintenanceIntervals", "Maintenance", new DoubleVector());
    maintenanceIntervals.setValidRange(0.0d, Double.POSITIVE_INFINITY);
    maintenanceIntervals.setUnits("h");
    this.addInput(maintenanceIntervals, true, "MaintenanceInterval");
    firstMaintenanceTimes = new DoubleListInput("FirstMaintenanceTimes", "Maintenance", new DoubleVector());
    firstMaintenanceTimes.setValidRange(0.0d, Double.POSITIVE_INFINITY);
    firstMaintenanceTimes.setUnits("h");
    this.addInput(firstMaintenanceTimes, true, "FirstMaintenanceTime");
    forceMaintenance = new BooleanListInput("ForceMaintenance", "Maintenance", null);
    this.addInput(forceMaintenance, true);
    releaseEquipment = new BooleanListInput("ReleaseEquipment", "Maintenance", null);
    this.addInput(releaseEquipment, true);

    // --- Breakdowns (reliability in [0,1]; distributions optional) ---
    availability = new DoubleInput("Reliability", "Breakdowns", 1.0d, 0.0d, 1.0d);
    this.addInput(availability, true);
    downtimeIATDistribution = new EntityInput<ProbabilityDistribution>(ProbabilityDistribution.class, "DowntimeIATDistribution", "Breakdowns", null);
    this.addInput(downtimeIATDistribution, true);
    downtimeDurationDistribution = new EntityInput<ProbabilityDistribution>(ProbabilityDistribution.class, "DowntimeDurationDistribution", "Breakdowns", null);
    this.addInput(downtimeDurationDistribution, true);
    downtimeToReleaseEquipment = new DoubleInput("DowntimeToReleaseEquipment", "Breakdowns", 0.0d, 0.0d, Double.POSITIVE_INFINITY);
    this.addInput(downtimeToReleaseEquipment, true);

    // --- Maintenance scheduling refinements ---
    skipMaintenanceIfOverlap = new BooleanListInput("SkipMaintenanceIfOverlap", "Maintenance", new BooleanVector());
    this.addInput(skipMaintenanceIfOverlap, true);
    deferMaintenanceLimit = new DoubleListInput("DeferMaintenanceLimit", "Maintenance", null);
    deferMaintenanceLimit.setValidRange(0.0d, Double.POSITIVE_INFINITY);
    deferMaintenanceLimit.setUnits("h");
    this.addInput(deferMaintenanceLimit, true);
    sharedMaintenanceList = new EntityListInput<ModelEntity>(ModelEntity.class, "SharedMaintenance", "Maintenance", new ArrayList<ModelEntity>(0));
    this.addInput(sharedMaintenanceList, true);

    // --- Maintenance based on hours of operation (intervals/durations must be > 0) ---
    firstMaintenanceOperatingHours = new DoubleListInput("FirstMaintenanceOperatingHours", "Maintenance", new DoubleVector());
    firstMaintenanceOperatingHours.setValidRange(0.0d, Double.POSITIVE_INFINITY);
    firstMaintenanceOperatingHours.setUnits("h");
    this.addInput(firstMaintenanceOperatingHours, true);
    maintenanceOperatingHoursDurations = new DoubleListInput("MaintenanceOperatingHoursDurations", "Maintenance", new DoubleVector());
    maintenanceOperatingHoursDurations.setValidRange(1e-15, Double.POSITIVE_INFINITY);
    maintenanceOperatingHoursDurations.setUnits("h");
    this.addInput(maintenanceOperatingHoursDurations, true);
    maintenanceOperatingHoursIntervals = new DoubleListInput("MaintenanceOperatingHoursIntervals", "Maintenance", new DoubleVector());
    maintenanceOperatingHoursIntervals.setValidRange(1e-15, Double.POSITIVE_INFINITY);
    maintenanceOperatingHoursIntervals.setUnits("h");
    this.addInput(maintenanceOperatingHoursIntervals, true);

    // --- Reporting ---
    printToReport = new BooleanInput("PrintToReport", "Report", true);
    this.addInput(printToReport, true);
}
public ModelEntity() {
    // Histogram bookkeeping.
    lastHistogramUpdateTime = 0.0;
    secondToLastHistogramUpdateTime = 0.0;
    // Breakdown bookkeeping: nothing pending, next breakdown ends "never".
    hoursForNextFailure = 0.0;
    iATFailure = 0.0;
    breakdownStartTime = 0.0;
    breakdownEndTime = Double.POSITIVE_INFINITY;
    breakdownPending = false;
    brokendown = false;
    associatedBreakdown = false;
    // Maintenance bookkeeping.
    maintenancePendings = new IntegerVector(1, 1);
    maintenanceOperatingHoursPendings = new IntegerVector(1, 1);
    hoursForNextMaintenanceOperatingHours = new DoubleVector(1, 1);
    performMaintenanceAfterShipDelayPending = false;
    lastScheduledMaintenanceTimes = new DoubleVector();
    maintenanceStartTime = 0.0;
    maintenanceEndTime = Double.POSITIVE_INFINITY;
    maintenance = false;
    associatedMaintenance = false;
    workingHours = 0.0;
    // Seed the state map with "Idle" and make it the current state.
    stateMap = new HashMap<String, StateRecord>();
    StateRecord idleState = new StateRecord("Idle", 0);
    stateMap.put("idle", idleState);
    presentState = idleState;
    timeOfLastStateChange = getCurrentTime();
    idleState.lastStartTimeInState = getCurrentTime();
    idleState.secondLastStartTimeInState = getCurrentTime();
    initStateMap();
}
/**
 * Resets all breakdown/maintenance bookkeeping flags and counters to their
 * pristine values (no failure pending, nothing in service, zero hours worked).
 */
public void clearInternalProperties() {
    hoursForNextFailure = 0.0;
    workingHours = 0.0;
    performMaintenanceAfterShipDelayPending = false;
    breakdownPending = false;
    brokendown = false;
    associatedBreakdown = false;
    maintenance = false;
    associatedMaintenance = false;
}
@Override
public void validate()
throws InputErrorException {
    // Validates cross-keyword consistency of the breakdown/maintenance inputs.
    // Check order is preserved: the first violated rule determines which error
    // message the user sees.
    super.validate();
    this.validateMaintenance();
    // Operating-hours maintenance lists must be index-aligned.
    Input.validateIndexedLists(firstMaintenanceOperatingHours.getValue(), maintenanceOperatingHoursIntervals.getValue(), "FirstMaintenanceOperatingHours", "MaintenanceOperatingHoursIntervals");
    Input.validateIndexedLists(firstMaintenanceOperatingHours.getValue(), maintenanceOperatingHoursDurations.getValue(), "FirstMaintenanceOperatingHours", "MaintenanceOperatingHoursDurations");
    // Reliability < 1 implies breakdowns occur, which needs a duration distribution.
    if( getAvailability() < 1.0 ) {
        if( getDowntimeDurationDistribution() == null ) {
            throw new InputErrorException("When availability is less than one you must define downtimeDurationDistribution in your input file!");
        }
    }
    // An IAT distribution is meaningless without a duration distribution.
    if( downtimeIATDistribution.getValue() != null ) {
        if( getDowntimeDurationDistribution() == null ) {
            throw new InputErrorException("When DowntimeIATDistribution is set, DowntimeDurationDistribution must also be set.");
        }
    }
    // Optional per-cycle lists, when supplied, must align with FirstMaintenanceTimes.
    if( skipMaintenanceIfOverlap.getValue().size() > 0 )
        Input.validateIndexedLists(firstMaintenanceTimes.getValue(), skipMaintenanceIfOverlap.getValue(), "FirstMaintenanceTimes", "SkipMaintenanceIfOverlap");
    if( releaseEquipment.getValue() != null )
        Input.validateIndexedLists(firstMaintenanceTimes.getValue(), releaseEquipment.getValue(), "FirstMaintenanceTimes", "ReleaseEquipment");
    if( forceMaintenance.getValue() != null ) {
        Input.validateIndexedLists(firstMaintenanceTimes.getValue(), forceMaintenance.getValue(), "FirstMaintenanceTimes", "ForceMaintenance");
    }
    // Downtime distributions must not be able to produce negative samples.
    if(downtimeDurationDistribution.getValue() != null &&
       downtimeDurationDistribution.getValue().getMinimumValue() < 0)
        throw new InputErrorException("DowntimeDurationDistribution cannot allow negative values");
    if(downtimeIATDistribution.getValue() != null &&
       downtimeIATDistribution.getValue().getMinimumValue() < 0)
        throw new InputErrorException("DowntimeIATDistribution cannot allow negative values");
}
@Override
public void earlyInit() {
    super.earlyInit();
    // Prime the downtime distributions (when supplied) before the run starts.
    ProbabilityDistribution durationDist = downtimeDurationDistribution.getValue();
    if (durationDist != null) {
        durationDist.initialize();
    }
    ProbabilityDistribution iatDist = downtimeIATDistribution.getValue();
    if (iatDist != null) {
        iatDist.initialize();
    }
}
/**
 * Returns the number of statistics cycles completed so far
 * (incremented by collectCycleStats, reset by collectInitializationStats).
 */
public int getNumberOfCompletedCycles() {
    return numberOfCompletedCycles;
}
// INPUT
/**
 * Validates the scheduled-maintenance keywords: the three lists must be
 * index-aligned, and every interval must exceed its corresponding duration.
 *
 * @throws InputErrorException on any inconsistency
 */
public void validateMaintenance() {
    Input.validateIndexedLists(firstMaintenanceTimes.getValue(), maintenanceIntervals.getValue(), "FirstMaintenanceTimes", "MaintenanceIntervals");
    Input.validateIndexedLists(firstMaintenanceTimes.getValue(), maintenanceDurations.getValue(), "FirstMaintenanceTimes", "MaintenanceDurations");
    for (int i = 0; i < maintenanceIntervals.getValue().size(); i++) {
        double interval = maintenanceIntervals.getValue().get(i);
        double duration = maintenanceDurations.getValue().get(i);
        if (interval < duration) {
            throw new InputErrorException("MaintenanceInterval should be greater than MaintenanceDuration (%f) <= (%f)",
                    interval, duration);
        }
    }
}
// INITIALIZATION METHODS
/**
 * Resets accumulated statistics: shifts each operating-hours maintenance
 * trigger back by the hours already worked, so the remaining-hours count
 * starts fresh for the measured portion of the run.
 */
public void clearStatistics() {
    for (int i = 0; i < getMaintenanceOperatingHoursIntervals().size(); i++) {
        double remaining = hoursForNextMaintenanceOperatingHours.get(i) - this.getWorkingHours();
        hoursForNextMaintenanceOperatingHours.set(i, remaining);
    }
    // Historical seeded-breakdown logic was removed here; the first-failure
    // time is now determined in initialize() via getNextBreakdownIAT().
}
/**
 * *!*!*!*! OVERLOAD !*!*!*!*
 * Initializes run-time state: clears breakdown/maintenance flags, opens the
 * state trace file when requested, calibrates the breakdown inter-arrival
 * time so the configured availability is achieved, and starts the
 * maintenance schedules.
 */
public void initialize() {
    brokendown = false;
    maintenance = false;
    associatedBreakdown = false;
    associatedMaintenance = false;
    // Create state trace file if required
    if (testFlag(FLAG_TRACESTATE)) {
        String fileName = InputAgent.getReportDirectory() + InputAgent.getRunName() + "-" + this.getName() + ".trc";
        stateReportFile = new FileEntity( fileName, FileEntity.FILE_WRITE, false );
    }
    workingHours = 0.0;
    // Calculate the average downtime duration if distributions are used
    double average = 0.0;
    if(getDowntimeDurationDistribution() != null)
        average = getDowntimeDurationDistribution().getExpectedValue();
    // Calculate the average downtime inter-arrival time.
    // Fully available (or zero-duration breakdowns): push the first failure
    // effectively to infinity (1e11 hours).
    if( (getAvailability() == 1.0 || average == 0.0) ) {
        iATFailure = 10.0E10;
    }
    else {
        if( getDowntimeIATDistribution() != null ) {
            iATFailure = getDowntimeIATDistribution().getExpectedValue();
            // Adjust the downtime inter-arrival time to get the specified availability:
            // required IAT = average*(1-A)/A rearranged as (average/(1-A)) - average;
            // scale the distribution when its expected value differs from that target.
            if( ! Tester.equalCheckTolerance( iATFailure, ( (average / (1.0 - getAvailability())) - average ) ) ) {
                getDowntimeIATDistribution().setValueFactor_For( ( (average / (1.0 - getAvailability())) - average) / iATFailure, this );
                iATFailure = getDowntimeIATDistribution().getExpectedValue();
            }
        }
        else {
            // No IAT distribution: use the analytic value directly.
            iATFailure = ( (average / (1.0 - getAvailability())) - average );
        }
    }
    // Determine the time for the first breakdown event
    hoursForNextFailure = getNextBreakdownIAT();
    this.setPresentState( "Idle" );
    brokendown = false;
    // Start the maintenance network
    if( firstMaintenanceTimes.getValue().size() != 0 ) {
        maintenancePendings.fillWithEntriesOf( firstMaintenanceTimes.getValue().size(), 0 );
        lastScheduledMaintenanceTimes.fillWithEntriesOf( firstMaintenanceTimes.getValue().size(), Double.POSITIVE_INFINITY );
        this.doMaintenanceNetwork();
    }
    // calculate hours for first operating hours breakdown
    for ( int i = 0; i < getMaintenanceOperatingHoursIntervals().size(); i++ ) {
        hoursForNextMaintenanceOperatingHours.add( firstMaintenanceOperatingHours.getValue().get( i ) );
        maintenanceOperatingHoursPendings.add( 0 );
    }
}
// ACCESSOR METHODS
/**
 * Return the time at which the most recent maintenance is scheduled to end
 */
public double getMaintenanceEndTime() {
    return maintenanceEndTime;
}
/**
 * Return the time at which the most recent breakdown is scheduled to end
 */
public double getBreakdownEndTime() {
    return breakdownEndTime;
}
/** Returns the simulation time of the last state transition. */
public double getTimeOfLastStateChange() {
    return timeOfLastStateChange;
}
/**
 * Returns the availability proportion.
 */
public double getAvailability() {
    return availability.getValue();
}
/** Returns the FirstMaintenanceTimes input (the Input object itself, not its value). */
public DoubleListInput getFirstMaintenanceTimes() {
    return firstMaintenanceTimes;
}
/** Returns true when this entity's statistics go into the main output report. */
public boolean getPrintToReport() {
    return printToReport.getValue();
}
/**
 * Return true if the entity is working.
 * Base implementation always returns false — presumably overridden by
 * subclasses that track an actual working state (TODO confirm).
 */
public boolean isWorking() {
    return false;
}
/** Returns true while the entity is broken down. */
public boolean isBrokendown() {
    return brokendown;
}
/** Returns true when a breakdown has been scheduled but not yet started. */
public boolean isBreakdownPending() {
    return breakdownPending;
}
/** Returns true while the entity is in an associated (shared) breakdown. */
public boolean isInAssociatedBreakdown() {
    return associatedBreakdown;
}
/** Returns true while the entity is in maintenance. */
public boolean isInMaintenance() {
    return maintenance;
}
/** Returns true while the entity is in an associated (shared) maintenance. */
public boolean isInAssociatedMaintenance() {
    return associatedMaintenance;
}
/** Returns true when the entity is unavailable for any service reason. */
public boolean isInService() {
    return ( brokendown || maintenance || associatedBreakdown || associatedMaintenance );
}
// Note: unlike the associated-flag setters below, these two also refresh the
// present state.
public void setBrokendown( boolean bool ) {
    brokendown = bool;
    this.setPresentState();
}
public void setMaintenance( boolean bool ) {
    maintenance = bool;
    this.setPresentState();
}
public void setAssociatedBreakdown( boolean bool ) {
    associatedBreakdown = bool;
}
public void setAssociatedMaintenance( boolean bool ) {
    associatedMaintenance = bool;
}
/** Returns the breakdown-duration distribution, or null when none was input. */
public ProbabilityDistribution getDowntimeDurationDistribution() {
    return downtimeDurationDistribution.getValue();
}
/** Returns the downtime threshold (hours) above which equipment is released. */
public double getDowntimeToReleaseEquipment() {
    return downtimeToReleaseEquipment.getValue();
}
/** Returns true when either maintenance or breakdowns are configured. */
public boolean hasServiceDefined() {
    return( maintenanceDurations.getValue().size() > 0 || getDowntimeDurationDistribution() != null );
}
// HOURS AND STATES
/**
 * Per-state time bookkeeping. Fields are package-private by design: the
 * enclosing ModelEntity reads and writes them directly when collecting
 * statistics, so their names and visibility must not change.
 */
public static class StateRecord {
    String stateName;                   // display name of the state (e.g. "Idle")
    int index;                          // position of this state in the class state list
    double initializationHours;         // hours accumulated during the initialization period
    double totalHours;                  // hours accumulated since initialization ended
    double completedCycleHours;         // hours accumulated over completed cycles
    double currentCycleHours;           // hours accumulated in the in-progress cycle
    double lastStartTimeInState;        // sim time this state was most recently entered
    double secondLastStartTimeInState;  // sim time of the entry before that

    public StateRecord(String state, int i) {
        stateName = state;
        index = i;
    }
    public int getIndex() {
        return index;
    }
    public String getStateName() {
        return stateName;
    }
    public double getTotalHours() {
        return totalHours;
    }
    public double getCompletedCycleHours() {
        return completedCycleHours;
    }
    public double getCurrentCycleHours() {
        return currentCycleHours;
    }
    public double getLastStartTimeInState() {
        return lastStartTimeInState;
    }
    public double getSecondLastStartTimeInState() {
        return secondLastStartTimeInState;
    }
    @Override
    public String toString() {
        return getStateName();
    }
}
/**
 * Populate the state-name -> StateRecord map from the StateList.
 * Keys are lower-cased state names. The existing "Idle" record (if any) is
 * preserved across re-initialization so its accumulated hours survive; only
 * its index is refreshed.
 */
public void initStateMap() {
    // Bug fix: on the very first call the map is empty, so the original
    // code obtained null here and either threw an NPE at "idle.index = i"
    // or stored a null record under "idle". Create a fresh record instead.
    StateRecord idle = getStateRecordFor("Idle");
    if (idle == null)
        idle = new StateRecord("Idle", -1);
    stateMap.clear();
    for (int i = 0; i < getStateList().size(); i++) {
        String state = (String)getStateList().get(i);
        if ( state.equals("Idle") ) {
            idle.index = i;
            continue;
        }
        StateRecord stateRecord = new StateRecord(state, i);
        stateMap.put(state.toLowerCase() , stateRecord);
    }
    stateMap.put("idle", idle);
    timeOfLastStateChange = getCurrentTime();
}
/**
* Runs after initialization period
*/
public void collectInitializationStats() {
collectPresentHours();
for ( StateRecord each : stateMap.values() ) {
each.initializationHours = each.getTotalHours();
each.totalHours = 0.0d;
each.completedCycleHours = 0.0d;
}
numberOfCompletedCycles = 0;
}
/**
* Runs when cycle is finished
*/
public void collectCycleStats() {
collectPresentHours();
// finalize cycle for each state record
for ( StateRecord each : stateMap.values() ) {
each.completedCycleHours += each.getCurrentCycleHours();
each.currentCycleHours = 0.0d;
}
numberOfCompletedCycles++;
}
/**
* Clear the current cycle hours
*/
protected void clearCurrentCycleHours() {
collectPresentHours();
// clear current cycle hours for each state record
for ( StateRecord each : stateMap.values() )
each.currentCycleHours = 0.0d;
}
/**
* Runs after each report interval
*/
public void clearReportStats() {
// clear totalHours for each state record
for ( StateRecord each : stateMap.values() ) {
each.totalHours = 0.0d;
each.completedCycleHours = 0.0d;
}
numberOfCompletedCycles = 0;
}
/**
 * Fold the time elapsed since the last state change into the present
 * state's counters and advance timeOfLastStateChange to now. Working time
 * is also tallied when the entity reports itself as working.
 */
private void collectPresentHours() {
    double now = getCurrentTime();
    // Nothing has elapsed since the last update; avoid double-counting.
    if (now == timeOfLastStateChange)
        return;
    double elapsed = now - timeOfLastStateChange;
    timeOfLastStateChange = now;
    presentState.totalHours += elapsed;
    presentState.currentCycleHours += elapsed;
    if (this.isWorking())
        workingHours += elapsed;
}
/**
 * Updates the statistics, then sets the present state to the specified value.
 * No-op when the entity is already in the given state. The new state's
 * start-time history is pushed down (last -> secondLast) so that
 * getTimeFromStartState_ToEndState() can look one cycle back.
 *
 * @throws ErrorException if the state name is not present in the StateList
 */
public void setPresentState( String state ) {
if (traceFlag) {
this.trace("setState( " + state + " )");
this.traceLine(" Old State = " + getPresentState());
}
// Already in this state: nothing to collect, nothing to change
if (presentStateEquals(state))
return;
if (testFlag(FLAG_TRACESTATE)) this.printStateTrace(state);
StateRecord nextState = this.getStateRecordFor(state);
if (nextState == null)
throw new ErrorException(this + " Specified state: " + state + " was not found in the StateList: " + this.getStateList());
// Accumulate hours for the outgoing state and advance timeOfLastStateChange
collectPresentHours();
nextState.secondLastStartTimeInState = nextState.getLastStartTimeInState();
nextState.lastStartTimeInState = timeOfLastStateChange;
presentState = nextState;
}
public StateRecord getStateRecordFor(String state) {
return stateMap.get(state.toLowerCase());
}
private StateRecord getStateRecordFor(int index) {
String state = (String)getStateList().get(index);
return getStateRecordFor(state);
}
public double getTotalHoursFor(StateRecord state) {
double hours = state.getTotalHours();
if (presentState == state)
hours += getCurrentTime() - timeOfLastStateChange;
return hours;
}
public double getTotalHoursFor(String state) {
StateRecord rec = getStateRecordFor(state);
return getTotalHoursFor(rec);
}
public double getTotalHoursFor(int index) {
return getTotalHoursFor( (String) getStateList().get(index) );
}
/**
 * Return the total recorded hours over all states, including the time
 * spent in the present state since the last state change.
 * NOTE(review): the loop bound is getNumberOfStates() (stateMap size) but
 * getStateRecordFor(int) indexes into the StateList — these can differ if
 * "Idle" is not in the StateList (initStateMap adds it to the map
 * regardless). Confirm the two stay in sync.
 */
public double getTotalHours() {
double total = getCurrentTime() - timeOfLastStateChange;
for (int i = 0; i < getNumberOfStates(); i++)
total += getStateRecordFor(i).getTotalHours();
return total;
}
public double getCompletedCycleHoursFor(String state) {
return getStateRecordFor(state).getCompletedCycleHours();
}
public double getCompletedCycleHoursFor(int index) {
return getStateRecordFor(index).getCompletedCycleHours();
}
/**
 * Sum the completed-cycle hours over every state in the StateList.
 */
public double getCompletedCycleHours() {
    double sum = 0.0d;
    int stateCount = getStateList().size();
    for (int i = 0; i < stateCount; i++)
        sum += getStateRecordFor(i).getCompletedCycleHours();
    return sum;
}
public double getCurrentCycleHoursFor(StateRecord state) {
double hours = state.getCurrentCycleHours();
if (presentState == state)
hours += getCurrentTime() - timeOfLastStateChange;
return hours;
}
/**
* Returns the amount of time spent in the specified state in current cycle
*/
public double getCurrentCycleHoursFor( String state ) {
StateRecord rec = getStateRecordFor(state);
return getCurrentCycleHoursFor(rec);
}
/**
* Return spent hours for a given state at the index in stateList for current cycle
*/
public double getCurrentCycleHoursFor(int index) {
StateRecord rec = getStateRecordFor(index);
return getCurrentCycleHoursFor(rec);
}
/**
* Return the total hours in current cycle for all the states
*/
public double getCurrentCycleHours() {
double total = getCurrentTime() - timeOfLastStateChange;
for (int i = 0; i < getNumberOfStates(); i++) {
total += getStateRecordFor(i).getCurrentCycleHours();
}
return total;
}
/**
* Returns the present state name
*/
public String getPresentState() {
return presentState.getStateName();
}
public boolean presentStateEquals(String state) {
return getPresentState().equals(state);
}
public boolean presentStateMatches(String state) {
return getPresentState().equalsIgnoreCase(state);
}
public boolean presentStateStartsWith(String prefix) {
return getPresentState().startsWith(prefix);
}
public boolean presentStateEndsWith(String suffix) {
return getPresentState().endsWith(suffix);
}
protected int getPresentStateIndex() {
return presentState.getIndex();
}
public void setPresentState() {}
/**
 * Print state information to the trace-state log file.
 * Writing is one step behind: each line is emitted only when a later state
 * change (at a strictly later time) confirms the duration of the previous
 * one, so only the final state of any batch of same-time changes is logged.
 */
public void printStateTrace( String state ) {
// First state ever
if( finalLastState.equals("") ) {
finalLastState = state;
stateReportFile.putString(String.format("%.5f %s.setState( \"%s\" ) dt = %s\n",
0.0d, this.getName(), getPresentState(), formatNumber(getCurrentTime())));
stateReportFile.flush();
timeOfLastPrintedState = getCurrentTime();
}
else {
// The final state in a sequence from the previous state change (one step behind)
if ( ! Tester.equalCheckTimeStep( timeOfLastPrintedState, getCurrentTime() ) ) {
stateReportFile.putString(String.format("%.5f %s.setState( \"%s\" ) dt = %s\n",
timeOfLastPrintedState, this.getName(), finalLastState, formatNumber(getCurrentTime() - timeOfLastPrintedState)));
// for( int i = 0; i < stateTraceRelatedModelEntities.size(); i++ ) {
// ModelEntitiy each = (ModelEntitiy) stateTraceRelatedModelEntities.get( i );
// putString( )
stateReportFile.flush();
timeOfLastPrintedState = getCurrentTime();
}
// Remember the most recent state; it is printed on the next time step
finalLastState = state;
}
}
/**
* Set the last time a histogram was updated for this entity
*/
public void setLastHistogramUpdateTime( double time ) {
secondToLastHistogramUpdateTime = lastHistogramUpdateTime;
lastHistogramUpdateTime = time;
}
/**
 * Returns the time from the start of the start state to the start of the
 * end state. If the end state started before the start state in the
 * present cycle, the start state's previous (second-last) entry time is
 * used instead. Returns NaN when either entry time falls outside the
 * relevant histogram-update window (i.e. was not in the present cycle).
 *
 * @throws ErrorException if either state name is not in the StateList
 */
public double getTimeFromStartState_ToEndState( String startState, String endState) {
// Determine the record of the start state
StateRecord startStateRec = this.getStateRecordFor(startState);
if (startStateRec == null) {
throw new ErrorException("Specified state: %s was not found in the StateList.", startState);
}
// Determine the record of the end state
StateRecord endStateRec = this.getStateRecordFor(endState);
if (endStateRec == null) {
throw new ErrorException("Specified state: %s was not found in the StateList.", endState);
}
// Is the start time of the end state greater or equal to the start time of the start state?
if (endStateRec.getLastStartTimeInState() >= startStateRec.getLastStartTimeInState()) {
// If either time was not in the present cycle, return NaN
if (endStateRec.getLastStartTimeInState() <= lastHistogramUpdateTime ||
startStateRec.getLastStartTimeInState() <= lastHistogramUpdateTime ) {
return Double.NaN;
}
// Return the time from the last start time of the start state to the last start time of the end state
return endStateRec.getLastStartTimeInState() - startStateRec.getLastStartTimeInState();
}
else {
// End state started first: measure from the start state's previous entry.
// If either time was not in the corresponding cycle, return NaN
if (endStateRec.getLastStartTimeInState() <= lastHistogramUpdateTime ||
startStateRec.getSecondLastStartTimeInState() <= secondToLastHistogramUpdateTime ) {
return Double.NaN;
}
// Return the time from the second to last start time of the start state to the last start time of the end state
return endStateRec.getLastStartTimeInState() - startStateRec.getSecondLastStartTimeInState();
}
}
/**
* Return the commitment
*/
public double getCommitment() {
return 1.0 - this.getFractionOfTimeForState( "Idle" );
}
/**
 * Return the fraction (0.0 - 1.0) of total recorded time spent in the
 * given state, or 0.0 when no time has been recorded yet (avoids a
 * divide-by-zero).
 *
 * @param aState state name as it appears in the StateList
 */
public double getFractionOfTimeForState( String aState ) {
    // Evaluate the denominator once; the original called getTotalHours()
    // twice per invocation, and it loops over every state each time.
    double totalHours = getTotalHours();
    if( totalHours > 0.0 ) {
        return this.getTotalHoursFor( aState ) / totalHours;
    }
    return 0.0;
}
/**
 * Return the percentage (0.0 - 100.0) of total recorded time spent in the
 * given state. Delegates to getFractionOfTimeForState() so the two methods
 * cannot disagree; both return 0.0 when no time has been recorded.
 *
 * @param aState state name as it appears in the StateList
 */
public double getPercentageOfTimeForState( String aState ) {
    return this.getFractionOfTimeForState( aState ) * 100.0;
}
/**
* Returns the number of hours the entity is in use.
* *!*!*!*! OVERLOAD !*!*!*!*
*/
public double getWorkingHours() {
double hours = 0.0d;
if ( this.isWorking() )
hours = getCurrentTime() - timeOfLastStateChange;
return workingHours + hours;
}
public Vector getStateList() {
return stateList;
}
/**
* Return total number of states
*/
public int getNumberOfStates() {
return stateMap.size();
}
// MAINTENANCE METHODS
/**
* Perform tasks required before a maintenance period
*/
public void doPreMaintenance() {
//@debug@ cr 'Entity should be overloaded' print
}
/**
* Start working again following a breakdown or maintenance period
*/
public void restart() {
//@debug@ cr 'Entity should be overloaded' print
}
/**
* Disconnect routes, release truck assignments, etc. when performing maintenance or breakdown
*/
public void releaseEquipment() {}
public boolean releaseEquipmentForMaintenanceSchedule( int index ) {
if( releaseEquipment.getValue() == null )
return true;
return releaseEquipment.getValue().get( index );
}
public boolean forceMaintenanceSchedule( int index ) {
if( forceMaintenance.getValue() == null )
return false;
return forceMaintenance.getValue().get( index );
}
/**
* Perform all maintenance schedules that are due
*/
public void doMaintenance() {
// scheduled maintenance
for( int index = 0; index < maintenancePendings.size(); index++ ) {
if( this.getMaintenancePendings().get( index ) > 0 ) {
if( traceFlag ) this.trace( "Starting Maintenance Schedule: " + index );
this.doMaintenance(index);
}
}
// Operating hours maintenance
for( int index = 0; index < maintenanceOperatingHoursPendings.size(); index++ ) {
if( this.getWorkingHours() > hoursForNextMaintenanceOperatingHours.get( index ) ) {
hoursForNextMaintenanceOperatingHours.set(index, this.getWorkingHours() + getMaintenanceOperatingHoursIntervals().get( index ));
maintenanceOperatingHoursPendings.addAt( 1, index );
this.doMaintenanceOperatingHours(index);
}
}
}
/**
 * Perform all the planned maintenance that is due for the given schedule.
 * The wait per maintenance comes from the master maintenance entity when
 * one is set, otherwise from this entity's own durations. Does nothing if
 * the duration is zero or no maintenance is pending for this schedule.
 *
 * @param index index of the maintenance schedule to perform
 */
public void doMaintenance( int index ) {
double wait;
// Shared-maintenance entities take their durations from the master
if( masterMaintenanceEntity != null ) {
wait = masterMaintenanceEntity.getMaintenanceDurations().getValue().get( index );
}
else {
wait = this.getMaintenanceDurations().getValue().get( index );
}
if( wait > 0.0 && maintenancePendings.get( index ) != 0 ) {
if( traceFlag ) this.trace( "ModelEntity.doMaintenance_Wait() -- start of maintenance" );
// Keep track of the start and end of maintenance times
maintenanceStartTime = getCurrentTime();
if( masterMaintenanceEntity != null ) {
maintenanceEndTime = maintenanceStartTime + ( maintenancePendings.get( index ) * masterMaintenanceEntity.getMaintenanceDurations().getValue().get( index ) );
}
else {
maintenanceEndTime = maintenanceStartTime + ( maintenancePendings.get( index ) * maintenanceDurations.getValue().get( index ) );
}
this.setPresentState( "Maintenance" );
maintenance = true;
this.doPreMaintenance();
// Release equipment if necessary
if( this.releaseEquipmentForMaintenanceSchedule( index ) ) {
this.releaseEquipment();
}
// Work off every pending maintenance, one wait per pending count.
// Note: pendings may be incremented by other processes while we wait.
while( maintenancePendings.get( index ) != 0 ) {
maintenancePendings.subAt( 1, index );
scheduleWait( wait );
// If maintenance pending goes negative, something is wrong
if( maintenancePendings.get( index ) < 0 ) {
this.error( "ModelEntity.doMaintenance_Wait()", "Maintenace pending should not be negative", "maintenacePending = "+maintenancePendings.get( index ) );
}
}
if( traceFlag ) this.trace( "ModelEntity.doMaintenance_Wait() -- end of maintenance" );
// The maintenance is over
this.setPresentState( "Idle" );
maintenance = false;
this.restart();
}
}
/**
 * Perform all the operating-hours maintenance that is due for the given
 * schedule. Does nothing when no maintenance is pending for that index.
 *
 * NOTE(review): each loop iteration waits (maintenanceEndTime -
 * maintenanceStartTime), i.e. the FULL duration for all pendings, yet only
 * one pending is subtracted per iteration. With more than one pending this
 * waits longer than the computed end time suggests — compare with
 * doMaintenance(int), which waits a single duration per pending. Confirm
 * whether this is intentional.
 *
 * @param index index of the operating-hours maintenance schedule
 */
public void doMaintenanceOperatingHours( int index ) {
if(maintenanceOperatingHoursPendings.get( index ) == 0 )
return;
if( traceFlag ) this.trace( "ModelEntity.doMaintenance_Wait() -- start of maintenance" );
// Keep track of the start and end of maintenance times
maintenanceStartTime = getCurrentTime();
maintenanceEndTime = maintenanceStartTime +
(maintenanceOperatingHoursPendings.get( index ) * getMaintenanceOperatingHoursDurationFor(index));
this.setPresentState( "Maintenance" );
maintenance = true;
this.doPreMaintenance();
while( maintenanceOperatingHoursPendings.get( index ) != 0 ) {
//scheduleWait( maintenanceDurations.get( index ) );
scheduleWait( maintenanceEndTime - maintenanceStartTime );
maintenanceOperatingHoursPendings.subAt( 1, index );
// If maintenance pending goes negative, something is wrong
if( maintenanceOperatingHoursPendings.get( index ) < 0 ) {
this.error( "ModelEntity.doMaintenance_Wait()", "Maintenace pending should not be negative", "maintenacePending = "+maintenanceOperatingHoursPendings.get( index ) );
}
}
if( traceFlag ) this.trace( "ModelEntity.doMaintenance_Wait() -- end of maintenance" );
// The maintenance is over
maintenance = false;
this.setPresentState( "Idle" );
this.restart();
}
/**
 * Check if a maintenance is due; if so, try to perform the maintenance.
 * Operating-hours maintenance is checked first. For shared-maintenance
 * groups, maintenance starts only when every entity in the group is
 * available, and is then started on every member (plus the master).
 *
 * @return true if a maintenance process was started (or, for shared
 *         groups, if the group check was reached and all were available)
 */
public boolean checkMaintenance() {
if( traceFlag ) this.trace( "checkMaintenance()" );
if( checkOperatingHoursMaintenance() ) {
return true;
}
// List of all entities going to maintenance
ArrayList<ModelEntity> sharedMaintenanceEntities;
// This is not a master maintenance entity
if( masterMaintenanceEntity != null ) {
sharedMaintenanceEntities = masterMaintenanceEntity.getSharedMaintenanceList();
}
// This is a master maintenance entity
else {
sharedMaintenanceEntities = getSharedMaintenanceList();
}
// If this entity is in shared maintenance relation with a group of entities
if( sharedMaintenanceEntities.size() > 0 || masterMaintenanceEntity != null ) {
// Are all entities in the group ready for maintenance
if( this.areAllEntitiesAvailable() ) {
// For every entity in the shared maintenance list plus the master maintenance entity
for( int i=0; i <= sharedMaintenanceEntities.size(); i++ ) {
ModelEntity aModel;
// Locate master maintenance entity( after all entity in shared maintenance list have been taken care of )
if( i == sharedMaintenanceEntities.size() ) {
// This entity is the master maintenance entity
if( masterMaintenanceEntity == null ) {
aModel = this;
}
// This entity is on the shared maintenance list of the master maintenance entity
else {
aModel = masterMaintenanceEntity;
}
}
// Next entity in the shared maintenance list
else {
aModel = sharedMaintenanceEntities.get( i );
}
// Check for aModel maintenances
// NOTE(review): the loop bound is THIS entity's maintenancePendings.size(),
// but the elements read are aModel's — confirm all group members always
// have schedules of the same length.
for( int index = 0; index < maintenancePendings.size(); index++ ) {
if( aModel.getMaintenancePendings().get( index ) > 0 ) {
if( traceFlag ) this.trace( "Starting Maintenance Schedule: " + index );
aModel.startProcess("doMaintenance", index);
}
}
}
return true;
}
else {
return false;
}
}
// This entity is maintained independently
else {
// Check for maintenances
for( int i = 0; i < maintenancePendings.size(); i++ ) {
if( maintenancePendings.get( i ) > 0 ) {
if( this.canStartMaintenance( i ) ) {
if( traceFlag ) this.trace( "Starting Maintenance Schedule: " + i );
this.startProcess("doMaintenance", i);
return true;
}
}
}
}
return false;
}
/**
 * Determine how many hours of maintenance are scheduled between startTime
 * and endTime: all currently pending maintenance plus every occurrence of
 * each schedule that falls in the window.
 *
 * NOTE(review): the inner loop extends endTime by each added duration
 * ("update the search period"), and the mutated endTime carries over to
 * later schedules in the outer loop — confirm this accumulation is
 * intended. Also, if any maintenanceIntervals value is <= 0, the while
 * loops never advance and this method does not terminate.
 *
 * @param startTime start of the window (simulation hours)
 * @param endTime   end of the window (simulation hours)
 * @return total hours of maintenance in the (possibly extended) window
 */
public double getScheduledMaintenanceHoursForPeriod( double startTime, double endTime ) {
if( traceFlag ) this.trace("Handler.getScheduledMaintenanceHoursForPeriod( "+startTime+", "+endTime+" )" );
double totalHours = 0.0;
double firstTime = 0.0;
// Add on hours for all pending maintenance
for( int i=0; i < maintenancePendings.size(); i++ ) {
totalHours += maintenancePendings.get( i ) * maintenanceDurations.getValue().get( i );
}
if( traceFlag ) this.traceLine( "Hours of pending maintenances="+totalHours );
// Add on hours for all maintenance scheduled to occur in the given period from startTime to endTime
for( int i=0; i < maintenancePendings.size(); i++ ) {
// Find the first time that maintenance is scheduled after startTime
firstTime = firstMaintenanceTimes.getValue().get( i );
while( firstTime < startTime ) {
firstTime += maintenanceIntervals.getValue().get( i );
}
if( traceFlag ) this.traceLine(" first time maintenance "+i+" is scheduled after startTime= "+firstTime );
// Now have the first maintenance start time after startTime
// Add all maintenances that lie in the given interval
while( firstTime < endTime ) {
if( traceFlag ) this.traceLine(" Checking for maintenances for period:"+firstTime+" to "+endTime );
// Add the maintenance
totalHours += maintenanceDurations.getValue().get( i );
// Update the search period
endTime += maintenanceDurations.getValue().get( i );
// Look for next maintenance in new interval
firstTime += maintenanceIntervals.getValue().get( i );
if( traceFlag ) this.traceLine(" Adding Maintenance duration = "+maintenanceDurations.getValue().get( i ) );
}
}
// Return the total hours of maintenance scheduled from startTime to endTime
if( traceFlag ) this.traceLine( "Maintenance hours to add= "+totalHours );
return totalHours;
}
public boolean checkOperatingHoursMaintenance() {
if( traceFlag ) this.trace("checkOperatingHoursMaintenance()");
// Check for maintenances
for( int i = 0; i < getMaintenanceOperatingHoursIntervals().size(); i++ ) {
// If the entity is not available, maintenance cannot start
if( ! this.canStartMaintenance( i ) )
continue;
if( this.getWorkingHours() > hoursForNextMaintenanceOperatingHours.get( i ) ) {
hoursForNextMaintenanceOperatingHours.set(i, (this.getWorkingHours() + getMaintenanceOperatingHoursIntervals().get( i )));
maintenanceOperatingHoursPendings.addAt( 1, i );
if( traceFlag ) this.trace( "Starting Maintenance Operating Hours Schedule : " + i );
this.startProcess("doMaintenanceOperatingHours", i);
return true;
}
}
return false;
}
/**
* Wrapper method for doMaintenance_Wait.
*/
public void doMaintenanceNetwork() {
this.startProcess("doMaintenanceNetwork_Wait");
}
/**
 * Network for planned maintenance. Runs forever: waits until the next
 * scheduled maintenance time, increments the pending count, starts the
 * maintenance when possible (or records a skipped/deferred attempt), then
 * advances that schedule and searches for the next event.
 * This method should be called in the initialize method of the specific entity.
 *
 * NOTE(review): the "find the next maintenance event" search appears twice
 * (before the loop and at its end); keep the two copies in sync.
 */
public void doMaintenanceNetwork_Wait() {
// Initialize schedules
for( int i=0; i < maintenancePendings.size(); i++ ) {
maintenancePendings.set( i, 0 );
}
nextMaintenanceTimes = new DoubleVector(firstMaintenanceTimes.getValue());
nextMaintenanceDuration = 0;
// Find the next maintenance event
int index = 0;
double earliestTime = Double.POSITIVE_INFINITY;
for( int i=0; i < nextMaintenanceTimes.size(); i++ ) {
double time = nextMaintenanceTimes.get( i );
if( Tester.lessCheckTolerance( time, earliestTime ) ) {
earliestTime = time;
index = i;
nextMaintenanceDuration = maintenanceDurations.getValue().get( i );
}
}
// Make sure that maintenance for entities on the shared list are being called after those entities have been initialize (AT TIME ZERO)
scheduleLastLIFO();
while( true ) {
double dt = earliestTime - getCurrentTime();
// Wait for the maintenance check time
if( dt > Process.getEventTolerance() ) {
scheduleWait( dt );
}
// Increment the number of maintenances due for the entity
maintenancePendings.addAt( 1, index );
// If this is a master maintenance entity
if (getSharedMaintenanceList().size() > 0) {
// If all the entities on the shared list are ready for maintenance
if( this.areAllEntitiesAvailable() ) {
// Put this entity to maintenance
if( traceFlag ) this.trace( "Starting Maintenance Schedule: " + index );
this.startProcess("doMaintenance", index);
}
}
// If this entity is maintained independently
else {
// Do maintenance if possible
if( ! this.isInService() && this.canStartMaintenance( index ) ) {
// if( traceFlag ) this.trace( "doMaintenanceNetwork_Wait: Starting Maintenance. PresentState = "+presentState+" IsAvailable? = "+this.isAvailable() );
if( traceFlag ) this.trace( "Starting Maintenance Schedule: " + index );
this.startProcess("doMaintenance", index);
}
// Keep track of the time the maintenance was attempted
else {
lastScheduledMaintenanceTimes.set( index, getCurrentTime() );
// If skipMaintenance was defined, cancel the maintenance
if( this.shouldSkipMaintenance( index ) ) {
// if a different maintenance is due, cancel this maintenance
boolean cancelMaintenance = false;
for( int i=0; i < maintenancePendings.size(); i++ ) {
if( i != index ) {
if( maintenancePendings.get( i ) > 0 ) {
cancelMaintenance = true;
break;
}
}
}
if( cancelMaintenance || this.isInMaintenance() ) {
maintenancePendings.subAt( 1, index );
}
}
// Do a check after the limit has expired
if( this.getDeferMaintenanceLimit( index ) > 0.0 ) {
this.startProcess( "scheduleCheckMaintenance", this.getDeferMaintenanceLimit( index ) );
}
}
}
// Determine the next maintenance time
nextMaintenanceTimes.addAt( maintenanceIntervals.getValue().get( index ), index );
// Find the next maintenance event (same search as above the loop)
index = 0;
earliestTime = Double.POSITIVE_INFINITY;
for( int i=0; i < nextMaintenanceTimes.size(); i++ ) {
double time = nextMaintenanceTimes.get( i );
if( Tester.lessCheckTolerance( time, earliestTime ) ) {
earliestTime = time;
index = i;
nextMaintenanceDuration = maintenanceDurations.getValue().get( i );
}
}
}
}
public double getDeferMaintenanceLimit( int index ) {
if( deferMaintenanceLimit.getValue() == null )
return 0.0d;
return deferMaintenanceLimit.getValue().get( index );
}
public void scheduleCheckMaintenance( double wait ) {
scheduleWait( wait );
this.checkMaintenance();
}
public boolean shouldSkipMaintenance( int index ) {
if( skipMaintenanceIfOverlap.getValue().size() == 0 )
return false;
return skipMaintenanceIfOverlap.getValue().get( index );
}
/**
 * Return TRUE if any scheduled maintenance count is outstanding, or if the
 * accumulated working hours have passed any operating-hours threshold.
 */
public boolean isMaintenancePending() {
    for( int idx = 0; idx < maintenancePendings.size(); idx++ ) {
        if( maintenancePendings.get( idx ) > 0 )
            return true;
    }
    for( int idx = 0; idx < hoursForNextMaintenanceOperatingHours.size(); idx++ ) {
        if( this.getWorkingHours() > hoursForNextMaintenanceOperatingHours.get( idx ) )
            return true;
    }
    return false;
}
public boolean isForcedMaintenancePending() {
if( forceMaintenance.getValue() == null )
return false;
for( int i = 0; i < maintenancePendings.size(); i++ ) {
if( maintenancePendings.get( i ) > 0 && forceMaintenance.getValue().get(i) ) {
return true;
}
}
return false;
}
public ArrayList<ModelEntity> getSharedMaintenanceList () {
return sharedMaintenanceList.getValue();
}
public IntegerVector getMaintenancePendings () {
return maintenancePendings;
}
public DoubleListInput getMaintenanceDurations() {
return maintenanceDurations;
}
/**
* Return the start of the next scheduled maintenance time if not in maintenance,
* or the start of the current scheduled maintenance time if in maintenance
*/
public double getNextMaintenanceStartTime() {
if( nextMaintenanceTimes == null )
return Double.POSITIVE_INFINITY;
else
return nextMaintenanceTimes.getMin();
}
/**
* Return the duration of the next maintenance event (assuming only one pending)
*/
public double getNextMaintenanceDuration() {
return nextMaintenanceDuration;
}
/**
 * Shows if an entity would ever go on service: either it has its own
 * first-maintenance times configured, or it follows a master maintenance
 * entity's schedule.
 */
public boolean hasServiceScheduled() {
    // Idiom fix: return the condition directly instead of if/return true/false.
    return firstMaintenanceTimes.getValue().size() != 0 || masterMaintenanceEntity != null;
}
public void setMasterMaintenanceBlock( ModelEntity aModel ) {
masterMaintenanceEntity = aModel;
}
// BREAKDOWN METHODS
/**
* No Comments Given.
*/
public void calculateTimeOfNextFailure() {
hoursForNextFailure = (this.getWorkingHours() + this.getNextBreakdownIAT());
}
/**
* Activity Network for Breakdowns.
*/
public void doBreakdown() {
}
/**
* Prints the header for the entity's state list.
* @return bottomLine contains format for each column of the bottom line of the group report
*/
public IntegerVector printUtilizationHeaderOn( FileEntity anOut ) {
IntegerVector bottomLine = new IntegerVector();
if( getStateList().size() != 0 ) {
anOut.putStringTabs( "Name", 1 );
bottomLine.add( ReportAgent.BLANK );
int doLoop = getStateList().size();
for( int x = 0; x < doLoop; x++ ) {
String state = (String)getStateList().get( x );
anOut.putStringTabs( state, 1 );
bottomLine.add( ReportAgent.AVERAGE_PCT_ONE_DEC );
}
anOut.newLine();
}
return bottomLine;
}
/**
* Print the entity's name and percentage of hours spent in each state.
* @return columnValues are the values for each column in the group report (0 if the value is a String)
*/
public DoubleVector printUtilizationOn( FileEntity anOut ) {
double total;
DoubleVector columnValues = new DoubleVector();
if( getNumberOfStates() != 0 ) {
total = getTotalHours();
if( !(total == 0.0) ) {
anOut.putStringTabs( getName(), 1 );
columnValues.add( 0.0 );
for( int i = 0; i < getNumberOfStates(); i++ ) {
double value = getTotalHoursFor( i ) / total;
anOut.putDoublePercentWithDecimals( value, 1 );
anOut.putTabs( 1 );
columnValues.add( value );
}
anOut.newLine();
}
}
return columnValues;
}
/**
* This method must be overridden in any subclass of ModelEntity.
*/
public boolean isAvailable() {
throw new ErrorException( "Must override isAvailable in any subclass of ModelEntity." );
}
/**
* This method must be overridden in any subclass of ModelEntity.
*/
public boolean canStartMaintenance( int index ) {
return isAvailable();
}
/**
* This method must be overridden in any subclass of ModelEntity.
*/
public boolean canStartForcedMaintenance() {
return isAvailable();
}
/**
* This method must be overridden in any subclass of ModelEntity.
*/
public boolean areAllEntitiesAvailable() {
throw new ErrorException( "Must override areAllEntitiesAvailable in any subclass of ModelEntity." );
}
/**
 * Return the duration of the next breakdown: a random draw from the
 * configured downtime-duration distribution, or 0.0 when none is set.
 */
public double getBreakdownDuration() {
    // if( traceFlag ) this.trace( "getBreakdownDuration()" );
    ProbabilityDistribution durationDist = getDowntimeDurationDistribution();
    if ( durationDist == null ) {
        return 0.0;
    }
    // A distribution was specified: select a duration randomly from it
    return durationDist.nextValue();
}
/**
 * Return the inter-arrival time until the next breakdown: a random draw
 * from the downtime-IAT distribution when one is configured, otherwise the
 * fixed iATFailure value.
 */
public double getNextBreakdownIAT() {
    ProbabilityDistribution iatDist = getDowntimeIATDistribution();
    return ( iatDist != null ) ? iatDist.nextValue() : iATFailure;
}
public double getHoursForNextFailure() {
return hoursForNextFailure;
}
public void setHoursForNextFailure( double hours ) {
hoursForNextFailure = hours;
}
/**
Returns a vector of strings describing the ModelEntity.
Override to add details
@return Vector - tab delimited strings describing the DisplayEntity
**/
@Override
public Vector getInfo() {
Vector info = super.getInfo();
info.add( String.format("Present State\t%s", getPresentState()) );
return info;
}
protected DoubleVector getMaintenanceOperatingHoursIntervals() {
return maintenanceOperatingHoursIntervals.getValue();
}
protected double getMaintenanceOperatingHoursDurationFor(int index) {
return maintenanceOperatingHoursDurations.getValue().get(index);
}
protected ProbabilityDistribution getDowntimeIATDistribution() {
return downtimeIATDistribution.getValue();
}
} |
package com.atexpose;
import com.atexpose.dispatcher.IDispatcher;
import com.atexpose.dispatcherfactories.WebServerBuilder;
import io.schinzel.basicutils.Sandman;
import org.jsoup.Connection;
import org.jsoup.Jsoup;
import org.junit.After;
import org.junit.Test;
import java.io.IOException;
import static org.assertj.core.api.Assertions.*;
/**
* Tests different set ups of the web server.
*
* @author Schinzel
*/
public class WebServerCustomTest {
private AtExpose mAtExpose;
@After
public void after() {
mAtExpose.shutdown();
//Snooze required to get tests to work on Travis
Sandman.snoozeMillis(10);
}
@Test
public void requestPage_PageDoesNotExistCustom404_Custom404() throws IOException {
IDispatcher webServer = WebServerBuilder.create()
//Set dir where html and other files resides
.webServerDir("testfiles")
//Set custom 404 page
.fileName404Page("WebServerCustomTest_404Page.html")
//Build web server
.build();
mAtExpose = AtExpose.create()
//Start web server
.start(webServer);
String result = Jsoup
.connect("http://127.0.0.1:5555/noSuchFile.html")
.method(Connection.Method.GET)
.ignoreHttpErrors(true)
.execute()
.body();
assertThat(result).contains("Custom 404. File not found");
}
@Test
public void requestPage_PageDoesNotExistNotCustom404_Default404() throws IOException {
IDispatcher webServer = WebServerBuilder.create()
//Set dir where html and other files resides
.webServerDir("testfiles")
//Build web server
.build();
mAtExpose = AtExpose.create()
//Start web server
.start(webServer);
String result = Jsoup
.connect("http://127.0.0.1:5555/noSuchFile.html")
.method(Connection.Method.GET)
.ignoreHttpErrors(true)
.execute()
.body();
assertThat(result).contains("<html><body><center>File not found</center><body></html>");
}
} |
package com.softinstigate.restheart;
import ch.qos.logback.classic.Level;
import com.softinstigate.restheart.utils.URLUtilis;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.yaml.snakeyaml.Yaml;
/**
*
* @author uji
*/
public class Configuration
{
public static String DOC_Path = "http:
private static final Logger logger = LoggerFactory.getLogger(Configuration.class);
private final boolean httpsListener;
private final int httpsPort;
private final String httpsHost;
private final boolean httpListener;
private final int httpPort;
private final String httpHost;
private final boolean ajpListener;
private final int ajpPort;
private final String ajpHost;
private final boolean useEmbeddedKeystore;
private final String keystoreFile;
private final String keystorePassword;
private final String certPassword;
private final List<Map<String, Object>> mongoServers;
private final List<Map<String, Object>> mongoCredentials;
private final List<Map<String, Object>> mongoMounts;
private final List<Map<String, Object>> applicationLogicMounts;
private final String idmImpl;
private final Map<String, Object> idmArgs;
private final String amImpl;
private final Map<String, Object> amArgs;
private final String logFilePath;
private final Level logLevel;
private final boolean logToConsole;
private final boolean logToFile;
private final boolean localCacheEnabled;
private final long localCacheTtl;
private final int requestsLimit;
private final int ioThreads;
private final int workerThreads;
private final int bufferSize;
private final int buffersPerRegion;
private final boolean directBuffers;
private final boolean forceGzipEncoding;
public static final String LOCAL_CACHE_ENABLED = "local-cache-enabled";
public static final String LOCAL_CACHE_TTL = "local-cache-ttl";
public static final String FORCE_GZIP_ENCODING = "force-gzip-encoding";
public static final String DIRECT_BUFFERS = "direct-buffers";
public static final String BUFFERS_PER_REGION = "buffers-per-region";
public static final String BUFFER_SIZE = "buffer-size";
public static final String WORKER_THREADS = "worker-threads";
public static final String IO_THREADS = "io-threads";
public static final String REQUESTS_LIMIT = "requests-limit";
public static final String ENABLE_LOG_FILE = "enable-log-file";
public static final String ENABLE_LOG_CONSOLE = "enable-log-console";
public static final String LOG_LEVEL = "log-level";
public static final String LOG_FILE_PATH = "log-file-path";
public static final String IMPLEMENTATION_CLASS = "implementation-class";
public static final String ACCESS_MANAGER = "access-manager";
public static final String IDM = "idm";
public static final String MONGO_SERVERS = "mongo-servers";
public static final String MONGO_CREDENTIALS = "mongo-credentials";
public static final String MONGO_MOUNTS = "mongo-mounts";
public static final String MONGO_MOUNT_WHAT = "what";
public static final String MONGO_MOUNT_WHERE = "where";
public static final String MONGO_AUTH_DB = "auth-db";
public static final String MONGO_PASSWORD = "password";
public static final String MONGO_USER = "user";
public static final String MONGO_PORT = "port";
public static final String MONGO_HOST = "host";
public static final String APPLICATION_LOGIC_MOUNTS = "application-logic-mounts";
public static final String APPLICATION_LOGIC_MOUNT_WHAT = "what";
public static final String APPLICATION_LOGIC_MOUNT_ARGS = "args";
public static final String APPLICATION_LOGIC_MOUNT_WHERE = "where";
public static final String APPLICATION_LOGIC_MOUNT_SECURED = "secured";
public static final String CERT_PASSWORD = "certpassword";
public static final String KEYSTORE_PASSWORD = "keystore-password";
public static final String KEYSTORE_FILE = "keystore-file";
public static final String USE_EMBEDDED_KEYSTORE = "use-embedded-keystore";
public static final String AJP_HOST = "ajp-host";
public static final String AJP_PORT = "ajp-port";
public static final String AJP_LISTENER = "ajp-listener";
public static final String HTTP_HOST = "http-host";
public static final String HTTP_PORT = "http-port";
public static final String HTTP_LISTENER = "http-listener";
public static final String HTTPS_HOST = "https-host";
public static final String HTTPS_PORT = "https-port";
public static final String HTTPS_LISTENER = "https-listener";
/**
 * Builds a configuration using built-in defaults only (no configuration
 * file): https on 4443 and http on 8080 enabled, ajp disabled, embedded
 * keystore, a single local mongod, everything mounted at "/".
 */
public Configuration()
{
// NOTE(review): 4443 here vs 8443 used as the default in
// Configuration(String) — confirm which https port is intended
httpsListener = true;
httpsPort = 4443;
httpsHost = "0.0.0.0";
httpListener = true;
httpPort = 8080;
httpHost = "0.0.0.0";
ajpListener = false;
ajpPort = 8009;
ajpHost = "0.0.0.0";
// embedded keystore: no external keystore/cert material needed
useEmbeddedKeystore = true;
keystoreFile = null;
keystorePassword = null;
certPassword = null;
// default mongo server: localhost:27017, no credentials
mongoServers = new ArrayList<>();
Map<String, Object> defaultMongoServer = new HashMap<>();
defaultMongoServer.put(MONGO_HOST, "127.0.0.1");
defaultMongoServer.put(MONGO_PORT, 27017);
mongoServers.add(defaultMongoServer);
mongoCredentials = null;
// default mount: expose everything ("*") under "/"
mongoMounts = new ArrayList<>();
Map<String, Object> defaultMongoMounts = new HashMap<>();
defaultMongoMounts.put(MONGO_MOUNT_WHAT, "*");
defaultMongoMounts.put(MONGO_MOUNT_WHERE, "/");
mongoMounts.add(defaultMongoMounts);
applicationLogicMounts = new ArrayList<>();
// no identity/access manager configured by default
idmImpl = null;
idmArgs = null;
amImpl = null;
amArgs = null;
// log file goes to the system temporary directory
logFilePath = URLUtilis.removeTrailingSlashes(System.getProperty("java.io.tmpdir")) + File.separator + "restheart.log";
logToConsole = true;
logToFile = true;
logLevel = Level.INFO;
localCacheEnabled = false;
localCacheTtl = 1000;
requestsLimit = 100;
ioThreads = 2;
workerThreads = 32;
bufferSize = 16384;
buffersPerRegion = 20;
directBuffers = true;
forceGzipEncoding = false;
}
/**
 * Builds a configuration from a YAML file. If the file is missing or
 * malformed, falls back to built-in defaults (logging the problem);
 * otherwise every parameter is read with a per-key default.
 *
 * @param confFilePath path of the YAML configuration file
 */
public Configuration(String confFilePath)
{
Yaml yaml = new Yaml();
Map<String, Object> conf = null;
try
{
conf = (Map<String, Object>) yaml.load(new FileInputStream(new File(confFilePath)));
}
catch (FileNotFoundException fnef)
{
logger.error("configuration file not found. starting with default parameters.");
conf = null;
}
catch (Throwable t)
{
// any parse error (wrong YAML, wrong top-level type) falls back to defaults
logger.error("wrong configuration file format. starting with default parameters.", t);
conf = null;
}
if (conf == null)
{
// defaults branch — mirrors the no-arg constructor except for the
// https port (8443 here vs 4443 there)
httpsListener = true;
httpsPort = 8443;
httpsHost = "0.0.0.0";
httpListener = true;
httpPort = 8080;
httpHost = "0.0.0.0";
ajpListener = false;
ajpPort = 8009;
ajpHost = "0.0.0.0";
useEmbeddedKeystore = true;
keystoreFile = null;
keystorePassword = null;
certPassword = null;
mongoServers = new ArrayList<>();
Map<String, Object> defaultMongoServer = new HashMap<>();
defaultMongoServer.put(MONGO_HOST, "127.0.0.1");
defaultMongoServer.put(MONGO_PORT, 27017);
mongoServers.add(defaultMongoServer);
mongoMounts = new ArrayList<>();
Map<String, Object> defaultMongoMounts = new HashMap<>();
defaultMongoMounts.put(MONGO_MOUNT_WHAT, "*");
defaultMongoMounts.put(MONGO_MOUNT_WHERE, "/");
mongoMounts.add(defaultMongoMounts);
applicationLogicMounts = new ArrayList<>();
mongoCredentials = null;
idmImpl = null;
idmArgs = null;
amImpl = null;
amArgs = null;
logFilePath = URLUtilis.removeTrailingSlashes(System.getProperty("java.io.tmpdir")) + File.separator + "restheart.log";
logToConsole = true;
logToFile = true;
logLevel = Level.INFO;
localCacheEnabled = false;
localCacheTtl = 1000;
requestsLimit = 100;
ioThreads = 2;
workerThreads = 32;
bufferSize = 16384;
buffersPerRegion = 20;
directBuffers = true;
forceGzipEncoding = false;
}
else
{
// file parsed: read every parameter, falling back per key
httpsListener = getAsBooleanOrDefault(conf, HTTPS_LISTENER, true);
httpsPort = getAsIntegerOrDefault(conf, HTTPS_PORT, 8443);
httpsHost = getAsStringOrDefault(conf, HTTPS_HOST, "0.0.0.0");
httpListener = getAsBooleanOrDefault(conf, HTTP_LISTENER, false);
httpPort = getAsIntegerOrDefault(conf, HTTP_PORT, 8080);
httpHost = getAsStringOrDefault(conf, HTTP_HOST, "0.0.0.0");
ajpListener = getAsBooleanOrDefault(conf, AJP_LISTENER, false);
ajpPort = getAsIntegerOrDefault(conf, AJP_PORT, 8009);
ajpHost = getAsStringOrDefault(conf, AJP_HOST, "0.0.0.0");
useEmbeddedKeystore = getAsBooleanOrDefault(conf, USE_EMBEDDED_KEYSTORE, true);
keystoreFile = getAsStringOrDefault(conf, KEYSTORE_FILE, null);
keystorePassword = getAsStringOrDefault(conf, KEYSTORE_PASSWORD, null);
certPassword = getAsStringOrDefault(conf, CERT_PASSWORD, null);
List<Map<String, Object>> mongoServersDefault = new ArrayList<>();
Map<String, Object> defaultMongoServer = new HashMap<>();
defaultMongoServer.put(MONGO_HOST, "127.0.0.1");
defaultMongoServer.put(MONGO_PORT, 27017);
mongoServersDefault.add(defaultMongoServer);
mongoServers = getAsListOfMaps(conf, MONGO_SERVERS, mongoServersDefault);
mongoCredentials = getAsListOfMaps(conf, MONGO_CREDENTIALS, null);
List<Map<String, Object>> mongoMountsDefault = new ArrayList<>();
Map<String, Object> defaultMongoMounts = new HashMap<>();
defaultMongoMounts.put(MONGO_MOUNT_WHAT, "*");
defaultMongoMounts.put(MONGO_MOUNT_WHERE, "/");
mongoMountsDefault.add(defaultMongoMounts);
mongoMounts = getAsListOfMaps(conf, MONGO_MOUNTS, mongoMountsDefault);
applicationLogicMounts = getAsListOfMaps(conf, APPLICATION_LOGIC_MOUNTS, new ArrayList<>());
// idm/am args are the whole sub-map, including the implementation-class entry
Map<String, Object> idm = getAsMap(conf, IDM);
Map<String, Object> am = getAsMap(conf, ACCESS_MANAGER);
idmImpl = getAsStringOrDefault(idm, IMPLEMENTATION_CLASS, "com.softinstigate.restheart.security.impl.SimpleFileIdentityManager");
idmArgs = idm;
amImpl = getAsStringOrDefault(am, IMPLEMENTATION_CLASS, "com.softinstigate.restheart.security.impl.SimpleAccessManager");
amArgs = am;
logFilePath = getAsStringOrDefault(conf, LOG_FILE_PATH, URLUtilis.removeTrailingSlashes(System.getProperty("java.io.tmpdir")) + File.separator + "restheart.log");
String _logLevel = getAsStringOrDefault(conf, LOG_LEVEL, "WARN");
logToConsole = getAsBooleanOrDefault(conf, ENABLE_LOG_CONSOLE, true);
logToFile = getAsBooleanOrDefault(conf, ENABLE_LOG_FILE, true);
Level level;
try
{
// Level.valueOf throws on unknown names; fall back to WARN
level = Level.valueOf(_logLevel);
}
catch (Exception e)
{
logger.info("wrong value for parameter {}: {}. using its default value {}", "log-level", _logLevel, "WARN");
level = Level.WARN;
}
logLevel = level;
requestsLimit = getAsIntegerOrDefault(conf, REQUESTS_LIMIT, 100);
localCacheEnabled = getAsBooleanOrDefault(conf, LOCAL_CACHE_ENABLED, false);
localCacheTtl = getAsLongOrDefault(conf, LOCAL_CACHE_TTL, (long)1000);
ioThreads = getAsIntegerOrDefault(conf, IO_THREADS, 2);
workerThreads = getAsIntegerOrDefault(conf, WORKER_THREADS, 32);
bufferSize = getAsIntegerOrDefault(conf, BUFFER_SIZE, 16384);
buffersPerRegion = getAsIntegerOrDefault(conf, BUFFERS_PER_REGION, 20);
directBuffers = getAsBooleanOrDefault(conf, DIRECT_BUFFERS, true);
forceGzipEncoding = getAsBooleanOrDefault(conf, FORCE_GZIP_ENCODING, false);
}
}
/**
 * Reads a configuration value expected to be a list of maps (e.g.
 * mongo-servers, mongo-mounts).
 *
 * @param conf the configuration map (may be null)
 * @param key the configuration key to look up
 * @param defaultValue value returned when the key is missing or not a list
 * @return the configured list, or defaultValue
 */
private static List<Map<String, Object>> getAsListOfMaps(Map<String, Object> conf, String key, List<Map<String, Object>> defaultValue)
{
if (conf == null)
{
logger.warn("parameters group {} not specified in the configuration file. using its default value {}", key, defaultValue);
return defaultValue;
}
Object o = conf.get(key);
if (o instanceof List)
{
// unchecked cast: element types are not verified here; consumers
// must tolerate non-map entries coming from a malformed file
return (List<Map<String, Object>>) o;
}
else
{
// NOTE(review): this branch is also taken when the key IS present
// but has the wrong type, yet the message says "not specified"
logger.warn("parameters group {} not specified in the configuration file, using its default value {}", key, defaultValue);
return defaultValue;
}
}
/**
 * Reads a configuration value expected to be a map (e.g. the idm or
 * access-manager groups).
 *
 * @param conf the configuration map (may be null)
 * @param key the configuration key to look up
 * @return the configured map, or null when missing or not a map
 */
private static Map<String, Object> getAsMap(Map<String, Object> conf, String key)
{
if (conf == null)
{
logger.warn("parameters group {} not specified in the configuration file.", key);
return null;
}
Object o = conf.get(key);
if (o instanceof Map)
{
// unchecked cast: value types inside the map are not verified here
return (Map<String, Object>) o;
}
else
{
// also reached when the key is present but not a map
logger.warn("parameters group {} not specified in the configuration file.", key);
return null;
}
}
/**
 * Reads a boolean configuration value.
 *
 * @param conf the configuration map (may be null)
 * @param key the configuration key to look up
 * @param defaultValue value used when the key is missing or invalid; a
 * null default means "no default" and suppresses the info log
 * @return the configured Boolean, or defaultValue when missing/invalid
 */
private static Boolean getAsBooleanOrDefault(Map<String, Object> conf, String key, Boolean defaultValue)
{
if (conf == null)
{
logger.error("tried to get paramenter {} from a null configuration map. using its default value {}", key, defaultValue);
return defaultValue;
}
Object o = conf.get(key);
if (o == null)
{
if (defaultValue != null) // if default value is null there is no default value actually
{
logger.info("parameter {} not specified in the configuration file. using its default value {}", key, defaultValue);
}
return defaultValue;
}
else if (o instanceof Boolean)
{
logger.debug("paramenter {} set to {}", key, o);
return (Boolean) o;
}
else
{
// present but wrong type: keep the default
logger.info("wrong value for parameter {}: {}. using its default value {}", key, o, defaultValue);
return defaultValue;
}
}
/**
 * Reads a string configuration value.
 *
 * @param conf the configuration map (may be null)
 * @param key the configuration key to look up
 * @param defaultValue value used when the key is missing or invalid; a
 * null default means "no default" and suppresses the info log
 * @return the configured String, or defaultValue when missing/invalid
 */
private static String getAsStringOrDefault(Map<String, Object> conf, String key, String defaultValue)
{
    if (conf == null)
    {
        logger.error("tried to get paramenter {} from a null configuration map. using its default value {}", key, defaultValue);
        // fixed: previously returned null here even though the log message
        // promises the default value (and getAsBooleanOrDefault returns it)
        return defaultValue;
    }
    Object o = conf.get(key);
    if (o == null)
    {
        if (defaultValue != null) // if default value is null there is no default value actually
        {
            logger.info("parameter {} not specified in the configuration file. using its default value {}", key, defaultValue);
        }
        return defaultValue;
    }
    else if (o instanceof String)
    {
        logger.debug("paramenter {} set to {}", key, o);
        return (String) o;
    }
    else
    {
        // present but wrong type: keep the default
        logger.info("wrong value for parameter {}: {}. using its default value {}", key, o, defaultValue);
        return defaultValue;
    }
}
/**
 * Reads an integer configuration value.
 *
 * @param conf the configuration map (may be null)
 * @param key the configuration key to look up
 * @param defaultValue value used when the key is missing or invalid; a
 * null default means "no default" and suppresses the info log
 * @return the configured Integer, or defaultValue when missing/invalid
 */
private static Integer getAsIntegerOrDefault(Map<String, Object> conf, String key, Integer defaultValue)
{
    if (conf == null)
    {
        logger.error("tried to get paramenter {} from a null configuration map. using its default value {}", key, defaultValue);
        // fixed: previously returned null here even though the log message
        // promises the default value (and getAsBooleanOrDefault returns it)
        return defaultValue;
    }
    Object o = conf.get(key);
    if (o == null)
    {
        if (defaultValue != null) // if default value is null there is no default value actually
        {
            logger.info("parameter {} not specified in the configuration file. using its default value {}", key, defaultValue);
        }
        return defaultValue;
    }
    else if (o instanceof Integer)
    {
        logger.debug("paramenter {} set to {}", key, o);
        return (Integer) o;
    }
    else
    {
        // present but wrong type: keep the default
        logger.info("wrong value for parameter {}: {}. using its default value {}", key, o, defaultValue);
        return defaultValue;
    }
}
/**
 * Reads a long configuration value. Any Number is accepted and converted
 * via its decimal string form; non-integral values fall back to the
 * default.
 *
 * @param conf the configuration map (may be null)
 * @param key the configuration key to look up
 * @param defaultValue value used when the key is missing or invalid; a
 * null default means "no default" and suppresses the info log
 * @return the configured Long, or defaultValue when missing/invalid
 */
private static Long getAsLongOrDefault(Map<String, Object> conf, String key, Long defaultValue)
{
    if (conf == null)
    {
        logger.error("tried to get paramenter {} from a null configuration map. using its default value {}", key, defaultValue);
        // fixed: previously returned null here even though the log message
        // promises the default value (and getAsBooleanOrDefault returns it)
        return defaultValue;
    }
    Object o = conf.get(key);
    if (o == null)
    {
        if (defaultValue != null) // if default value is null there is no default value actually
        {
            logger.info("parameter {} not specified in the configuration file. using its default value {}", key, defaultValue);
        }
        return defaultValue;
    }
    else if (o instanceof Number)
    {
        logger.debug("paramenter {} set to {}", key, o);
        try
        {
            // e.g. "3.5" or other non-integral renderings throw here
            return Long.parseLong(o.toString());
        }
        catch(NumberFormatException nfe)
        {
            logger.info("wrong value for parameter {}: {}. using its default value {}", key, o, defaultValue);
            return defaultValue;
        }
    }
    else
    {
        // present but wrong type: keep the default
        logger.info("wrong value for parameter {}: {}. using its default value {}", key, o, defaultValue);
        return defaultValue;
    }
}
/**
* @return the httpsListener
*/
public boolean isHttpsListener()
{
return httpsListener;
}
/**
* @return the httpsPort
*/
public int getHttpsPort()
{
return httpsPort;
}
/**
* @return the httpsHost
*/
public String getHttpsHost()
{
return httpsHost;
}
/**
* @return the httpListener
*/
public boolean isHttpListener()
{
return httpListener;
}
/**
* @return the httpPort
*/
public int getHttpPort()
{
return httpPort;
}
/**
* @return the httpHost
*/
public String getHttpHost()
{
return httpHost;
}
/**
* @return the ajpListener
*/
public boolean isAjpListener()
{
return ajpListener;
}
/**
* @return the ajpPort
*/
public int getAjpPort()
{
return ajpPort;
}
/**
* @return the ajpHost
*/
public String getAjpHost()
{
return ajpHost;
}
/**
* @return the useEmbeddedKeystore
*/
public boolean isUseEmbeddedKeystore()
{
return useEmbeddedKeystore;
}
/**
* @return the keystoreFile
*/
public String getKeystoreFile()
{
return keystoreFile;
}
/**
* @return the keystorePassword
*/
public String getKeystorePassword()
{
return keystorePassword;
}
/**
* @return the certPassword
*/
public String getCertPassword()
{
return certPassword;
}
/**
* @return the logFilePath
*/
public String getLogFilePath()
{
return logFilePath;
}
/**
* @return the logLevel
*/
public Level getLogLevel()
{
return logLevel;
}
/**
* @return the logToConsole
*/
public boolean isLogToConsole()
{
return logToConsole;
}
/**
* @return the logToFile
*/
public boolean isLogToFile()
{
return logToFile;
}
/**
* @return the ioThreads
*/
public int getIoThreads()
{
return ioThreads;
}
/**
* @return the workerThreads
*/
public int getWorkerThreads()
{
return workerThreads;
}
/**
* @return the bufferSize
*/
public int getBufferSize()
{
return bufferSize;
}
/**
* @return the buffersPerRegion
*/
public int getBuffersPerRegion()
{
return buffersPerRegion;
}
/**
* @return the directBuffers
*/
public boolean isDirectBuffers()
{
return directBuffers;
}
/**
* @return the forceGzipEncoding
*/
public boolean isForceGzipEncoding()
{
return forceGzipEncoding;
}
/**
* @return the idmImpl
*/
public String getIdmImpl()
{
return idmImpl;
}
/**
* @return the idmArgs
*/
public Map<String, Object> getIdmArgs()
{
return idmArgs;
}
/**
* @return the amImpl
*/
public String getAmImpl()
{
return amImpl;
}
/**
* @return the amArgs
*/
public Map<String, Object> getAmArgs()
{
return amArgs;
}
/**
 * @return the requestsLimit
 * @deprecated misspelled alias kept for backward compatibility; use
 * {@link #getRequestsLimit()} instead
 */
@Deprecated
public int getRequestLimit()
{
    return getRequestsLimit();
}
/**
* @return the mongoServers
*/
public List<Map<String, Object>> getMongoServers()
{
return mongoServers;
}
/**
* @return the mongoCredentials
*/
public List<Map<String, Object>> getMongoCredentials()
{
return mongoCredentials;
}
/**
* @return the mongoMountsDefault
*/
public List<Map<String, Object>> getMongoMounts()
{
return mongoMounts;
}
/**
* @return the localCacheEnabled
*/
public boolean isLocalCacheEnabled()
{
return localCacheEnabled;
}
/**
* @return the localCacheTtl
*/
public long getLocalCacheTtl()
{
return localCacheTtl;
}
/**
* @return the requestsLimit
*/
public int getRequestsLimit()
{
return requestsLimit;
}
/**
* @return the applicationLogicMounts
*/
public List<Map<String, Object>> getApplicationLogicMounts()
{
return applicationLogicMounts;
}
} |
package uk.org.cinquin.mutinack.misc_util;
/**
 * Holds the application version number as a single constant.
 */
public class VersionNumber {
// bump on each release
public static final float version = 0.7f;
}
package com.trontheim.expstore.common;
import com.trontheim.expstore.ExperienceStore;
import com.trontheim.expstore.block.BlockExpChanger;
import com.trontheim.expstore.client.renderer.block.RenderBlockExpStore;
import com.trontheim.expstore.init.ESBlocks;
import com.trontheim.expstore.tileentity.TileEntityExpStore;
import cpw.mods.fml.client.registry.RenderingRegistry;
import cpw.mods.fml.common.event.FMLInitializationEvent;
import cpw.mods.fml.common.event.FMLPostInitializationEvent;
import cpw.mods.fml.common.event.FMLPreInitializationEvent;
import cpw.mods.fml.common.registry.GameRegistry;
import net.minecraft.init.Blocks;
import net.minecraft.init.Items;
import net.minecraft.item.ItemStack;
import net.minecraft.launchwrapper.Launch;
import net.minecraft.tileentity.TileEntity;
/**
 * Sided-proxy base: registers the mod's tile entity, block renderer and
 * crafting recipes during FML pre-initialization.
 */
abstract public class CommonProxy {
// true when running from a deobfuscated (development) environment,
// as reported by the FML launch blackboard
private static final boolean developmentEnvironment = (Boolean) Launch.blackboard.get("fml.deobfuscatedEnvironment");
public void preInit(FMLPreInitializationEvent event) {
TileEntity.addMapping(TileEntityExpStore.class, ExperienceStore.MODID + ":TileEntityExpStore");
RenderingRegistry.registerBlockHandler(RenderBlockExpStore.instance());
// shaped recipe: obsidian corners, gold-ingot edges, glass center
GameRegistry.addRecipe(new ItemStack(ESBlocks.expStore), "ogo", "gGg", "ogo", 'o', Blocks.obsidian, 'g', Items.gold_ingot, 'G', Blocks.glass);
if(isDevelopmentEnvironment()) {
// dev-only block: same shape but gold blocks instead of ingots
GameRegistry.addRecipe(new ItemStack(ESBlocks.expChanger), "ogo", "gGg", "ogo", 'o', Blocks.obsidian, 'g', Blocks.gold_block, 'G', Blocks.glass);
}
}
/** @return true when running in a deobfuscated development environment */
public static boolean isDevelopmentEnvironment() {
return developmentEnvironment;
}
public void init(FMLInitializationEvent event) {
}
public void postInit(FMLPostInitializationEvent event) {
}
}
// SiteMapServlet.java
// blogwt
package com.willshex.blogwt.server;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.List;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletResponse;
import com.willshex.blogwt.server.helper.ServletHelper;
import com.willshex.blogwt.server.service.page.PageServiceProvider;
import com.willshex.blogwt.server.service.post.PostServiceProvider;
import com.willshex.blogwt.server.service.tag.TagServiceProvider;
import com.willshex.blogwt.shared.api.Pager;
import com.willshex.blogwt.shared.api.SortDirectionType;
import com.willshex.blogwt.shared.api.datatype.Page;
import com.willshex.blogwt.shared.api.datatype.Post;
import com.willshex.blogwt.shared.api.datatype.PostSortType;
import com.willshex.blogwt.shared.api.datatype.Tag;
import com.willshex.blogwt.shared.helper.PagerHelper;
import com.willshex.blogwt.shared.page.PageType;
import com.willshex.service.ContextAwareServlet;
/**
* @author William Shakour (billy1380)
*
*/
public class SiteMapServlet extends ContextAwareServlet {
private static final long serialVersionUID = 3133978953838954164L;
private static final String MIME_TYPE = "application/xml; charset=UTF-8";
private static final String LOC_FORMAT = " <url><loc>%s/%s</loc></url>";
/* (non-Javadoc)
*
* @see com.willshex.service.ContextAwareServlet#doGet() */
@Override
protected void doGet () throws ServletException, IOException {
super.doGet();
String url = ServletHelper.constructBaseUrl(REQUEST.get());
HttpServletResponse response = RESPONSE.get();
PrintWriter p = response.getWriter();
response.setContentType(MIME_TYPE);
p.println("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
p.println("<urlset xmlns=\"http:
printRoot(p, url);
printPages(p, url);
printBlog(p, url);
printPosts(p, url);
printTags(p, url);
p.println("</urlset>");
p.close();
}
protected void doPost () throws ServletException, IOException {
doGet();
}
private void printRoot (PrintWriter p, String url) {
p.println(String.format(LOC_FORMAT, url, "#!"));
}
private void printBlog (PrintWriter p, String url) {
p.println(String.format(LOC_FORMAT, url,
"#" + PageType.PostsPageType.asTargetHistoryToken()));
}
private void printPosts (PrintWriter p, String url) {
List<Post> posts;
Pager pager = PagerHelper.createDefaultPager();
do {
posts = PostServiceProvider.provide().getPosts(Boolean.FALSE,
Boolean.FALSE, pager.start, pager.count,
PostSortType.PostSortTypePublished,
SortDirectionType.SortDirectionTypeDescending);
if (posts != null) {
PagerHelper.moveForward(pager);
for (Post post : posts) {
p.println(String.format(
LOC_FORMAT,
url,
"
+ PageType.PostDetailPageType
.asTargetHistoryToken(post.slug)));
}
}
} while (posts != null && posts.size() >= pager.count.intValue());
}
private void printPages (PrintWriter p, String url) {
List<Page> pages = PageServiceProvider.provide().getPages(
Boolean.FALSE, Integer.valueOf(0), null, null, null);
if (pages != null) {
for (Page page : pages) {
p.println(String.format(LOC_FORMAT, url, "#!" + page.slug));
}
}
}
private void printTags (PrintWriter p, String url) {
List<Tag> tags = TagServiceProvider.provide().getTags();
if (tags.size() >= 0) {
for (Tag tag : tags) {
p.println(String.format(
LOC_FORMAT,
url,
"
+ PageType.TagPostsPageType
.asTargetHistoryToken(tag.slug)));
}
}
}
} |
package com.xtremelabs.robolectric.util;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 * A deterministic task scheduler for tests: runnables are queued with a
 * virtual timestamp and executed either immediately (unpaused, no delay)
 * or explicitly via the advance/run methods.
 */
public class Scheduler {
    private List<PostedRunnable> postedRunnables = new ArrayList<PostedRunnable>();
    private long currentTime = 0;
    private boolean paused = false;

    /** Stops immediate execution; subsequent posts are queued. */
    public void pause() {
        paused = true;
    }

    /** Resumes execution and flushes everything currently queued. */
    public void unPause() {
        paused = false;
        advanceToLastPostedRunnable();
    }

    /** @return true when the scheduler is paused */
    public boolean isPaused() {
        return paused;
    }

    /**
     * Queues the runnable at currentTime + delayMillis; runs it inline
     * when unpaused and the delay is zero (or negative).
     */
    public void postDelayed(Runnable runnable, long delayMillis) {
        if (paused || delayMillis > 0) {
            postedRunnables.add(new PostedRunnable(runnable, currentTime + delayMillis));
            // keep the queue ordered by scheduled time
            Collections.sort(postedRunnables);
        } else {
            runnable.run();
        }
    }

    /** Posts with no delay. */
    public void post(Runnable runnable) {
        postDelayed(runnable, 0);
    }

    /**
     * Advances virtual time to the latest queued task, running everything.
     *
     * @return true when at least one task ran
     */
    public boolean advanceToLastPostedRunnable() {
        if (enqueuedTaskCount() < 1) {
            return false;
        }

        return advanceTo(postedRunnables.get(postedRunnables.size() - 1).scheduledTime);
    }

    /**
     * Advances virtual time to the next queued task, running it.
     *
     * @return true when at least one task ran
     */
    public boolean advanceToNextPostedRunnable() {
        if (enqueuedTaskCount() < 1) {
            return false;
        }

        return advanceTo(postedRunnables.get(0).scheduledTime);
    }

    /**
     * Advances virtual time by the given interval, running any task due.
     *
     * @return true when at least one task ran
     */
    public boolean advanceBy(long intervalMs) {
        long endingTime = currentTime + intervalMs;
        return advanceTo(endingTime);
    }

    /**
     * Advances virtual time to endingTime, running every task scheduled at
     * or before it (tasks may enqueue further tasks while running).
     *
     * @return true when at least one task ran
     */
    public boolean advanceTo(long endingTime) {
        if (endingTime - currentTime < 0 || enqueuedTaskCount() < 1) {
            return false;
        }

        int runCount = 0;
        while (nextTaskIsScheduledBefore(endingTime)) {
            runOneTask();
            ++runCount;
        }
        currentTime = endingTime;
        return runCount > 0;
    }

    /**
     * Runs the next queued task, moving virtual time to its timestamp.
     *
     * @return true when a task ran
     */
    public boolean runOneTask() {
        if (enqueuedTaskCount() < 1) {
            return false;
        }

        PostedRunnable postedRunnable = postedRunnables.remove(0);
        currentTime = postedRunnable.scheduledTime;
        postedRunnable.run();
        return true;
    }

    /**
     * Runs exactly howMany queued tasks in scheduled order.
     *
     * @return false (running nothing) when fewer than howMany are queued
     */
    public boolean runTasks(int howMany) {
        if (enqueuedTaskCount() < howMany) {
            return false;
        }

        while (howMany > 0) {
            PostedRunnable postedRunnable = postedRunnables.remove(0);
            currentTime = postedRunnable.scheduledTime;
            postedRunnable.run();
            howMany--; // fixed: the decrement was truncated in source ("howMany" alone does not compile)
        }
        return true;
    }

    /** @return the number of queued tasks */
    public int enqueuedTaskCount() {
        return postedRunnables.size();
    }

    /** @return true when a queued task is due at the current virtual time */
    public boolean areAnyRunnable() {
        return nextTaskIsScheduledBefore(currentTime);
    }

    /** Clears the queue and unpauses. */
    public void reset() {
        postedRunnables.clear();
        paused = false;
    }

    class PostedRunnable implements Comparable<PostedRunnable> {
        Runnable runnable;
        long scheduledTime;

        PostedRunnable(Runnable runnable, long scheduledTime) {
            this.runnable = runnable;
            this.scheduledTime = scheduledTime;
        }

        @Override
        public int compareTo(PostedRunnable postedRunnable) {
            // fixed: was (int) (scheduledTime - postedRunnable.scheduledTime),
            // which overflows for distant timestamps; Long.compare is exact
            return Long.compare(scheduledTime, postedRunnable.scheduledTime);
        }

        public void run() {
            runnable.run();
        }
    }

    private boolean nextTaskIsScheduledBefore(long endingTime) {
        return enqueuedTaskCount() > 0 && postedRunnables.get(0).scheduledTime <= endingTime;
    }
}
package com.zavteam.plugins.packets;
import com.zavteam.plugins.utils.PluginPM;
import com.zavteam.plugins.utils.PluginPM.MessageType;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.OfflinePlayer;
import org.bukkit.entity.Player;
import org.bukkit.util.ChatPaginator;
import java.util.*;
public class MessagePacket extends AutoPacket {
private String permission;
private List<String> messages = new ArrayList<String>();
private List<UUID> players = new ArrayList<UUID>();
public MessagePacket(String message) {
this(message, null);
}
public MessagePacket(String message, String permission) {
messages.add(message);
this.permission = permission;
}
public MessagePacket(Collection<String> collection) {
this(collection, null);
}
public MessagePacket(Collection<String> collection, String permission) {
messages.addAll(collection);
this.permission = permission;
}
public MessagePacket(String[] messages) {
this(messages, null);
}
public MessagePacket(String[] messages, String permission) {
for (String message : messages) {
this.messages.add(message);
}
this.permission = permission;
}
public String getPermission() {
return permission;
}
public void setPermission(String permission) {
this.permission = permission;
}
public List<String> getMessages() {
return messages;
}
public List<UUID> getPlayers() {
return players;
}
public void setMessages(List<String> messages) {
this.messages = messages;
}
/**
* This method applies all color affects. If the messages variable is only one line in length, it splits the lines up.
*/
public void processMessages(boolean chatPaginating) {
/**
* This if section checks if their is only one message in the list.
* If so it splits the message into the appropriate lines.
* Their should not be a case where multiple lines are added that still need to be split
*/
if (messages.size() == 1) {
List<String> newMessages = new ArrayList<String>();
newMessages.addAll(Arrays.asList(messages.get(0).split("%n")));
messages = newMessages;
}
if (chatPaginating) {
List<String> newMessages = new ArrayList<String>();
for (String message : messages) {
newMessages.addAll(Arrays.asList(ChatPaginator.paginate(message, 1).getLines()));
}
messages = newMessages;
}
/**
* Color messages
*/
for (String message : messages) {
message = ChatColor.translateAlternateColorCodes('&', message);
}
}
@Override
public void processPacket() {
for (String message : messages) {
for (UUID uuid : players) {
OfflinePlayer offlinePlayer = Bukkit.getOfflinePlayer(uuid);
if (offlinePlayer.isOnline()) {
Player player = (Player) offlinePlayer;
PluginPM.sendMessage(MessageType.NO_FORMATTING, message);
}
}
}
}
} |
package com.zero_x_baadf00d.partialize;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ContainerNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.zero_x_baadf00d.partialize.converter.Converter;
import com.zero_x_baadf00d.partialize.policy.AccessPolicy;
import org.apache.commons.lang3.NotImplementedException;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.text.WordUtils;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.*;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
/**
* Create a partial JSON document from any kind of objects.
*
* @author Thibault Meyer
* @version 18.05.10
* @since 16.01.18
*/
@SuppressWarnings("UnusedReturnValue")
public class Partialize {
/**
* Default maximum reachable depth level.
*
* @since 16.01.18
*/
private static final int DEFAULT_MAXIMUM_DEPTH = 64;
/**
* Default scanner delimiter pattern.
*
* @since 16.01.18
*/
private static final String SCANNER_DELIMITER = ",";
/**
* Method prefixes.
*
* @since 17.06.28
*/
private static final String[] METHOD_PREFIXES = {"get", "is", "has", "can"};
/**
* Pattern used to extract arguments.
*
* @since 16.01.18
*/
private final Pattern fieldArgsPattern = Pattern.compile("([a-zA-Z0-9]+)\\((.+)\\)");
/**
* Object mapper used to create new object nodes.
*
* @since 16.01.18
*/
private final ObjectMapper objectMapper;
/**
* The maximum reachable depth level.
*
* @since 16.01.18
*/
private final int maximumDepth;
/**
* The access policy function.
*
* @since 16.02.13
*/
private Function<AccessPolicy, Boolean> accessPolicyFunction;
/**
* Defined aliases.
*
* @since 16.03.11
*/
private Map<String, String> aliases;
/**
* Exception function.
*
* @since 16.03.15
*/
private Consumer<Exception> exceptionConsumer;
/**
 * Build a default instance.
 *
 * @since 16.01.18
 */
public Partialize() {
    // delegate to the depth-aware constructor with the default limit
    this(DEFAULT_MAXIMUM_DEPTH);
}
/**
 * Build an instance with a specific maximum depth value set.
 *
 * @param maximumDepth Maximum allowed depth value to set
 * @since 16.01.18
 */
public Partialize(final int maximumDepth) {
this.exceptionConsumer = null;
this.objectMapper = new ObjectMapper();
// clamp to at least 1 so the builder can always descend one level
this.maximumDepth = maximumDepth > 0 ? maximumDepth : 1;
}
/**
* Defines a field that will be called throughout the process
* to verify whether the requested element can be integrated or
* not to the partial JSON document.
*
* @param apFunction The function to execute
* @return The current instance of {@code Partialize}
* @since 16.02.13
*/
public Partialize setAccessPolicy(final Function<AccessPolicy, Boolean> apFunction) {
this.accessPolicyFunction = apFunction;
return this;
}
/**
* Defines a callback that will be called throughout the process
* when exception occurs.
*
* @param exceptionCallback The callback to execute
* @return The current instance of {@code Partialize}
* @since 16.03.15
*/
public Partialize setExceptionCallback(final Consumer<Exception> exceptionCallback) {
this.exceptionConsumer = exceptionCallback;
return this;
}
/**
* Defines field aliases.
*
* @param aliases A {@code Map} defining aliases
* @return The current instance of {@code Partialize}
* @since 16.03.10
*/
public Partialize setAliases(final Map<String, String> aliases) {
this.aliases = aliases;
return this;
}
/**
* Build a JSON object from data taken from the scanner and
* the given class type and instance.
*
* @param fields The field query to request
* @param clazz The class of the object to render
* @return An instance of {@code ContainerNode}
* @see ContainerNode
* @since 16.01.18
*/
public ContainerNode buildPartialObject(final String fields, final Class<?> clazz) {
return this.buildPartialObject(fields, clazz, null);
}
/**
 * Build a JSON object from data taken from the scanner and
 * the given class type and instance.
 *
 * @param fields The field query to request
 * @param clazz The class of the object to render
 * @param instance The instance of the object to render
 * @return An instance of {@code ContainerNode}
 * @see ContainerNode
 * @since 16.01.18
 */
public ContainerNode buildPartialObject(final String fields, final Class<?> clazz, final Object instance) {
if (instance instanceof Collection<?>) {
// a collection renders as a JSON array with one entry per element
final ArrayNode partialArray = this.objectMapper.createArrayNode();
if (((Collection<?>) instance).size() > 0) {
for (final Object o : (Collection<?>) instance) {
// NOTE(review): elements start at depth -1, presumably so the
// collection wrapper itself does not consume a depth level —
// confirm against internalBuild's depth accounting
partialArray.add(this.buildPartialObject(-1, fields, o.getClass(), o));
}
}
return partialArray;
} else {
// single object: render at depth 0
return this.buildPartialObject(0, fields, clazz, instance);
}
}
/**
 * Add requested item on the partial JSON document.
 *
 * @param depth Current depth level
 * @param aliasField The alias field name
 * @param field The field name
 * @param args The field Arguments
 * @param partialArray The current partial JSON document part
 * @param clazz The class of the object to add
 * @param object The object to add
 * @since 16.01.18
 */
private void internalBuild(final int depth, final String aliasField, final String field, final String args,
final ArrayNode partialArray, final Class<?> clazz, final Object object) {
// stop silently once the maximum depth is reached
if (depth < this.maximumDepth) {
// dispatch on runtime type: scalars are appended directly, ...
if (object == null) {
partialArray.addNull();
} else if (object instanceof String) {
partialArray.add((String) object);
} else if (object instanceof Integer) {
partialArray.add((Integer) object);
} else if (object instanceof Long) {
partialArray.add((Long) object);
} else if (object instanceof Double) {
partialArray.add((Double) object);
} else if (object instanceof UUID) {
// UUIDs render as their canonical string form
partialArray.add(object.toString());
} else if (object instanceof Boolean) {
partialArray.add((Boolean) object);
} else if (object instanceof JsonNode) {
// pre-built JSON is embedded as-is
partialArray.addPOJO(object);
} else if (object instanceof Collection<?>) {
// ... collections recurse element-by-element into a nested array ...
final ArrayNode anotherPartialArray = partialArray.addArray();
if (((Collection<?>) object).size() > 0) {
for (final Object o : (Collection<?>) object) {
this.internalBuild(depth + 1, aliasField, field, args, anotherPartialArray, o.getClass(), o);
}
}
} else if (object instanceof Enum) {
// enums whose toString is numeric are emitted as numbers,
// everything else as the string form
final String tmp = object.toString();
try {
partialArray.add(Integer.valueOf(tmp));
} catch (final NumberFormatException ignore) {
partialArray.add(tmp);
}
} else {
// ... and other objects go through a registered converter when one
// exists, otherwise are rendered as a nested partial object
final Converter converter = PartializeConverterManager.getInstance().getConverter(object.getClass());
if (converter != null) {
converter.convert(aliasField, object, partialArray);
} else {
partialArray.add(this.buildPartialObject(depth + 1, args, object.getClass(), object));
}
}
}
}
/**
 * Add requested item on the partial JSON document.
 *
 * @param depth Current depth level
 * @param aliasField The alias field name
 * @param field The field name
 * @param args The field Arguments
 * @param partialObject The current partial JSON document part
 * @param clazz The class of the object to add
 * @param object The object to add
 * @since 16.01.18
 */
private void internalBuild(final int depth, final String aliasField, final String field, final String args,
final ObjectNode partialObject, final Class<?> clazz, Object object) {
if (depth <= this.maximumDepth) {
// Unwrap Optional before type dispatch; an empty Optional becomes null.
if (object instanceof Optional) {
object = ((Optional<?>) object).orElse(null);
}
if (object == null) {
partialObject.putNull(aliasField);
} else if (object instanceof String) {
partialObject.put(aliasField, (String) object);
} else if (object instanceof Integer) {
partialObject.put(aliasField, (Integer) object);
} else if (object instanceof Long) {
partialObject.put(aliasField, (Long) object);
} else if (object instanceof Double) {
partialObject.put(aliasField, (Double) object);
} else if (object instanceof UUID) {
partialObject.put(aliasField, object.toString());
} else if (object instanceof Boolean) {
partialObject.put(aliasField, (Boolean) object);
} else if (object instanceof JsonNode) {
partialObject.putPOJO(aliasField, object);
} else if (object instanceof Collection<?>) {
// Collections are rendered through the ArrayNode overload. The
// recursion passes the same depth here; the element loop inside the
// ArrayNode overload is where the level increases.
final ArrayNode partialArray = partialObject.putArray(aliasField);
if (((Collection<?>) object).size() > 0) {
for (final Object o : (Collection<?>) object) {
this.internalBuild(depth, aliasField, field, args, partialArray, o.getClass(), o);
}
}
} else if (object instanceof Map<?, ?>) {
this.buildPartialObject(depth + 1, args, object.getClass(), object, partialObject.putObject(aliasField));
} else if (object instanceof Enum) {
// Enum constants with a numeric toString() are written as numbers,
// all others as plain strings.
final String tmp = object.toString();
try {
partialObject.put(aliasField, Integer.valueOf(tmp));
} catch (final NumberFormatException ignore) {
partialObject.put(aliasField, tmp);
}
} else {
// A registered converter wins; otherwise render as a nested partial object.
final Converter converter = PartializeConverterManager.getInstance().getConverter(object.getClass());
if (converter != null) {
converter.convert(aliasField, object, partialObject);
} else {
this.buildPartialObject(depth + 1, args, object.getClass(), object, partialObject.putObject(aliasField));
}
}
}
}
/**
 * Build a JSON object from data taken from the scanner and
 * the given class type and instance.
 * <p>
 * Convenience overload: starts rendering into a freshly created
 * {@code ObjectNode}.
 *
 * @param depth The current depth
 * @param fields The field names to requests
 * @param clazz The class of the object to render
 * @param instance The instance of the object to render
 * @return A JSON Object
 * @since 16.01.18
 */
private ContainerNode buildPartialObject(final int depth, final String fields,
final Class<?> clazz, final Object instance) {
return this.buildPartialObject(depth, fields, clazz, instance, this.objectMapper.createObjectNode());
}
/**
 * Build a JSON object from data taken from the scanner and
 * the given class type and instance.
 *
 * @param depth The current depth
 * @param fields The field names to requests
 * @param clazz The class of the object to render
 * @param instance The instance of the object to render
 * @param partialObject The partial JSON document
 * @return A JSON Object
 * @since 16.01.18
 */
private ContainerNode buildPartialObject(final int depth, String fields, final Class<?> clazz,
final Object instance, final ObjectNode partialObject) {
if (depth <= this.maximumDepth) {
// Classify the instance: annotated bean, plain Map, or — for a
// Collection — render immediately as an array and return early.
final ObjectType objectType;
if (clazz.isAnnotationPresent(com.zero_x_baadf00d.partialize.annotation.Partialize.class)) {
objectType = ObjectType.ANNOTATED;
} else if (instance instanceof Map<?, ?>) {
objectType = ObjectType.MAP;
} else if (instance instanceof Collection<?>) {
final ArrayNode partialArray = this.objectMapper.createArrayNode();
if (((Collection<?>) instance).size() > 0) {
for (final Object o : (Collection<?>) instance) {
this.internalBuild(depth + 1, null, null, null, partialArray, o.getClass(), o);
}
}
return partialArray;
} else {
objectType = ObjectType.NOT_SUPPORTED;
}
if (objectType != ObjectType.NOT_SUPPORTED) {
// Fields already rendered; used below to expand "*" without duplicates.
final List<String> closedFields = new ArrayList<>();
List<String> allowedFields;
List<String> defaultFields = null;
switch (objectType) {
case ANNOTATED:
allowedFields = Arrays.asList(
clazz
.getAnnotation(com.zero_x_baadf00d.partialize.annotation.Partialize.class)
.allowedFields()
);
defaultFields = Arrays.asList(
clazz
.getAnnotation(com.zero_x_baadf00d.partialize.annotation.Partialize.class)
.defaultFields()
);
// No explicit allow-list on the annotation: derive one from
// prefixed accessor methods ("getName" -> "name").
if (allowedFields.isEmpty()) {
allowedFields = new ArrayList<>();
for (final Method m : clazz.getDeclaredMethods()) {
final String methodName = m.getName();
for (final String methodPrefix : Partialize.METHOD_PREFIXES) {
if (methodName.startsWith(methodPrefix)) {
final char[] c = methodName.substring(methodPrefix.length()).toCharArray();
c[0] = Character.toLowerCase(c[0]);
allowedFields.add(new String(c));
}
}
}
}
break;
case MAP:
// For maps, every key is an allowed field.
allowedFields = new ArrayList<>();
for (Map.Entry<?, ?> e : ((Map<?, ?>) instance).entrySet()) {
allowedFields.add(String.valueOf(e.getKey()));
}
break;
default:
throw new NotImplementedException("Can't convert " + clazz.getCanonicalName());
}
// Fall back to "all allowed fields" (in alias form) as the defaults.
if (defaultFields == null || defaultFields.isEmpty()) {
defaultFields = allowedFields.stream()
.map(this::resolveAlias)
.collect(Collectors.toList());
}
// An empty query means "render the default fields".
if (fields == null || fields.length() == 0) {
fields = String.join(",", defaultFields);
}
// NOTE(review): this Scanner is only closed when it is replaced by the
// "*" expansion below, never on normal loop exit. Harmless for a
// String-backed Scanner, but try-with-resources would be cleaner.
Scanner scanner = new Scanner(fields);
scanner.useDelimiter(com.zero_x_baadf00d.partialize.Partialize.SCANNER_DELIMITER);
while (scanner.hasNext()) {
String word = scanner.next();
String args = null;
// "*" expands to all not-yet-rendered allowed fields; any remaining
// explicit fields are appended behind the expansion.
if (word.compareTo("*") == 0) {
final StringBuilder sb = new StringBuilder();
if (scanner.hasNext()) {
scanner.useDelimiter("\n");
sb.append(",");
sb.append(scanner.next());
}
final Scanner newScanner = new Scanner(allowedFields.stream()
.filter(f -> !closedFields.contains(f))
.map(this::resolveAlias)
.collect(Collectors.joining(",")) + sb.toString());
newScanner.useDelimiter(com.zero_x_baadf00d.partialize.Partialize.SCANNER_DELIMITER);
scanner.close();
scanner = newScanner;
}
// A field with arguments, e.g. "children(name,age)": keep consuming
// tokens until parentheses balance, then split name from arguments.
if (word.contains("(")) {
while (scanner.hasNext() && (StringUtils.countMatches(word, "(") != StringUtils.countMatches(word, ")"))) {
word += "," + scanner.next();
}
final Matcher m = this.fieldArgsPattern.matcher(word);
if (m.find()) {
word = m.group(1);
args = m.group(2);
}
}
// Translate the (possibly aliased) query name to the real field name.
final String aliasField = word;
final String field = this.aliases != null && this.aliases.containsKey(aliasField) ? this.aliases.get(aliasField) : aliasField;
if (allowedFields.stream().anyMatch(f -> f.toLowerCase(Locale.ENGLISH).compareTo(field.toLowerCase(Locale.ENGLISH)) == 0)) {
// Skip fields rejected by the configured access policy.
if (this.accessPolicyFunction != null && !this.accessPolicyFunction.apply(new AccessPolicy(clazz, instance, field))) {
continue;
}
closedFields.add(aliasField);
switch (objectType) {
case ANNOTATED:
// Try each accessor prefix ("getX", ...) first, then a method
// named exactly like the field; the final failure is reported
// through the optional exception consumer.
for (final String methodPrefix : Partialize.METHOD_PREFIXES) {
try {
final Method method = clazz.getMethod(methodPrefix + WordUtils.capitalize(field));
final Object object = method.invoke(instance);
this.internalBuild(depth, aliasField, field, args, partialObject, clazz, object);
break;
} catch (final IllegalAccessException | InvocationTargetException | NoSuchMethodException | NullPointerException ignore) {
try {
final Method method = clazz.getMethod(field);
final Object object = method.invoke(instance);
this.internalBuild(depth, aliasField, field, args, partialObject, clazz, object);
break;
} catch (final IllegalAccessException | InvocationTargetException | NoSuchMethodException ex) {
if (this.exceptionConsumer != null) {
this.exceptionConsumer.accept(ex);
}
}
}
}
break;
case MAP:
// Map values are rendered directly under the alias name.
final Map<?, ?> tmpMap = (Map<?, ?>) instance;
if (tmpMap.containsKey(field)) {
final Object object = tmpMap.get(field);
this.internalBuild(depth, aliasField, field, args, partialObject, clazz, object);
}
break;
default:
break;
}
}
}
} else {
throw new RuntimeException("Can't convert " + clazz.getCanonicalName());
}
}
return partialObject;
}
/**
 * Resolves alias from a real field name.
 *
 * @param fieldName The field name to retrieve alias
 * @return The alias in case of success, otherwise, the field name
 * @since 19.01.30
 */
private String resolveAlias(final String fieldName) {
    // Guard: containsValue() uses equals(), so the case-insensitive scan
    // below is only entered when an exact-case value exists. This gate is
    // part of the original behavior and is deliberately preserved.
    if (this.aliases == null || !this.aliases.containsValue(fieldName)) {
        return fieldName;
    }
    for (final Map.Entry<String, String> entry : this.aliases.entrySet()) {
        if (entry.getValue().compareToIgnoreCase(fieldName) == 0) {
            return entry.getKey();
        }
    }
    return fieldName;
}
} |
package de.bmoth.backend.z3;
import com.microsoft.z3.BoolExpr;
import com.microsoft.z3.Context;
import com.microsoft.z3.Expr;
import com.microsoft.z3.Sort;
import de.bmoth.parser.ast.nodes.*;
import java.util.*;
public class MachineToZ3Translator {
private final MachineNode machineNode;
private final Context z3Context;
private BoolExpr initialisationConstraint = null;
private BoolExpr invariantConstraint = null;
private final HashMap<String, String> primedVariablesToVariablesMap;
private final List<BoolExpr> operationConstraints;
public MachineToZ3Translator(MachineNode machineNode, Context ctx) {
this.machineNode = machineNode;
this.z3Context = ctx;
if (machineNode.getInitialisation() != null) {
this.initialisationConstraint = visitSubstitution(machineNode.getInitialisation());
}
if (machineNode.getInvariant() != null) {
this.invariantConstraint = (BoolExpr) FormulaToZ3Translator.translatePredicate(machineNode.getInvariant(),
z3Context);
} else {
this.invariantConstraint = z3Context.mkTrue();
}
this.operationConstraints = visitOperations(machineNode.getOperations());
primedVariablesToVariablesMap = new HashMap<>();
for (DeclarationNode node : machineNode.getVariables()) {
primedVariablesToVariablesMap.put(getPrimedName(node.getName()), node.getName());
}
}
private List<BoolExpr> visitOperations(List<OperationNode> operations) {
List<BoolExpr> results = new ArrayList<>(operations.size());
for (OperationNode operationNode : this.machineNode.getOperations()) {
BoolExpr temp = visitSubstitution(operationNode.getSubstitution());
// for unassigned variables add a dummy assignment, e.g. x' = x
Set<DeclarationNode> set = new HashSet<>(this.getVariables());
set.removeAll(operationNode.getSubstitution().getAssignedVariables());
for (DeclarationNode node : set) {
BoolExpr mkEq = z3Context.mkEq(getPrimedVariable(node), getVariableAsZ3Expression(node));
temp = z3Context.mkAnd(temp, mkEq);
}
results.add(temp);
}
return results;
}
public List<DeclarationNode> getVariables() {
return machineNode.getVariables();
}
public List<DeclarationNode> getConstants() {
return machineNode.getConstants();
}
public Expr getVariableAsZ3Expression(DeclarationNode node) {
Sort type = FormulaToZ3Translator.bTypeToZ3Sort(z3Context, node.getType());
Expr expr = z3Context.mkConst(node.getName(), type);
return expr;
}
public Expr getVariable(DeclarationNode node) {
Sort type = FormulaToZ3Translator.bTypeToZ3Sort(z3Context, node.getType());
Expr expr = z3Context.mkConst(node.getName(), type);
return expr;
}
public Expr getPrimedVariable(DeclarationNode node) {
String primedName = getPrimedName(node.getName());
Sort type = FormulaToZ3Translator.bTypeToZ3Sort(z3Context, node.getType());
Expr expr = z3Context.mkConst(primedName, type);
return expr;
}
public BoolExpr getInitialValueConstraint() {
PredicateNode properties = machineNode.getProperties();
BoolExpr prop = z3Context.mkTrue();
if (properties != null) {
prop = FormulaToZ3Translator.translatePredicate(machineNode.getProperties(), z3Context);
}
if (initialisationConstraint == null) {
return prop;
}
return z3Context.mkAnd(initialisationConstraint, prop);
}
public BoolExpr getInvariantConstraint() {
return invariantConstraint;
}
private BoolExpr visitSubstitution(SubstitutionNode node) {
if (node instanceof SingleAssignSubstitutionNode) {
return visitSingleAssignSubstitution((SingleAssignSubstitutionNode) node);
} else if (node instanceof ParallelSubstitutionNode) {
return visitParallelSubstitution((ParallelSubstitutionNode) node);
} else if (node instanceof AnySubstitutionNode) {
return visitAnySubstitution((AnySubstitutionNode) node);
} else if (node instanceof SelectSubstitutionNode) {
return visitSelectSubstitutionNode((SelectSubstitutionNode) node);
}
throw new AssertionError("Not implemented" + node.getClass());
}
private BoolExpr visitSelectSubstitutionNode(SelectSubstitutionNode node) {
BoolExpr condition = (BoolExpr) FormulaToZ3Translator.translatePredicate(node.getCondition(), z3Context);
BoolExpr substitution = visitSubstitution(node.getSubstitution());
return z3Context.mkAnd(condition, substitution);
}
private BoolExpr visitAnySubstitution(AnySubstitutionNode node) {
Expr[] parameters = new Expr[node.getParameters().size()];
for (int i = 0; i < parameters.length; i++) {
parameters[i] = getVariableAsZ3Expression(node.getParameters().get(i));
}
BoolExpr parameterConstraints = (BoolExpr) FormulaToZ3Translator.translatePredicate(node.getWherePredicate(), z3Context);
BoolExpr transition = visitSubstitution(node.getThenSubstitution());
BoolExpr existsBody = z3Context.mkAnd(parameterConstraints, transition);
return z3Context.mkExists(parameters, existsBody, parameters.length, null, null, null, null);
}
private BoolExpr visitParallelSubstitution(ParallelSubstitutionNode node) {
List<SubstitutionNode> substitutions = node.getSubstitutions();
BoolExpr boolExpr = null;
for (SubstitutionNode substitutionNode : substitutions) {
BoolExpr temp = visitSubstitution(substitutionNode);
if (boolExpr == null) {
boolExpr = temp;
} else {
boolExpr = z3Context.mkAnd(boolExpr, temp);
}
}
return boolExpr;
}
private BoolExpr visitSingleAssignSubstitution(SingleAssignSubstitutionNode node) {
String name = getPrimedName(node.getIdentifier().getName());
return FormulaToZ3Translator.translateVariableEqualToExpr(name, node.getValue(), z3Context);
}
private String getPrimedName(String name) {
return name + "'";
}
public List<BoolExpr> getOperationConstraints() {
return operationConstraints;
}
} |
package de.mxro.maven.tools;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.text.SimpleDateFormat;
import java.util.Date;
import de.mxro.process.Spawn;
public class MavenRemoteRepository {
public static void downloadOrCreateRepositoryXml(final String repositoryUrl, final Path destFolder,
final String groupId, final String artifactId) throws IOException {
if (Files.exists(destFolder.resolve("maven-metadata.xml"))) {
throw new RuntimeException("maven-metadata.xml file already existed in folder: " + destFolder);
}
final String path = repositoryUrl + groupId.replaceAll("\\.", "/") + "/" + artifactId + "/maven-metadata.xml";
final String output = Spawn.runBashCommand("wget " + path, destFolder.toFile());
System.out.println(output);
if (output.contains("ERROR 404") || output.contains("Not Found")) {
final Path file = Files.createFile(destFolder.resolve("maven-metadata.xml"));
String xml = "";
xml += "<metadata>\n";
xml += " <groupId>" + groupId + "</groupId>\n";
xml += " <artifactId>" + artifactId + "</artifactId>\n";
xml += " <versioning>\n";
xml += " <release>0.0.0</release>\n";
xml += " <versions>\n";
xml += " </versions>\n";
xml += " <lastUpdated>00000</lastUpdated>\n";
xml += " </versioning>\n";
xml += "</metadata>";
Files.write(file, xml.getBytes("UTF-8"));
return;
}
if (output.contains("ERROR")) {
throw new RuntimeException("Error while downloading repository index file from: " + path);
}
}
public static void assertVersionInRepositoryXml(final Path destFolder, final String newVersion) throws Exception {
final Path mavenMetadata = destFolder.resolve("maven-metadata.xml");
final byte[] mavenMetadataBytes = Files.readAllBytes(mavenMetadata);
String mavenMetadataString = new String(mavenMetadataBytes, "UTF-8");
if (!mavenMetadataString.contains(newVersion)) {
final int versionsEndTagIndex = mavenMetadataString.indexOf("</versions>");
mavenMetadataString = mavenMetadataString.substring(0, versionsEndTagIndex) + "\n <version>"
+ newVersion + "</version>\n" + mavenMetadataString.substring(versionsEndTagIndex);
}
final String lastChanged = new SimpleDateFormat("yyyyMMddHHmmss").format(new Date());
mavenMetadataString = mavenMetadataString.replaceAll("<lastUpdated>[^<]*</lastUpdated>", "<lastUpdated>"
+ lastChanged + "</lastUpdated>");
mavenMetadataString = mavenMetadataString.replaceAll("<release>[^<]*</release>", "<release>" + newVersion
+ "</release>");
Files.write(mavenMetadata, mavenMetadataString.getBytes("UTF-8"));
}
} |
package de.slackspace.openkeepass;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Arrays;
import java.util.zip.GZIPInputStream;
import org.bouncycastle.util.encoders.Base64;
import de.slackspace.openkeepass.crypto.Decrypter;
import de.slackspace.openkeepass.crypto.ProtectedStringCrypto;
import de.slackspace.openkeepass.crypto.Salsa20;
import de.slackspace.openkeepass.crypto.Sha256;
import de.slackspace.openkeepass.domain.CompressionAlgorithm;
import de.slackspace.openkeepass.domain.CrsAlgorithm;
import de.slackspace.openkeepass.domain.KeePassFile;
import de.slackspace.openkeepass.domain.KeePassHeader;
import de.slackspace.openkeepass.domain.KeyFile;
import de.slackspace.openkeepass.exception.KeePassDatabaseUnreadable;
import de.slackspace.openkeepass.parser.KeePassDatabaseXmlParser;
import de.slackspace.openkeepass.parser.KeyFileXmlParser;
import de.slackspace.openkeepass.stream.HashedBlockInputStream;
import de.slackspace.openkeepass.util.ByteUtils;
import de.slackspace.openkeepass.util.StreamUtils;
/**
* A KeePassDatabase is the central API class to read a KeePass database file.
* <p>
* Currently the following KeePass files are supported:
*
* <ul>
* <li>KeePass Database V2 with password</li>
* <li>KeePass Database V2 with keyfile</li>
* </ul>
*
* A typical use-case should use the following idiom:
* <pre>
* // open database
* KeePassFile database = KeePassDatabase.getInstance(keePassDatabase).openDatabase("secret");
*
* // get password entries
* List<Entry> entries = database.getEntries();
* ...
* </pre>
*
* If the database could not be opened a <tt>RuntimeException</tt> will be thrown.
*
* @see KeePassFile
*
*/
public class KeePassDatabase {
// KeePass 2.x signature
// NOTE(review): '& 0xFF' truncates each 4-byte signature constant to its
// low byte; whether the comparisons in checkVersionSupport() are correct
// therefore depends on ByteUtils.toUnsignedInt() applying a matching
// truncation — that helper is not visible here, verify.
private static final int DATABASE_V2_FILE_SIGNATURE_1 = 0x9AA2D903 & 0xFF;
private static final int DATABASE_V2_FILE_SIGNATURE_2 = 0xB54BFB67 & 0xFF;
// KeePass 1.x signature
private static final int OLD_DATABASE_V1_FILE_SIGNATURE_1 = 0x9AA2D903 & 0xFF;
private static final int OLD_DATABASE_V1_FILE_SIGNATURE_2 = 0xB54BFB65 & 0xFF;
// KeePass version signature length in bytes
public static final int VERSION_SIGNATURE_LENGTH = 12;
// Parsed file header; populated by readHeader().
private KeePassHeader keepassHeader = new KeePassHeader();
// The complete database file kept in memory.
private byte[] keepassFile;
protected Decrypter decrypter = new Decrypter();
protected KeePassDatabaseXmlParser keePassDatabaseXmlParser = new KeePassDatabaseXmlParser();
protected KeyFileXmlParser keyFileXmlParser = new KeyFileXmlParser();
// Private: instances are created through the getInstance(...) factories,
// which also validate the signature and parse the header.
private KeePassDatabase(InputStream inputStream) {
try {
keepassFile = StreamUtils.toByteArray(inputStream);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/**
 * Retrieves a KeePassDatabase instance. The instance returned is based on the given database filename and tries to parse the database header of it.
 *
 * @param keePassDatabaseFile a KeePass database filename, must not be NULL
 * @return a KeePassDatabase
 */
public static KeePassDatabase getInstance(String keePassDatabaseFile) {
return getInstance(new File(keePassDatabaseFile));
}
/**
 * Retrieves a KeePassDatabase instance. The instance returned is based on the given database file and tries to parse the database header of it.
 *
 * @param keePassDatabaseFile a KeePass database file, must not be NULL
 * @return a KeePassDatabase
 */
public static KeePassDatabase getInstance(File keePassDatabaseFile) {
if(keePassDatabaseFile == null) {
throw new IllegalArgumentException("You must provide a valid KeePass database file.");
}
try {
return getInstance(new FileInputStream(keePassDatabaseFile));
} catch (FileNotFoundException e) {
throw new IllegalArgumentException("The KeePass database file could not be found. You must provide a valid KeePass database file.");
}
}
/**
 * Retrieves a KeePassDatabase instance. The instance returned is based on the given input stream and tries to parse the database header of it.
 *
 * @param keePassDatabaseStream an input stream of a KeePass database, must not be NULL
 * @return a KeePassDatabase
 */
public static KeePassDatabase getInstance(InputStream keePassDatabaseStream) {
if(keePassDatabaseStream == null) {
throw new IllegalArgumentException("You must provide a non-empty KeePass database stream.");
}
KeePassDatabase reader = new KeePassDatabase(keePassDatabaseStream);
try {
reader.checkVersionSupport();
reader.readHeader();
return reader;
}
catch(IOException e) {
throw new RuntimeException("Could not read input stream", e);
}
}
// Validates the 12-byte version signature at the start of the file and
// rejects KeePass 1.x databases as well as non-KeePass files.
private void checkVersionSupport() throws IOException {
BufferedInputStream bufferedInputStream = new BufferedInputStream(new ByteArrayInputStream(keepassFile));
byte[] signature = new byte[VERSION_SIGNATURE_LENGTH];
// NOTE(review): the return value of read() is ignored; a short read would
// leave zero bytes in 'signature'. Tolerable only because the source is an
// in-memory ByteArrayInputStream.
bufferedInputStream.read(signature);
ByteBuffer signatureBuffer = ByteBuffer.wrap(signature);
signatureBuffer.order(ByteOrder.LITTLE_ENDIAN);
int signaturePart1 = ByteUtils.toUnsignedInt(signatureBuffer.getInt());
int signaturePart2 = ByteUtils.toUnsignedInt(signatureBuffer.getInt());
if(signaturePart1 == DATABASE_V2_FILE_SIGNATURE_1 && signaturePart2 == DATABASE_V2_FILE_SIGNATURE_2) {
return;
}
else if(signaturePart1 == OLD_DATABASE_V1_FILE_SIGNATURE_1 && signaturePart2 == OLD_DATABASE_V1_FILE_SIGNATURE_2) {
throw new UnsupportedOperationException("The provided KeePass database file seems to be from KeePass 1.x which is not supported!");
}
else {
throw new UnsupportedOperationException("The provided file seems to be no KeePass database file!");
}
}
// Parses the TLV-style header that follows the version signature:
// 1 byte field id, 2 bytes little-endian length, then the payload.
// A field id of 0 terminates the header.
private void readHeader() throws IOException {
BufferedInputStream bufferedInputStream = new BufferedInputStream(new ByteArrayInputStream(keepassFile));
bufferedInputStream.skip(VERSION_SIGNATURE_LENGTH); // skip version
while(true) {
try {
int fieldId = bufferedInputStream.read();
byte[] fieldLength = new byte[2];
// NOTE(review): read() return values are ignored here as well; see
// checkVersionSupport().
bufferedInputStream.read(fieldLength);
ByteBuffer fieldLengthBuffer = ByteBuffer.wrap(fieldLength);
fieldLengthBuffer.order(ByteOrder.LITTLE_ENDIAN);
int fieldLengthInt = ByteUtils.toUnsignedInt(fieldLengthBuffer.getShort());
if(fieldLengthInt > 0) {
byte[] data = new byte[fieldLengthInt];
bufferedInputStream.read(data);
keepassHeader.setValue(fieldId, data);
// +3 accounts for the field id byte plus the two length bytes.
keepassHeader.increaseHeaderSize(fieldLengthInt + 3);
}
if(fieldId == 0) {
break;
}
} catch (IOException e) {
throw new RuntimeException("Could not read header input", e);
}
}
}
/**
 * Opens a KeePass database with the given password and returns the KeePassFile for further processing.
 * <p>
 * If the database cannot be decrypted with the provided password an exception will be thrown.
 *
 * @param password the password to open the database
 * @return a KeePassFile
 * @see KeePassFile
 */
public KeePassFile openDatabase(String password) {
if(password == null) {
throw new IllegalArgumentException("The password for the database must not be null. Please provide a valid password.");
}
try {
// The composite key is the SHA-256 hash of the UTF-8 password bytes.
byte[] passwordBytes = password.getBytes("UTF-8");
byte[] hashedPassword = Sha256.hash(passwordBytes);
return decryptAndParseDatabase(hashedPassword);
} catch (UnsupportedEncodingException e) {
throw new UnsupportedOperationException("The encoding UTF-8 is not supported");
}
}
/**
 * Opens a KeePass database with the given keyfile and returns the KeePassFile for further processing.
 * <p>
 * If the database cannot be decrypted with the provided keyfile an exception will be thrown.
 *
 * @param keyFile the keyfile to open the database
 * @return a KeePassFile
 * @see KeePassFile
 */
public KeePassFile openDatabase(File keyFile) {
if(keyFile == null) {
throw new IllegalArgumentException("You must provide a valid KeePass keyfile.");
}
try {
return openDatabase(new FileInputStream(keyFile));
} catch (FileNotFoundException e) {
throw new IllegalArgumentException("The KeePass keyfile could not be found. You must provide a valid KeePass keyfile.");
}
}
/**
 * Opens a KeePass database with the given keyfile stream and returns the KeePassFile for further processing.
 * <p>
 * If the database cannot be decrypted with the provided keyfile an exception will be thrown.
 *
 * @param keyFileStream the keyfile to open the database as stream
 * @return a KeePassFile
 * @see KeePassFile
 */
public KeePassFile openDatabase(InputStream keyFileStream) {
if(keyFileStream == null) {
throw new IllegalArgumentException("You must provide a non-empty KeePass keyfile stream.");
}
try {
// The key material is the Base64-decoded <Key><Data> value of the keyfile XML.
KeyFile keyFile = keyFileXmlParser.parse(keyFileStream);
byte[] protectedBuffer = Base64.decode(keyFile.getKey().getData().getBytes("UTF-8"));
return decryptAndParseDatabase(protectedBuffer);
} catch (UnsupportedEncodingException e) {
throw new UnsupportedOperationException("The encoding UTF-8 is not supported");
}
}
// Decrypts the database with the given key, verifies the stream start
// bytes against the header, un-blocks the payload, optionally un-gzips it,
// and finally parses the XML with Salsa20-protected strings.
private KeePassFile decryptAndParseDatabase(byte[] key) {
try {
byte[] aesDecryptedDbFile = decrypter.decryptDatabase(key, keepassHeader, keepassFile);
byte[] startBytes = new byte[32];
ByteArrayInputStream decryptedStream = new ByteArrayInputStream(aesDecryptedDbFile);
decryptedStream.read(startBytes);
// compare startBytes
// A mismatch means the key was wrong or the file is corrupt.
if(!Arrays.equals(keepassHeader.getStreamStartBytes(), startBytes)) {
throw new KeePassDatabaseUnreadable("The keepass database file seems to be corrupt or cannot be decrypted.");
}
HashedBlockInputStream hashedBlockInputStream = new HashedBlockInputStream(decryptedStream);
byte[] hashedBlockBytes = StreamUtils.toByteArray(hashedBlockInputStream);
byte[] decompressed = hashedBlockBytes;
// unzip if necessary
if(keepassHeader.getCompression().equals(CompressionAlgorithm.Gzip)) {
GZIPInputStream gzipInputStream = new GZIPInputStream(new ByteArrayInputStream(hashedBlockBytes));
decompressed = StreamUtils.toByteArray(gzipInputStream);
}
ProtectedStringCrypto protectedStringCrypto;
if(keepassHeader.getCrsAlgorithm().equals(CrsAlgorithm.Salsa20)) {
protectedStringCrypto = Salsa20.createInstance(keepassHeader.getProtectedStreamKey());
}
else {
throw new UnsupportedOperationException("Only Salsa20 is supported as CrsAlgorithm at the moment!");
}
return keePassDatabaseXmlParser.parse(new ByteArrayInputStream(decompressed), protectedStringCrypto);
} catch (IOException e) {
throw new RuntimeException("Could not open database file", e);
}
}
/**
 * Gets the KeePassDatabase header.
 *
 * @return the database header
 */
public KeePassHeader getHeader() {
return keepassHeader;
}
}
package de.themoep.inventorygui;
import org.bukkit.inventory.ItemStack;
import java.util.function.Supplier;
/**
* Represents an element in a gui that will query all it's data when drawn.
*/
public class DynamicGuiElement extends GuiElement {
private Supplier<GuiElement> query;
private GuiElement cachedElement;
private long lastCached = 0;
/**
* Represents an element in a gui that will query all it's data when drawn.
* @param slotChar The character to replace in the gui setup string
* @param query Query the element data, this should return an element with the information
*/
public DynamicGuiElement(char slotChar, Supplier<GuiElement> query) {
super(slotChar);
this.query = query;
update();
}
/**
* Query this element's state even if it shouldn't be done yet
*/
public void update() {
lastCached = System.currentTimeMillis();
cachedElement = query.get();
cachedElement.setGui(gui);
}
@Override
public void setGui(InventoryGui gui) {
super.setGui(gui);
if (cachedElement != null) {
cachedElement.setGui(gui);
}
}
@Override
public ItemStack getItem(int slot) {
update();
return getCachedElement().getItem(slot);
}
@Override
public Action getAction() {
update();
return getCachedElement().getAction();
}
/**
* Get the supplier for this element's content
* @return The supplier query
*/
public Supplier<GuiElement> getQuery() {
return query;
}
/**
* Set the supplier for this element's content
* @param query The supplier query to set
*/
public void setQuery(Supplier<GuiElement> query) {
this.query = query;
}
/**
* Get the cached element, creates a new one if there is none
* @return The element that is currently cached
*/
public GuiElement getCachedElement() {
if (cachedElement == null) {
update();
}
return cachedElement;
}
/**
* Get the time at which this element was last cached
* @return The timestamp from when it was last cached
*/
public long getLastCached() {
return lastCached;
}
} |
package eu.europeana.controller;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.servlet.ModelAndView;
import eu.europeana.api.client.EuropeanaApi2Client;
import eu.europeana.api.client.exception.EuropeanaApiProblem;
import eu.europeana.api.client.model.EuropeanaApi2Results;
import eu.europeana.api.client.model.search.EuropeanaApi2Item;
import eu.europeana.api.client.search.query.Api2Query;
import eu.europeana.api.client.search.query.EuropeanaComplexQuery;
import eu.europeana.model.LongLat;
import eu.europeana.model.RoyaltyObj;
@Controller
public class HeatmapController {
@RequestMapping(value="/heatmap", method=RequestMethod.GET)
public ModelAndView getHeatmap() {
ModelAndView mav = new ModelAndView("heatmap");
mav.addObject("lists", returnDummyValues() );
return mav;
}
@RequestMapping(value="/heatmap", method=RequestMethod.POST)
public ModelAndView postHeatmap(
@RequestParam(value = "search") String search,
@RequestParam(value = "contentType", required = false) String contentType,
HttpServletRequest request) {
//testoutput
System.out.println("serach: " + request.getParameter("search"));
System.out.println("contentType: " + request.getParameter("contentType"));
System.out.println(EuropeanaComplexQuery.TYPE.SOUND);
System.out.println(EuropeanaComplexQuery.TYPE.IMAGE);
System.out.println(EuropeanaComplexQuery.TYPE.TEXT);
List<String> paramList = new ArrayList<String>();
paramList.add(request.getParameter("search"));
paramList.add(request.getParameter(request.getParameter("contentType")));
//make the query
Api2Query europeanaQuery = new Api2Query();
europeanaQuery.setTitle(search);
europeanaQuery.setProfile("rich");
//europeanaQuery.setWholeSubQuery("longitude");
String content = request.getParameter("contentType");
if(content.trim().equals(EuropeanaComplexQuery.TYPE.IMAGE)) {
europeanaQuery.setType(content);
}
if(content.trim().equals(EuropeanaComplexQuery.TYPE.SOUND)) {
europeanaQuery.setType(content);
}
if(content.trim().equals(EuropeanaComplexQuery.TYPE.TEXT)) {
europeanaQuery.setType(content);
}
EuropeanaApi2Client europeanaClient = new EuropeanaApi2Client();
EuropeanaApi2Results results = new EuropeanaApi2Results();
try{
results = europeanaClient.searchApi2(europeanaQuery, 15, 1);
} catch(IOException e) {
e.printStackTrace();
} catch(EuropeanaApiProblem e1) {
e1.printStackTrace();
}
//test find longitude and latitude
for (EuropeanaApi2Item item: results.getAllItems()){
System.out.println("item: " + item.getLink());
System.out.println("getEdmIsShownAt:");
for(String s : item.getEdmPreview()) {
System.out.println("edm: " + s);
}
}
ModelAndView mav = new ModelAndView("heatmap");
mav.addObject("lists", returnDummyValues() );
return mav;
}
public List<LongLat> returnDummyValues() {
List<LongLat> list = new ArrayList<LongLat>();
list.add(new LongLat(52.5, 13.4, 1)); //test values:
list.add(new LongLat(52.34, 13.5, 1));
list.add(new LongLat(52.45, 13.6, 1));
list.add(new LongLat(52.15, 13.7, 1));
list.add(new LongLat(52.34, 13.8, 1));
list.add(new LongLat(52.1, 13.9, 1));
return list;
}
} |
package hudson.plugins.perforce;
import hudson.scm.*;
import hudson.model.AbstractBuild;
import hudson.model.Action;
import hudson.model.Hudson;
import hudson.model.TaskListener;
import hudson.model.LargeText;
import hudson.scm.SubversionSCM.SvnInfo;
import hudson.util.CopyOnWriteMap;
import hudson.util.FormFieldValidator;
import static hudson.Util.fixEmpty;
import static hudson.Util.fixNull;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import org.tmatesoft.svn.core.SVNException;
import org.tmatesoft.svn.core.SVNURL;
import org.tmatesoft.svn.core.wc.SVNClientManager;
import org.tmatesoft.svn.core.wc.SVNCopyClient;
import org.tmatesoft.svn.core.wc.SVNRevision;
import javax.servlet.ServletException;
import java.io.IOException;
import java.io.PrintStream;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.List;
import java.util.ArrayList;
import java.util.regex.Pattern;
import java.util.regex.Matcher;
import java.util.Map.Entry;
import java.lang.ref.WeakReference;
import com.tek42.perforce.*;
import com.tek42.perforce.model.*;
/**
* {@link Action} that lets people create tag for the given build.
*
* @author Mike Wille
*/
public class PerforceTagAction extends AbstractScmTagAction {
private int changeNumber;
private Depot depot;
private String tag;
private String desc;
private String view;
public PerforceTagAction(AbstractBuild build, Depot depot, int changeNumber, String view) {
super(build);
this.depot = depot;
this.changeNumber = changeNumber;
this.view = view;
}
public int getChangeNumber() {
return changeNumber;
}
public String getIconFileName() {
if(tag == null && !Hudson.isAdmin())
return null;
return "save.gif";
}
public String getDisplayName() {
if(isTagged())
return "Perforce Label";
else
return "Label This Build";
}
public String getTag() {
return tag;
}
public void setTag(String tag) {
this.tag = tag;
}
public String getDescription() {
return desc;
}
public void setDescription(String desc) {
this.desc = desc;
}
/**
* Returns true if this build has already been tagged at least once.
*/
public boolean isTagged() {
if(tag == null)
return false;
return true;
}
/**
* Checks to see if the user entered tag matches any Perforce restrictions.
*/
public String isInvalidTag(String tag) {
Pattern spaces = Pattern.compile("\\s{1,}");
Matcher m = spaces.matcher(tag);
if(m.find()) {
return "Spaces are not allowed.";
}
return null;
}
/**
* Checks if the value is a valid Perforce tag (label) name.
*/
public synchronized void doCheckTag(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException {
new FormFieldValidator(req,rsp,false) {
protected void check() throws IOException, ServletException {
String tag = fixEmpty(request.getParameter("value")).trim();
if(tag == null) {// nothing entered yet
ok();
return;
}
error(isInvalidTag(tag));
}
}.check();
}
/**
* Invoked to actually tag the workspace.
*/
public synchronized void doSubmit(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException {
if(!Hudson.adminCheck(req,rsp))
return;
tag = req.getParameter("name");
desc = req.getParameter("desc");
Label label = new Label();
label.setName(tag);
label.setDescription(desc);
label.setRevision(new Integer(changeNumber).toString());
label.addView(view);
try {
depot.getLabels().saveLabel(label);
} catch(PerforceException e) {
tag = null;
desc = null;
e.printStackTrace();
throw new IOException("Failed to issue perforce label.", e);
}
build.save();
rsp.sendRedirect(".");
}
} |
package in.twizmwaz.cardinal.command;
import com.sk89q.minecraft.util.commands.*;
import in.twizmwaz.cardinal.GameHandler;
import in.twizmwaz.cardinal.chat.ChatConstant;
import in.twizmwaz.cardinal.chat.LocalizedChatMessage;
import in.twizmwaz.cardinal.chat.UnlocalizedChatMessage;
import in.twizmwaz.cardinal.match.MatchState;
import in.twizmwaz.cardinal.module.modules.team.TeamModule;
import in.twizmwaz.cardinal.rotation.LoadedMap;
import in.twizmwaz.cardinal.util.TeamUtils;
import org.bukkit.ChatColor;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;
import java.util.Locale;
public class CycleCommand {
@Command(aliases = {"cycle"}, desc = "Cycles the world and loads a new world.", usage = "[time]", flags = "f")
@CommandPermissions("cardinal.match.cycle")
public static void cycle(final CommandContext cmd, CommandSender sender) throws CommandException {
if (GameHandler.getGameHandler().getMatch().isRunning()) {
if(cmd.hasFlag('f')){
try {
TeamModule team = TeamUtils.getTeamByName(cmd.getString(0));
GameHandler.getGameHandler().getMatch().end(team);
} catch (IndexOutOfBoundsException ex) {
GameHandler.getGameHandler().getMatch().end(null);
}
} else {
throw new CommandException(new LocalizedChatMessage(ChatConstant.ERROR_CYCLE_DURING_MATCH).getMessage(sender instanceof Player ? ((Player) sender).getLocale() : Locale.getDefault().toString()));
}
} else if (GameHandler.getGameHandler().getMatch().getState().equals(MatchState.STARTING))
throw new CommandException(new LocalizedChatMessage(ChatConstant.ERROR_CYCLE_DURING_MATCH).getMessage(sender instanceof Player ? ((Player) sender).getLocale() : Locale.getDefault().toString()));
if (GameHandler.getGameHandler().getCycleTimer() != null)
GameHandler.getGameHandler().getCycleTimer().setCancelled(true);
try {
GameHandler.getGameHandler().startCycleTimer(cmd.getInteger(0));
} catch (IndexOutOfBoundsException e) {
GameHandler.getGameHandler().startCycleTimer(30);
}
}
@Command(aliases = {"setnext", "sn"}, desc = "Sets the next map.", usage = "[map]", min = 1)
@CommandPermissions("cardinal.match.setnext")
public static void setNext(final CommandContext cmd, CommandSender sender) throws CommandException {
String input = cmd.getJoinedStrings(0).replaceAll(" ", "");
LoadedMap nextMap = null;
for (LoadedMap loadedMap : GameHandler.getGameHandler().getRotation().getLoaded()) {
if (loadedMap.getName().toLowerCase().replaceAll(" ", "").equalsIgnoreCase(input.toLowerCase())) {
nextMap = loadedMap;
}
}
if (nextMap == null) {
for (LoadedMap loadedMap : GameHandler.getGameHandler().getRotation().getLoaded()) {
if (loadedMap.getName().toLowerCase().replaceAll(" ", "").startsWith(input.toLowerCase())) {
nextMap = loadedMap;
}
}
}
if (nextMap == null) {
throw new CommandException(new LocalizedChatMessage(ChatConstant.ERROR_NO_MAP_MATCH).getMessage(sender instanceof Player ? ((Player) sender).getLocale() : Locale.getDefault().toString()));
} else {
GameHandler.getGameHandler().getCycle().setMap(nextMap);
sender.sendMessage(ChatColor.DARK_PURPLE + new LocalizedChatMessage(ChatConstant.GENERIC_MAP_SET, ChatColor.GOLD + nextMap.getName() + ChatColor.DARK_PURPLE).getMessage(sender instanceof Player ? ((Player) sender).getLocale() : Locale.getDefault().toString()));
}
}
} |
package innovimax.mixthem.join;
import innovimax.mixthem.exceptions.MixException;
import innovimax.mixthem.interfaces.IJoinLine;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
/**
* <p>Joins two lines on a common field.</p>
* <p>This is the default implementation of IJoinLine.</p>
* @see IJoinLine
* @author Innovimax
* @version 1.0
*/
public class DefaultLineJoining implements IJoinLine {
@Override
public JoinType getType(List<String> params) {
JoinType type;
if (params.size() == 0) {
type = JoinType._DEFAULT;
} else if (params.size() == 1) {
type = JoinType._SAME_COL;
} else {
type = JoinType._DIFF_COL;
}
return type;
}
@Override
public List<Integer> getColumns(List<String> params) throws MixException {
try {
return params.stream().map(s -> new Integer(s)).collect(Collectors.toList());
} catch (NumberFormatException e) {
throw new MixException("Unexpected join parameter values " + params.toString(), e);
}
}
@Override
public String join(String line1, String line2, JoinType type, List<Integer> columns) throws MixException {
String join = null;
if (line1 != null && line2 != null) {
List<String> list1 = Arrays.asList(line1.split("\\s"));
List<String> list2 = Arrays.asList(line2.split("\\s"));
switch (type) {
case _DEFAULT:
if (list1.size() > 0 && list2.contains(list1.get(0))) {
String part1 = list1.stream().collect(Collectors.joining(" "));
String part2 = list2.stream().filter(s -> !list1.contains(s)).collect(Collectors.joining(" "));
join = part1 + " " + part2;
}
break;
case _SAME_COL:
int col = columns.get(0).intValue();
if (list1.size() >= col && list2.size() >= col && list1.get(col - 1).equals(list2.get(col - 1))) {
String part1 = list1.get(col - 1);
String part2 = list1.stream().filter(s -> !s.equals(part1)).collect(Collectors.joining(" "));
String part3 = list2.stream().filter(s -> !list1.contains(s)).collect(Collectors.joining(" "));
join = part1 + " " + part2 + " " + part3;
}
break;
case _DIFF_COL:
int col1 = columns.get(0).intValue();
int col2 = columns.get(1).intValue();
if (list1.size() >= col1 && list2.size() >= col2 && list1.get(col1 - 1).equals(list2.get(col2 - 1))) {
String part1 = list1.get(col1 - 1);
String part2 = list1.stream().filter(s -> !s.equals(part1)).collect(Collectors.joining(" "));
String part3 = list2.stream().filter(s -> !list1.contains(s)).collect(Collectors.joining(" "));
join = part1 + " " + part2 + " " + part3;
}
break;
}
}
return join;
}
} |
package io.atomicbits.scraml.mvnplugin;
import io.atomicbits.scraml.generator.ScramlGenerator;
import io.atomicbits.scraml.mvnplugin.util.ListUtils;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.project.MavenProject;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
@Mojo(name = "scraml")
public class ScramlMojo extends AbstractMojo {
@Parameter(defaultValue="${project}", readonly=true, required=true)
private MavenProject project;
/**
* Scraml file pointer to the RAML specification main file.
*/
@Parameter(property = "scraml.ramlApi", defaultValue = "")
private String ramlApi;
/**
* Scraml base directory to find the RAML files.
*/
@Parameter(property = "scraml.resourceDirectory", defaultValue = "src/main/resources")
private String resourceDirectory;
/**
* Scraml client source generation output directory.
*/
@Parameter(property = "scraml.outputDirectory", defaultValue = "target/generated-sources/scraml")
private String outputDirectory;
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
if (!ramlApi.isEmpty()) {
File ramlBaseDir;
File ramlSource;
if(resourceDirectory.startsWith("/")) {
ramlBaseDir = new File(resourceDirectory);
ramlSource = new File(ramlBaseDir, ramlApi);
} else {
File baseDir = project.getBasedir();
ramlBaseDir = new File(baseDir, resourceDirectory);
ramlSource = new File(ramlBaseDir, ramlApi);
}
String[] apiPackageAndClass = packageAndClassFromRamlPointer(ramlApi);
String apiPackageName = apiPackageAndClass[0];
String apiClassName = apiPackageAndClass[1];
Map<String, String> generatedFiles;
try {
generatedFiles = ScramlGenerator.generateJavaCode(ramlSource.toURI().toURL().toString(), apiPackageName, apiClassName);
} catch (MalformedURLException | NullPointerException e) {
feedbackOnException(ramlBaseDir, ramlApi, ramlSource);
throw new RuntimeException("Could not generate RAML client.", e);
}
File outputDirAsFile = new File(outputDirectory);
outputDirAsFile.mkdirs();
try {
for (Map.Entry<String, String> entry : generatedFiles.entrySet()) {
String filePath = entry.getKey();
String content = entry.getValue();
File fileInDst = new File(outputDirAsFile, filePath);
fileInDst.getParentFile().mkdirs();
FileWriter writer = new FileWriter(fileInDst);
writer.write(content);
writer.close();
}
} catch (IOException e) {
e.printStackTrace();
throw new RuntimeException("Could not generate RAML client.", e);
}
project.addCompileSourceRoot(outputDirectory);
}
}
private String escape(char ch) {
return "\\Q" + ch + "\\E";
}
private String[] packageAndClassFromRamlPointer(String pointer) {
String[] parts = pointer.split(escape('/'));
if (parts.length == 1) {
return new String[]{"io.atomicbits", cleanFileName(parts[0])};
} else {
String className = cleanFileName(parts[parts.length - 1]);
List<String> firstParts = Arrays.asList(parts).subList(0, parts.length - 1); // toIndex is exclusive
String packageParts = ListUtils.mkString(firstParts, ".");
return new String[]{packageParts, className};
}
}
private String cleanFileName(String fileName) {
String[] nameSplit = fileName.split(escape('.'));
String withOutExtension;
if (nameSplit.length == 0) {
withOutExtension = fileName;
} else {
withOutExtension = nameSplit[0];
}
// capitalize after special characters and drop those characters along the way
List<Character> dropChars = Arrays.asList('-', '_', '+', ' ');
String cleanedDropChars = withOutExtension;
for (Character dropChar : dropChars) {
List<String> items = removeEmpty(Arrays.asList(cleanedDropChars.split(escape(dropChar))));
List<String> capitalized = new ArrayList<>();
for (String item : items) {
capitalized.add((capitalize(item)));
}
cleanedDropChars = ListUtils.mkString(capitalized, "");
}
// capitalize after numbers 0 to 9, but keep the numbers
List<Character> numbers = Arrays.asList('0', '1', '2', '3', '4', '5', '6', '7', '8', '9');
// Make sure we don't drop the occurrences of numbers at the end by adding a space and removing it later.
String cleanedNumbers = cleanedDropChars + " ";
for (Character number : numbers) {
List<String> items = Arrays.asList(cleanedNumbers.split(escape(number))); // it's important NOT to remove the empty strings here
List<String> capitalized = new ArrayList<>();
for (String item : items) {
capitalized.add((capitalize(item)));
}
cleanedNumbers = ListUtils.mkString(capitalized, number.toString());
}
// final cleanup of all strange characters
return cleanedNumbers.replaceAll("[^A-Za-z0-9]", "").trim();
}
private String capitalize(String dirtyName) {
char[] chars = dirtyName.toCharArray();
if (chars.length > 0) {
chars[0] = Character.toUpperCase(chars[0]);
}
return new String(chars);
}
private List<String> removeEmpty(List<String> items) {
List<String> emptied = new ArrayList<>();
for (String item : items) {
if (!item.isEmpty()) {
emptied.add(item);
}
}
return emptied;
}
private void feedbackOnException(File ramlBaseDir,
String ramlPointer,
File ramlSource) {
System.out.println(
"Exception during RAMl parsing, possibly caused by a wrong RAML path.\n" +
"Are you sure the following values are correct (non-null)?\n\n" +
"- - - - - - - - - - - - - - - - - - - - - - -\n" +
"RAML base path: " + ramlBaseDir + "\n" +
"RAML relative path: " + ramlPointer + "\n" +
"RAML absolute path" + ramlSource + "\n" +
"- - - - - - - - - - - - - - - - - - - - - - -\n\n" +
"In case the relative path is wrong or null, check your project settings and" +
"make sure the 'scramlRamlApi in scraml in Compile' value points to the main" +
"raml file in your project's (or module's) resources directory."
);
}
} |
package javax.time.calendrical;
import java.io.Serializable;
import javax.time.CalendricalException;
/**
* The range of valid values for a date-time field.
* <p>
* All {@link DateTimeField} instances have a valid range of values.
* For example, the ISO day-of-month runs from 1 to somewhere between 28 and 31.
* This class captures that valid range.
* <p>
* Instances of this class are not tied to a specific rule
* <p>
* This class is immutable and thread-safe.
*/
public final class DateTimeValueRange implements Serializable {
/**
* Serialization version.
*/
private static final long serialVersionUID = 1L;
/**
* The smallest minimum value.
*/
private final long minSmallest;
/**
* The largest minimum value.
*/
private final long minLargest;
/**
* The smallest maximum value.
*/
private final long maxSmallest;
/**
* The largest maximum value.
*/
private final long maxLargest;
/**
* Obtains a fixed value range.
* <p>
* This factory obtains a range where the minimum and maximum values are fixed.
* For example, the ISO month-of-year always runs from 1 to 12.
*
* @param min the minimum value
* @param max the maximum value
*/
public static DateTimeValueRange of(long min, long max) {
if (min > max) {
throw new IllegalArgumentException("Minimum value must be less than maximum value");
}
return new DateTimeValueRange(min, min, max, max);
}
/**
* Obtains a variable value range.
* <p>
* This factory obtains a range where the minimum value is fixed and the maximum value may vary.
* For example, the ISO day-of-month always starts at 1, but ends between 28 and 31.
*
* @param min the minimum value
* @param maxSmallest the smallest maximum value
* @param maxLargest the largest maximum value
*/
public static DateTimeValueRange of(long min, long maxSmallest, long maxLargest) {
return of(min, min, maxSmallest, maxLargest);
}
/**
* Obtains a fully variable value range.
* <p>
* This factory obtains a range where both the minimum and maximum value may vary.
*
* @param minSmallest the smallest minimum value
* @param minLargest the largest minimum value
* @param maxSmallest the smallest maximum value
* @param maxLargest the largest maximum value
*/
public static DateTimeValueRange of(long minSmallest, long minLargest, long maxSmallest, long maxLargest) {
if (minSmallest > minLargest) {
throw new IllegalArgumentException("Smallest minimum value must be less than largest minimum value");
}
if (maxSmallest > maxLargest) {
throw new IllegalArgumentException("Smallest maximum value must be less than largest maximum value");
}
if (minSmallest > maxLargest) {
throw new IllegalArgumentException("Minimum value must be less than maximum value");
}
return new DateTimeValueRange(minSmallest, minLargest, maxSmallest, maxLargest);
}
/**
* Restrictive constructor.
*
* @param minSmallest the smallest minimum value
* @param minLargest the largest minimum value
* @param maxSmallest the smallest minimum value
* @param maxLargest the largest minimum value
*/
private DateTimeValueRange(long minSmallest, long minLargest, long maxSmallest, long maxLargest) {
this.minSmallest = minSmallest;
this.minLargest = minLargest;
this.maxSmallest = maxSmallest;
this.maxLargest = maxLargest;
}
/**
* Is the value range fixed and fully known.
* <p>
* For example, the ISO day-of-month runs from 1 to between 28 and 31.
* Since there is uncertainty about the maximum value, the range is not fixed.
* However, for the month of January, the range is always 1 to 31, thus it is fixed.
*
* @return true if the set of values is fixed
*/
public boolean isFixed() {
return minSmallest == minLargest && maxSmallest == maxLargest;
}
/**
* Gets the minimum value that the field can take.
* <p>
* For example, the ISO day-of-month always starts at 1.
* The minimum is therefore 1.
*
* @return the minimum value for this field
*/
public long getMinimum() {
return minSmallest;
}
/**
* Gets the largest possible minimum value that the field can take.
* <p>
* For example, the ISO day-of-month always starts at 1.
* The largest minimum is therefore 1.
*
* @return the largest possible minimum value for this field
*/
public long getLargestMinimum() {
return minLargest;
}
/**
* Gets the smallest possible maximum value that the field can take.
* <p>
* For example, the ISO day-of-month runs to between 28 and 31 days.
* The smallest maximum is therefore 28.
*
* @return the smallest possible maximum value for this field
*/
public long getSmallestMaximum() {
return maxSmallest;
}
/**
* Gets the maximum value that the field can take.
* <p>
* For example, the ISO day-of-month runs to between 28 and 31 days.
* The maximum is therefore 31.
*
* @return the maximum value for this field
*/
public long getMaximum() {
return maxLargest;
}
/**
* Checks if all values in the range fit in an {@code int}.
* <p>
* This checks that all valid values are within the bounds of an {@code int}.
* <p>
* For example, the ISO month-of-year has values from 1 to 12, which fits in an {@code int}.
* By comparison, ISO nano-of-day runs from 1 to 86,400,000,000,000 which does not fit in an {@code int}.
* <p>
* This implementation uses {@link #getMinimum()} and {@link #getMaximum()}.
*
* @return true if a valid value always fits in an {@code int}
*/
public boolean isIntValue() {
return getMinimum() >= Integer.MIN_VALUE && getMaximum() <= Integer.MAX_VALUE;
}
/**
* Checks if the value is within the valid range.
* <p>
* This checks that the value is within the stored range of values.
*
* @param value the value to check
* @return true if the value is valid
*/
public boolean isValidValue(long value) {
return (value >= getMinimum() && value <= getMaximum());
}
/**
* Checks if the value is within the valid range and that all values
* in the range fit in an {@code int}.
* <p>
* This method combines {@link #isIntValue()} and {@link #isValidValue(long)}.
*
* @param value the value to check
* @return true if the value is valid and fits in an {@code int}
*/
public boolean isValidIntValue(long value) {
return isIntValue() && isValidValue(value);
}
/**
* Checks that the specified value is valid.
* <p>
* This validates that the value is within the valid range of values.
* The field is only used to improve the error message.
*
* @param value the value to check
* @param field the field being checked, may be null
* @return the value that was passed in
* @see #isValidValue(long)
*/
public long checkValidValue(long value, DateTimeField field) {
if (isValidValue(value) == false) {
if (field != null) {
throw new CalendricalException("Invalid value for " + field.getName() + " (valid values " + this + "): " + value);
} else {
throw new CalendricalException("Invalid value (valid values " + this + "): " + value);
}
}
return value;
}
/**
* Checks that the specified value is valid and fits in an {@code int}.
* <p>
* This validates that the value is within the valid range of values and that
* all valid values are within the bounds of an {@code int}.
* The field is only used to improve the error message.
*
* @param value the value to check
* @param field the field being checked, may be null
* @return the value that was passed in
* @see #isValidIntValue(long)
*/
public int checkValidIntValue(long value, DateTimeField field) {
if (isValidIntValue(value) == false) {
throw new CalendricalException("Invalid int value for " + field.getName() + ": " + value);
}
return (int) value;
}
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (obj instanceof DateTimeValueRange) {
DateTimeValueRange other = (DateTimeValueRange) obj;
return minSmallest == other.minSmallest && minLargest == other.minLargest &&
maxSmallest == other.maxSmallest && maxLargest == other.maxLargest;
}
return false;
}
@Override
public int hashCode() {
long hash = minSmallest + minLargest << 16 + minLargest >> 48 + maxSmallest << 32 +
maxSmallest >> 32 + maxLargest << 48 + maxLargest >> 16;
return (int) (hash ^ (hash >>> 32));
}
@Override
public String toString() {
StringBuilder buf = new StringBuilder();
buf.append(minSmallest);
if (minSmallest != minLargest) {
buf.append('/').append(minLargest);
}
buf.append(" - ").append(maxSmallest);
if (maxSmallest != maxLargest) {
buf.append('/').append(maxLargest);
}
return buf.toString();
}
} |
package markharder.physicsdemos.demo.demos;
import java.awt.Color;
import java.awt.Font;
import java.awt.Graphics;
import java.awt.Rectangle;
import markharder.physicsdemos.demo.interfacing.Slider;
public class Rocket implements Demo {
private static double EXHAUST_VELOCITY = -5.0;
private static double FUEL_MASS = 0.2;
public boolean running;
private int width, height;
private int ticks;
private Rectangle rocket;
private double vy, ay;
private int fuel;
private double mass = 5.0;
private int maxHeight;
private Slider fuelSlider;
private Slider massSlider;
public Rocket(int width, int height) {
this.width = width;
this.height = height;
running = false;
ticks = 0;
rocket = new Rectangle(210, 0, 40, 60);
maxHeight = (int) rocket.getHeight();
vy = 0;
ay = 0;
fuel = 60 / 3;
mass = 5.0;
fuelSlider = new Slider(400, 50, 300, 5, 30, 0.0, "Fuel");
massSlider = new Slider(450, 50, 300, 1, 10, 0.5, "Mass");
}
@Override
public void draw(Graphics g) {
g.setColor(Color.BLACK);
g.fillRect(0, 0, width, height);
g.setColor(Color.BLUE);
g.fillRect((int) (rocket.getX() - rocket.getWidth() / 2), (int) (height - (rocket.getY() + rocket.getHeight())), (int) rocket.getWidth(), (int) rocket.getHeight());
if (fuel > 0) {
g.setColor(Color.RED);
g.fillRect((int) (rocket.getX() - 5), (int) (height - rocket.getY()), 10, 10);
}
fuelSlider.draw(g);
massSlider.draw(g);
g.setColor(Color.GREEN);
g.drawLine(0, height - maxHeight, width - 100, height - maxHeight);
g.setFont(new Font("Times New Roman", Font.PLAIN, 24));
g.drawString(Integer.toString(maxHeight) + "m", 50, 50);
}
@Override
public void tick() {
if (fuelSlider.isActive()) {
fuelSlider.tick();
}
if (massSlider.isActive()) {
massSlider.tick();
if (rocket.getY() < 0.001) {
mass = massSlider.getValue();
rocket = new Rectangle(210, 0, (int) (mass * 8), 60);
}
}
if (running) {
if (fuel > 0) {
ay = -EXHAUST_VELOCITY / mass();
fuel
} else {
ay = 0;
}
if (rocket.getY() > 0) {
ay -= (9.8 / 60); // gravity
}
vy += ay;
rocket.setBounds((int) rocket.getX(), (int) (rocket.getY() + vy), (int) rocket.getWidth(), (int) rocket.getHeight());
if (rocket.getY() < 0.001) {
rocket.setBounds((int) rocket.getX(), 0, (int) rocket.getWidth(), (int) rocket.getHeight());
}
if (rocket.getY() + rocket.getHeight() > maxHeight) {
maxHeight = (int) (rocket.getY() + rocket.getHeight());
}
}
}
public double mass() {
return mass + FUEL_MASS * fuel;
}
public void start() {
}
public void pause() {
running = false;
}
public void quit() {
}
public void restart() {
running = false;
ticks = 0;
rocket = new Rectangle(210, 0, 40, 60);
maxHeight = (int) rocket.getHeight();
fuel = 20;
mass = 5.0;
vy = 0;
ay = 0;
fuelSlider = new Slider(400, 50, 300, 5, 30, 0.0, "Fuel");
massSlider = new Slider(450, 50, 300, 1, 10, 0.5, "Mass");
}
public void click(int x, int y) {
if (fuelSlider.contains(x, y)) {
fuelSlider.click();
} else if (massSlider.contains(x, y)) {
massSlider.click();
}
}
public void release(int x, int y) {
if (fuelSlider.isActive()) {
fuelSlider.release();
} else if (massSlider.isActive()) {
massSlider.release();
}
}
public void keypress(char key) {
ticks = 0;
mass = massSlider.getValue();
rocket = new Rectangle(210, 0, (int) (mass * 8), 60);
maxHeight = (int) rocket.getHeight();
fuel = (int) fuelSlider.getValue();
vy = 0;
ay = 0;
running = true;
}
} |
package mcjty.rftoolsdim.items;
import mcjty.rftoolsdim.RFToolsDim;
import net.minecraft.client.renderer.block.model.ModelResourceLocation;
import net.minecraft.item.Item;
import net.minecraftforge.client.model.ModelLoader;
import net.minecraftforge.fml.common.registry.GameRegistry;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
/**
 * Base item for RFTools Dimensions: wires up the shared unlocalized name,
 * registry name and creative tab, and registers the item, in one place.
 */
public class GenericRFToolsItem extends Item {
// Registers the item with the GameRegistry immediately on construction,
// after the names and creative tab have been set.
public GenericRFToolsItem(String name) {
setUnlocalizedName(name);
setRegistryName(name);
setCreativeTab(RFToolsDim.tabRfToolsDim);
GameRegistry.register(this);
}
// Client-side only: binds the default "inventory" model variant
// (looked up by registry name) to metadata 0 of this item.
@SideOnly(Side.CLIENT)
public void initModel() {
ModelLoader.setCustomModelResourceLocation(this, 0, new ModelResourceLocation(getRegistryName(), "inventory"));
}
} |
package net.anyflow.menton.http;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import io.netty.handler.codec.http.DefaultFullHttpResponse;
import io.netty.handler.codec.http.FullHttpRequest;
import io.netty.handler.codec.http.HttpHeaders;
import io.netty.handler.codec.http.HttpHeaders.Names;
import io.netty.handler.codec.http.HttpResponseStatus;
import io.netty.handler.codec.http.HttpVersion;
import io.netty.handler.codec.http.websocketx.WebSocketFrame;
import io.netty.handler.codec.http.websocketx.WebSocketServerHandshaker;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import net.anyflow.menton.Configurator;
import net.anyflow.menton.Environment;
import org.json.JSONException;
import org.json.JSONObject;
import com.google.common.io.Files;
/**
* @author anyflow
*/
public class HttpServerHandler extends SimpleChannelInboundHandler<Object> {
// Class-wide SLF4J logger.
private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(HttpServerHandler.class);
// Message used when no handler matches a request path.
private static final String FAILED_TO_FIND_REQUEST_HANDLER = "Failed to find the request handler.";
// Maps file extensions (e.g. "css", "js") to MIME types; loaded once from configuration.
private static final Map<String, String> FILE_REQUEST_EXTENSIONS;
static {
FILE_REQUEST_EXTENSIONS = new HashMap<String, String>();
try {
// The "menton.httpServer.MIME" property is expected to hold a JSON object
// of extension -> MIME type pairs.
JSONObject obj = new JSONObject(Configurator.instance().getProperty("menton.httpServer.MIME"));
@SuppressWarnings("unchecked")
Iterator<String> keys = obj.keys();
while(keys.hasNext()) {
String key = keys.next();
FILE_REQUEST_EXTENSIONS.put(key, obj.get(key).toString());
}
}
catch(JSONException e) {
// A malformed or missing MIME config leaves the map empty, which
// effectively disables static web-resource serving.
logger.error(e.getMessage(), e);
}
}
// Optional handler for WebSocket frames; null when the server is HTTP-only.
private final WebSocketFrameHandler webSocketFrameHandler;
// Set once the WebSocket upgrade handshake completes; null before that.
private WebSocketServerHandshaker webSocketHandshaker = null;
// Creates an HTTP-only handler (incoming WebSocket frames will raise IllegalStateException).
public HttpServerHandler() {
webSocketFrameHandler = null;
}
// Creates a handler that also delegates WebSocket frames to the given frame handler.
public HttpServerHandler(WebSocketFrameHandler webSocketFrameHandler) {
this.webSocketFrameHandler = webSocketFrameHandler;
}
/**
 * Returns the request path when it targets a static web resource, i.e. when
 * its file extension appears in FILE_REQUEST_EXTENSIONS. Returns null for
 * handler requests and for syntactically invalid URIs.
 */
private String getWebResourceRequestPath(HttpRequest request) {
    String path;
    try {
        path = new URI(request.getUri()).getPath();
    }
    catch(URISyntaxException e) {
        return null; // unparsable URI: not a static resource
    }
    for(String extension : FILE_REQUEST_EXTENSIONS.keySet()) {
        if(path.endsWith("." + extension)) {
            return path;
        }
    }
    return null;
}
/*
* (non-Javadoc)
* @see io.netty.channel.SimpleChannelInboundHandler#channelRead0(io.netty.channel.ChannelHandlerContext, java.lang.Object)
*/
@Override
protected void channelRead0(ChannelHandlerContext ctx, Object msg) throws Exception {
if(msg instanceof FullHttpRequest) {
FullHttpRequest request = (FullHttpRequest)msg;
if("WebSocket".equalsIgnoreCase(request.headers().get("Upgrade")) && "Upgrade".equalsIgnoreCase(request.headers().get("Connection"))) {
webSocketHandshaker = (new DefaultWebSocketHandshaker()).handshake(ctx, request);
return;
}
}
else if(msg instanceof WebSocketFrame) {
if(webSocketHandshaker == null) { throw new IllegalStateException("WebSocketServerHandshaker shouldn't be null"); }
if(webSocketFrameHandler == null) { throw new IllegalStateException("webSocketFrameHandler not found"); }
webSocketFrameHandler.handle(webSocketHandshaker, ctx, (WebSocketFrame)msg);
return;
}
else {
return;
}
HttpRequest request = new HttpRequest(ctx.channel(), (FullHttpRequest)msg);
if(HttpHeaders.is100ContinueExpected(request)) {
ctx.write(new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.CONTINUE));
return;
}
if("true".equalsIgnoreCase(Configurator.instance().getProperty("menton.logging.writeHttpRequest"))) {
logger.info(request.toString());
}
HttpResponse response = HttpResponse.createServerDefault(ctx.channel(), request.headers().get(HttpHeaders.Names.COOKIE));
String webResourceRequestPath = getWebResourceRequestPath(request);
if(webResourceRequestPath != null) {
handleWebResourceRequest(response, webResourceRequestPath);
}
else {
try {
String path = (new URI(request.getUri())).getPath();
String content = handleClassTypeHandler(request, response, path);
response.setContent(content);
}
catch(URISyntaxException e) {
response.setStatus(HttpResponseStatus.NOT_FOUND);
logger.info("unexcepted URI : {}", request.getUri().toString());
}
catch(Exception e) {
response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
logger.error("Unknown exception was thrown in business logic handler.\r\n" + e.getMessage(), e);
}
}
setDefaultHeaders(request, response);
if("true".equalsIgnoreCase(Configurator.instance().getProperty("menton.logging.writeHttpResponse"))) {
logger.info(response.toString());
}
ctx.write(response);
}
/**
* @param response
* @param webResourceRequestPath
* @throws IOException
*/
private void handleWebResourceRequest(HttpResponse response, String webResourceRequestPath) throws IOException {
InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream(webResourceRequestPath);
if(is == null) {
String rootPath = (new File(Configurator.instance().WebResourcePhysicalRootPath(), webResourceRequestPath)).getPath();
try {
is = new FileInputStream(rootPath);
}
catch(FileNotFoundException e) {
is = null;
}
}
if(is == null) {
response.setStatus(HttpResponseStatus.NOT_FOUND);
}
else {
ByteArrayOutputStream buffer = new ByteArrayOutputStream();
int nRead;
byte[] data = new byte[16384];
while((nRead = is.read(data, 0, data.length)) != -1) {
buffer.write(data, 0, nRead);
}
buffer.flush();
response.content().writeBytes(buffer.toByteArray());
String ext = Files.getFileExtension(webResourceRequestPath);
response.headers().set(Names.CONTENT_TYPE, FILE_REQUEST_EXTENSIONS.get(ext));
is.close();
}
}
private void setDefaultHeaders(HttpRequest request, HttpResponse response) {
response.headers().add(Names.SERVER, Environment.PROJECT_ARTIFACT_ID + " " + Environment.PROJECT_VERSION);
boolean keepAlive = request.headers().get(HttpHeaders.Names.CONNECTION) == HttpHeaders.Values.KEEP_ALIVE;
if(keepAlive) {
response.headers().set(HttpHeaders.Names.CONNECTION, HttpHeaders.Values.KEEP_ALIVE);
}
if(Configurator.instance().getProperty("menton.httpServer.allowCrossDomain", "false").equalsIgnoreCase("true")) {
response.headers().add(Names.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
response.headers().add(Names.ACCESS_CONTROL_ALLOW_METHODS, "POST, GET, PUT, DELETE");
response.headers().add(Names.ACCESS_CONTROL_ALLOW_HEADERS, "X-PINGARUNER");
response.headers().add(Names.ACCESS_CONTROL_MAX_AGE, "1728000");
}
response.headers().set(HttpHeaders.Names.CONTENT_LENGTH, response.content().readableBytes());
}
private String handleClassTypeHandler(HttpRequest request, HttpResponse response, String requestedPath) throws InstantiationException,
IllegalAccessException, IOException {
Class<? extends RequestHandler> handlerClass = RequestHandler.findClass(requestedPath, request.getMethod().toString());
if(handlerClass == null) {
response.setStatus(HttpResponseStatus.NOT_FOUND);
logger.info("unexcepted URI : {}", request.getUri().toString());
response.headers().add(Names.CONTENT_TYPE, "text/html");
return HtmlGenerator.error(FAILED_TO_FIND_REQUEST_HANDLER, response.getStatus());
}
RequestHandler handler = handlerClass.newInstance();
handler.initialize(request, response);
return handler.call();
}
@Override
public void channelReadComplete(ChannelHandlerContext ctx) throws Exception {
ctx.flush();
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
logger.error(cause.getMessage(), cause);
ctx.close();
}
} |
package net.engin33r.luaspigot.lua.type;
import net.engin33r.luaspigot.lua.WeakType;
import net.engin33r.luaspigot.lua.annotation.DynFieldDef;
import net.engin33r.luaspigot.lua.annotation.MethodDef;
import org.bukkit.Location;
import org.bukkit.OfflinePlayer;
import org.bukkit.entity.Player;
import org.luaj.vm2.LuaValue;
import org.luaj.vm2.Varargs;
/**
* Wrapper type describing a player on the server.
*/
public class LuaPlayer extends WeakType {
private final OfflinePlayer p;
private static LuaValue typeMetatable = LuaValue.tableOf();
public LuaPlayer(OfflinePlayer p) {
super();
this.p = p;
registerField("uuid", new LuaUUID(p.getUniqueId()));
registerField("name", LuaValue.valueOf(p.getName()));
}
public LuaPlayer(Player p) {
this((OfflinePlayer) p);
}
@Override
public String toLuaString() {
return "player: "+p.getName()+" ("+p.getUniqueId()+")";
}
@Override
public String getName() {
return "player";
}
private OfflinePlayer getPlayer() {
return this.p;
}
@DynFieldDef(name = "online")
public LuaValue getOnline() {
return LuaValue.valueOf(this.p.isOnline());
}
@MethodDef(name = "message")
public Varargs message(Varargs args) {
Player p = this.p.getPlayer();
if (p != null) p.sendMessage(args.checkjstring(1));
return NIL;
}
@MethodDef(name = "teleport")
public Varargs teleport(Varargs args) {
Player p = this.p.getPlayer();
if (p == null) return NIL;
Location loc = null;
if (args.narg() < 3) {
error("at least three arguments expected");
return NIL;
} else if (args.narg() == 3) {
loc = new Location(p.getWorld(), args.checkdouble(1),
args.checkdouble(2), args.checkdouble(3));
}
p.teleport(loc);
return NIL;
}
@Override
protected LuaValue getMetatable() {
return typeMetatable;
}
} |
package net.mingsoft.cms.biz.impl;
import cn.hutool.core.lang.Assert;
import cn.hutool.core.util.ObjectUtil;
import cn.hutool.core.util.StrUtil;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import net.mingsoft.base.biz.impl.BaseBizImpl;
import net.mingsoft.base.dao.IBaseDao;
import net.mingsoft.basic.util.PinYinUtil;
import net.mingsoft.cms.biz.ICategoryBiz;
import net.mingsoft.cms.dao.ICategoryDao;
import net.mingsoft.cms.dao.IContentDao;
import net.mingsoft.cms.entity.CategoryEntity;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.ArrayList;
import java.util.List;
/**
 * Business-layer implementation for CMS category management.
 *
 * Maintains, on every save/update/delete, the derived bookkeeping of the
 * category tree: the pinyin slug (kept unique by appending the entity id on
 * collision), the comma-separated ancestor-id chain (categoryParentId), the
 * slash-separated path of pinyin slugs (categoryPath), the root ancestor id
 * (topId) and the leaf flags of affected parents.
 *
 * NOTE(review): throughout this class {@code categoryId} appears to hold the
 * id of the PARENT category — it is passed to getById() and the result is
 * treated as the parent. TODO confirm against CategoryEntity.
 *
 * 2019-11-28 15:12:32<br/>
 * <br/>
 */
@Service("cmscategoryBizImpl")
@Transactional(rollbackFor = RuntimeException.class)
public class CategoryBizImpl extends BaseBizImpl<ICategoryDao, CategoryEntity> implements ICategoryBiz {

    @Autowired
    private ICategoryDao categoryDao;

    @Autowired
    private IContentDao contentDao;

    @Override
    protected IBaseDao getDao() {
        // Hand the category DAO to the generic CRUD base class.
        return categoryDao;
    }

    @Override
    public List<CategoryEntity> queryChilds(CategoryEntity category) {
        // Straight delegation to the DAO's child query.
        return categoryDao.queryChildren(category);
    }

    /**
     * Saves a new category: derives its pinyin slug, wires up parent/leaf
     * bookkeeping and rebuilds its path before a final update.
     */
    @Override
    public void saveEntity(CategoryEntity categoryEntity) {
        // Default slug comes from the title; an explicitly supplied pinyin wins.
        String pingYin = PinYinUtil.getPingYin(categoryEntity.getCategoryTitle());
        if (StrUtil.isNotBlank(categoryEntity.getCategoryPinyin())) {
            pingYin = categoryEntity.getCategoryPinyin();
        }
        // Probe for an existing category with the same pinyin BEFORE saving;
        // the result decides below whether the slug must be disambiguated.
        CategoryEntity category=new CategoryEntity();
        category.setCategoryPinyin(pingYin);
        Object categoryBizEntity = getEntity(category);
        setParentId(categoryEntity);
        categoryEntity.setCategoryPinyin(pingYin);
        // A non-"0", non-blank categoryId means this node gets a parent, which
        // therefore can no longer be a leaf.
        if(StrUtil.isNotBlank(categoryEntity.getCategoryId())&&!"0".equals(categoryEntity.getCategoryId())){
            CategoryEntity parent = getById(categoryEntity.getCategoryId());
            if(parent.getLeaf()){
                parent.setLeaf(false);
                updateById(parent);
            }
        }
        // Brand-new entities (no id yet) start out as leaves.
        categoryEntity.setLeaf(false);
        if (StrUtil.isEmpty(categoryEntity.getId())) {
            categoryEntity.setLeaf(true);
        }
        super.save(categoryEntity);
        // Pinyin collision: disambiguate with the id generated by the save.
        if(categoryBizEntity!=null){
            categoryEntity.setCategoryPinyin(pingYin+categoryEntity.getId());
        }
        CategoryEntity parentCategory = null;
        if (StringUtils.isNotBlank(categoryEntity.getCategoryId())) {
            parentCategory = (CategoryEntity)getById(categoryEntity.getCategoryId());
        }
        // Path = parent path + "/" + own pinyin (empty parent path for roots).
        String path=ObjectUtil.isNotNull(parentCategory)?parentCategory.getCategoryPath():"";
        categoryEntity.setCategoryPath( path+"/" + categoryEntity.getCategoryPinyin());
        setTopId(categoryEntity);
        super.updateById(categoryEntity);
    }

    /**
     * Fills categoryParentId (the comma-separated ancestor-id chain ending
     * with the direct parent's id) and, for existing entities, refreshes the
     * category path from the parent's path.
     */
    private void setParentId(CategoryEntity categoryEntity) {
        String path = "";
        if(StringUtils.isNotEmpty(categoryEntity.getCategoryId())&&Long.parseLong(categoryEntity.getCategoryId())>0) {
            CategoryEntity category = (CategoryEntity)getById(categoryEntity.getCategoryId());
            path = category.getCategoryPath();
            if(StringUtils.isEmpty(category.getCategoryParentId())) {
                // Parent is a root: the chain is just the parent's own id.
                categoryEntity.setCategoryParentId(category.getId());
            } else {
                // Extend the parent's ancestor chain with the parent itself.
                categoryEntity.setCategoryParentId(category.getCategoryParentId()+","+category.getId());
            }
        }else {
            // No (or "0") parent: this is a root category.
            categoryEntity.setCategoryParentId(null);
        }
        if(StringUtils.isNotBlank(categoryEntity.getId())) {
            categoryEntity.setCategoryPath(path+ "/" + categoryEntity.getCategoryPinyin());
        }
    }

    /**
     * Recursively pushes this entity's (possibly changed) ancestor chain and
     * path down to all of its descendants.
     */
    private void setChildParentId(CategoryEntity categoryEntity) {
        // Query direct children: entities whose categoryId equals our id.
        CategoryEntity category=new CategoryEntity();
        category.setCategoryId(categoryEntity.getId());
        List<CategoryEntity> list = categoryDao.query(category);
        list.forEach(x->{
            if(StringUtils.isEmpty(categoryEntity.getCategoryParentId())) {
                x.setCategoryParentId(categoryEntity.getId());
            } else {
                x.setCategoryParentId(categoryEntity.getCategoryParentId()+","+categoryEntity.getId());
            }
            String path=categoryEntity.getCategoryPath();
            // Child path = this node's path + "/" + the child's pinyin.
            x.setCategoryPath(path+"/"+x.getCategoryPinyin());
            super.updateEntity(x);
            // Recurse so the whole subtree stays consistent.
            setChildParentId(x);
        });
    }

    /**
     * Updates a category while keeping pinyin uniqueness, parent leaf flags,
     * topId and all descendant paths consistent.
     */
    @Override
    public void updateEntity(CategoryEntity entity) {
        setParentId(entity);
        String pingYin =entity.getCategoryPinyin();
        if(StrUtil.isNotBlank(pingYin)){
            // If another category already owns this pinyin, disambiguate with our id.
            CategoryEntity category=new CategoryEntity();
            category.setCategoryPinyin(pingYin);
            CategoryEntity categoryBizEntity = (CategoryEntity)getEntity(category);
            if(categoryBizEntity!=null&&!categoryBizEntity.getId().equals(entity.getId())){
                entity.setCategoryPinyin(pingYin+entity.getId());
            }
        }
        setParentLeaf(entity);
        setTopId(entity);
        super.updateById(entity);
        setChildParentId(entity);
    }

    @Override
    public void update(CategoryEntity entity) {
        // Plain base-class update without the tree bookkeeping of updateEntity().
        super.updateEntity(entity);
    }

    /**
     * Deletes a category together with its subtree and the contents attached
     * to the deleted categories.
     */
    @Override
    public void delete(String categoryId) {
        CategoryEntity category = (CategoryEntity) categoryDao.selectById(categoryId);
        if(category != null){
            // NOTE(review): the ancestor chain is cleared before querying
            // children — presumably so queryChildren matches the subtree by id
            // only; TODO confirm against ICategoryDao.queryChildren.
            category.setCategoryParentId(null);
            List<CategoryEntity> childrenList = categoryDao.queryChildren(category);
            List<String> ids = new ArrayList<>();
            for(int i = 0; i < childrenList.size(); i++){
                ids.add(childrenList.get(i).getId());
            }
            categoryDao.deleteBatchIds(ids);
            contentDao.deleteEntityByCategoryIds(ids.toArray(new String[ids.size()]));
        }
    }

    /**
     * Recomputes leaf flags for both the old and the new parent when a
     * category is re-parented.
     *
     * @param entity the category being updated; must not be null
     */
    private void setParentLeaf(CategoryEntity entity){
        Assert.notNull(entity);
        // Load the persisted state to see whether the parent actually changed.
        CategoryEntity categoryEntity = getById(entity.getId());
        if(!entity.getCategoryId().equals(categoryEntity.getCategoryId())){
            // Old parent: it becomes a leaf again when no other child remains.
            if(StrUtil.isNotBlank(categoryEntity.getCategoryId())&&!"0".equals(categoryEntity.getCategoryId())){
                CategoryEntity parent = getById(categoryEntity.getCategoryId());
                boolean leaf = parent.getLeaf();
                QueryWrapper<CategoryEntity> queryWrapper = new QueryWrapper<>();
                parent.setLeaf(count(queryWrapper.eq("category_id",parent.getId()).ne("id",entity.getId()))==0);
                if(leaf!=parent.getLeaf()){
                    updateById(parent);
                }
            }
            // New parent: gaining a child means it can no longer be a leaf.
            if(StrUtil.isNotBlank(entity.getCategoryId())&&!"0".equals(entity.getCategoryId())){
                CategoryEntity parent = getById(entity.getCategoryId());
                if(parent.getLeaf()){
                    parent.setLeaf(false);
                    updateById(parent);
                }
            }
        }
    }

    /**
     * Sets topId to the root ancestor's id (the first entry of the ancestor
     * chain), or "0" for a root category.
     */
    private void setTopId(CategoryEntity entity){
        String categoryParentId = entity.getCategoryParentId();
        if(StrUtil.isNotBlank(categoryParentId)){
            String[] ids = categoryParentId.split(",");
            // The chain is ordered from the topmost ancestor downwards.
            if(ids.length>0){
                entity.setTopId(ids[0]);
                return;
            }
        }
        entity.setTopId("0");
    }
}
package net.rushhourgame.managedbean;
import java.io.Serializable;
import java.util.List;
import java.util.logging.Logger;
import javax.annotation.PostConstruct;
import javax.inject.Named;
import javax.faces.view.ViewScoped;
import javax.inject.Inject;
import javax.transaction.Transactional;
import net.rushhourgame.RushHourSession;
import net.rushhourgame.controller.CompanyController;
import net.rushhourgame.controller.LineController;
import net.rushhourgame.controller.PlayerController;
import net.rushhourgame.controller.RailController;
import net.rushhourgame.controller.ResidenceController;
import net.rushhourgame.controller.StationController;
import net.rushhourgame.controller.StepForHumanController;
import net.rushhourgame.entity.Company;
import net.rushhourgame.entity.Line;
import net.rushhourgame.entity.Player;
import net.rushhourgame.entity.RailEdge;
import net.rushhourgame.entity.RailNode;
import net.rushhourgame.entity.Residence;
import net.rushhourgame.entity.Station;
import net.rushhourgame.entity.StepForHuman;
import net.rushhourgame.exception.RushHourException;
import static net.rushhourgame.managedbean.OperationType.*;
@Named(value = "game")
@ViewScoped
public class GameViewBean implements Serializable{
private static final long serialVersionUID = 1L;
private static final Logger LOG = Logger.getLogger(GameViewBean.class.getName());
@Inject
protected PlayerController pCon;
@Inject
protected CompanyController cCon;
@Inject
protected ResidenceController rCon;
@Inject
protected RailController railCon;
@Inject
protected StationController stCon;
@Inject
protected LineController lCon;
@Inject
protected StepForHumanController sCon;
@Inject
protected RushHourSession rhSession;
protected Player player;
protected OperationType operation = NONE;
protected double centerX;
protected double centerY;
protected double scale;
protected int mouseX;
protected int mouseY;
@PostConstruct
public void init() {
player = pCon.findByToken(rhSession.getToken());
scale = 4;
}
@Transactional
public void onClick() throws RushHourException{
switch(operation){
case CREATE_RAIL:
railCon.create(player, mouseX, mouseY);
break;
}
}
public List<Company> getCompanies() {
return cCon.findIn(centerX, centerY, scale);
}
public List<Residence> getResidences() {
return rCon.findIn(centerX, centerY, scale);
}
public List<RailNode> getRailNodes() {
return railCon.findNodeIn(centerX, centerY, scale);
}
public List<RailEdge> getRailEdges() {
return railCon.findEdgeIn(centerX, centerY, scale);
}
public List<Station> getStations() {
return stCon.findIn(centerX, centerY, scale);
}
@Transactional
public List<Line> getLines() {
return lCon.findIn(centerX, centerY, scale);
}
public List<StepForHuman> getStepForHuman() {
return sCon.findIn(centerX, centerY, scale);
}
public int getMouseX() {
return mouseX;
}
public void setMouseX(int mouseX) {
this.mouseX = mouseX;
}
public int getMouseY() {
return mouseY;
}
public void setMouseY(int mouseY) {
this.mouseY = mouseY;
}
public OperationType getOperation() {
return operation;
}
public void setOperation(OperationType operation) {
this.operation = operation;
}
public boolean isOperating(){
return operation != NONE;
}
} |
package net.sf.jabref.importer;
import java.io.*;
import java.util.*;
import net.sf.jabref.importer.fileformat.*;
import net.sf.jabref.logic.id.IdGenerator;
import net.sf.jabref.logic.l10n.Localization;
import net.sf.jabref.model.database.BibtexDatabase;
import net.sf.jabref.model.entry.BibtexEntry;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import net.sf.jabref.*;
public class ImportFormatReader {
public static final String BIBTEX_FORMAT = "BibTeX";
/**
* all import formats, in the default order of import formats
*/
private final SortedSet<ImportFormat> formats = new TreeSet<ImportFormat>();
private static final Log LOGGER = LogFactory.getLog(ImportFormatReader.class);
public void resetImportFormats() {
formats.clear();
formats.add(new BiblioscapeImporter());
formats.add(new BibtexImporter());
formats.add(new BibteXMLImporter());
formats.add(new BiomailImporter());
formats.add(new CopacImporter());
formats.add(new CsaImporter());
formats.add(new EndnoteImporter());
formats.add(new FreeCiteImporter());
formats.add(new InspecImporter());
formats.add(new IsiImporter());
formats.add(new JstorImporter());
formats.add(new MedlineImporter());
formats.add(new MedlinePlainImporter());
formats.add(new MsBibImporter());
formats.add(new OvidImporter());
formats.add(new PdfContentImporter());
formats.add(new PdfXmpImporter());
formats.add(new RepecNepImporter());
formats.add(new RisImporter());
formats.add(new ScifinderImporter());
formats.add(new SilverPlatterImporter());
formats.add(new SixpackImporter());
/**
* Get custom import formats
*/
for (CustomImportList.Importer importer : Globals.prefs.customImports) {
try {
ImportFormat imFo = importer.getInstance();
formats.add(imFo);
} catch (Exception e) {
System.err.println("Could not instantiate " + importer.getName() + " importer, will ignore it. Please check if the class is still available.");
e.printStackTrace();
}
}
}
/**
* Format for a given CLI-ID.
* <p>
* <p>Will return the first format according to the default-order of
* format that matches the given ID.</p>
*
* @param cliId CLI-Id
* @return Import Format or <code>null</code> if none matches
*/
private ImportFormat getByCliId(String cliId) {
for (ImportFormat format : formats) {
if (format.getCLIId().equals(cliId)) {
return format;
}
}
return null;
}
public List<BibtexEntry> importFromStream(String format, InputStream in, OutputPrinter status)
throws IOException {
ImportFormat importer = getByCliId(format);
if (importer == null) {
throw new IllegalArgumentException("Unknown import format: " + format);
}
List<BibtexEntry> res = importer.importEntries(in, status);
// Remove all empty entries
if (res != null) {
ImportFormatReader.purgeEmptyEntries(res);
}
return res;
}
public List<BibtexEntry> importFromFile(String format, String filename, OutputPrinter status)
throws IOException {
ImportFormat importer = getByCliId(format);
if (importer == null) {
throw new IllegalArgumentException("Unknown import format: " + format);
}
return importFromFile(importer, filename, status);
}
public List<BibtexEntry> importFromFile(ImportFormat importer, String filename, OutputPrinter status) throws IOException {
List<BibtexEntry> result = null;
InputStream stream = null;
try {
File file = new File(filename);
stream = new FileInputStream(file);
if (!importer.isRecognizedFormat(stream)) {
throw new IOException(Localization.lang("Wrong file format"));
}
stream = new FileInputStream(file);
result = importer.importEntries(stream, status);
} finally {
if (stream != null) {
stream.close();
}
}
return result;
}
public static BibtexDatabase createDatabase(Collection<BibtexEntry> bibentries) {
ImportFormatReader.purgeEmptyEntries(bibentries);
BibtexDatabase database = new BibtexDatabase();
for (BibtexEntry entry : bibentries) {
entry.setId(IdGenerator.next());
database.insertEntry(entry);
}
return database;
}
/**
* All custom importers.
* <p>
* <p>Elements are in default order.</p>
*
* @return all custom importers, elements are of type InputFormat
*/
public SortedSet<ImportFormat> getCustomImportFormats() {
SortedSet<ImportFormat> result = new TreeSet<ImportFormat>();
for (ImportFormat format : formats) {
if (format.getIsCustomImporter()) {
result.add(format);
}
}
return result;
}
/**
* All built-in importers.
* <p>
* <p>Elements are in default order.</p>
*
* @return all custom importers, elements are of type InputFormat
*/
public SortedSet<ImportFormat> getBuiltInInputFormats() {
SortedSet<ImportFormat> result = new TreeSet<ImportFormat>();
for (ImportFormat format : formats) {
if (!format.getIsCustomImporter()) {
result.add(format);
}
}
return result;
}
/**
* All importers.
* <p>
* <p>
* Elements are in default order.
* </p>
*
* @return all custom importers, elements are of type InputFormat
*/
public SortedSet<ImportFormat> getImportFormats() {
return this.formats;
}
/**
* Human readable list of all known import formats (name and CLI Id).
* <p>
* <p>List is in default-order.</p>
*
* @return human readable list of all known import formats
*/
public String getImportFormatList() {
StringBuilder sb = new StringBuilder();
for (ImportFormat imFo : formats) {
int pad = Math.max(0, 14 - imFo.getFormatName().length());
sb.append(" ");
sb.append(imFo.getFormatName());
for (int j = 0; j < pad; j++) {
sb.append(" ");
}
sb.append(" : ");
sb.append(imFo.getCLIId());
sb.append("\n");
}
return sb.toString(); //.substring(0, res.length()-1);
}
/**
* Expand initials, e.g. EH Wissler -> E. H. Wissler or Wissler, EH -> Wissler, E. H.
*
* @param name
* @return The name after expanding initials.
*/
public static String expandAuthorInitials(String name) {
String[] authors = name.split(" and ");
StringBuilder sb = new StringBuilder();
for (int i = 0; i < authors.length; i++) {
if (authors[i].contains(", ")) {
String[] names = authors[i].split(", ");
if (names.length > 0) {
sb.append(names[0]);
if (names.length > 1) {
sb.append(", ");
}
}
for (int j = 1; j < names.length; j++) {
if (j == 1) {
sb.append(ImportFormatReader.expandAll(names[j]));
} else {
sb.append(names[j]);
}
if (j < names.length - 1) {
sb.append(", ");
}
}
} else {
String[] names = authors[i].split(" ");
if (names.length > 0) {
sb.append(ImportFormatReader.expandAll(names[0]));
}
for (int j = 1; j < names.length; j++) {
sb.append(" ");
sb.append(names[j]);
}
}
if (i < authors.length - 1) {
sb.append(" and ");
}
}
return sb.toString().trim();
}
private static String expandAll(String s) {
//System.out.println("'"+s+"'");
// Avoid arrayindexoutof.... :
if (s.isEmpty()) {
return s;
}
// If only one character (uppercase letter), add a dot and return immediately:
if (s.length() == 1 && Character.isLetter(s.charAt(0)) &&
Character.isUpperCase(s.charAt(0))) {
return s + ".";
}
StringBuilder sb = new StringBuilder();
char c = s.charAt(0);
char d = 0;
for (int i = 1; i < s.length(); i++) {
d = s.charAt(i);
if (Character.isLetter(c) && Character.isUpperCase(c) &&
Character.isLetter(d) && Character.isUpperCase(d)) {
sb.append(c);
sb.append(". ");
} else {
sb.append(c);
}
c = d;
}
if (Character.isLetter(c) && Character.isUpperCase(c) &&
Character.isLetter(d) && Character.isUpperCase(d)) {
sb.append(c);
sb.append(". ");
} else {
sb.append(c);
}
return sb.toString().trim();
}
static File checkAndCreateFile(String filename) {
File f = new File(filename);
if (!f.exists() && !f.canRead() && !f.isFile()) {
LOGGER.info("Error " + filename + " is not a valid file and|or is not readable.");
return null;
} else {
return f;
}
}
// Set a field, unless the string to set is empty.
public static void setIfNecessary(BibtexEntry be, String field, String content) {
if (!content.equals("")) {
be.setField(field, content);
}
}
public static Reader getUTF8Reader(File f) throws IOException {
return getReader(f, "UTF-8");
}
public static Reader getUTF16Reader(File f) throws IOException {
return getReader(f, "UTF-16");
}
public static Reader getReader(File f, String encoding)
throws IOException {
InputStreamReader reader;
reader = new InputStreamReader(new FileInputStream(f), encoding);
return reader;
}
public static Reader getReaderDefaultEncoding(InputStream in)
throws IOException {
InputStreamReader reader;
reader = new InputStreamReader(in, Globals.prefs.get(JabRefPreferences.DEFAULT_ENCODING));
return reader;
}
/**
* Receives an ArrayList of BibtexEntry instances, iterates through them, and
* removes all entries that have no fields set. This is useful for rooting out
* an unsucessful import (wrong format) that returns a number of empty entries.
*/
private static void purgeEmptyEntries(Collection<BibtexEntry> entries) {
for (Iterator<BibtexEntry> i = entries.iterator(); i.hasNext(); ) {
BibtexEntry entry = i.next();
// If there are no fields, remove the entry:
if (entry.getAllFields().isEmpty()) {
i.remove();
}
}
}
public static class UnknownFormatImport {
public final String format;
public final ParserResult parserResult;
public UnknownFormatImport(String format, ParserResult parserResult) {
this.format = format;
this.parserResult = parserResult;
}
}
/**
* Tries to import a file by iterating through the available import filters,
* and keeping the import that seems most promising.
* <p>
* If all fails this method attempts to read this file as bibtex.
*
* @throws IOException
*/
public UnknownFormatImport importUnknownFormat(String filename) {
// we don't use a provided OutputPrinter (such as the JabRef frame),
// as we don't want to see any outputs from failed importers:
// we expect failures and do not want to report them to the user
OutputPrinterToNull nullOutput = new OutputPrinterToNull();
// stores ref to best result, gets updated at the next loop
List<BibtexEntry> bestResult = null;
int bestResultCount = 0;
String bestFormatName = null;
// Cycle through all importers:
for (ImportFormat imFo : getImportFormats()) {
try {
List<BibtexEntry> entries = importFromFile(imFo, filename, nullOutput);
int entryCount;
if (entries == null) {
entryCount = 0;
} else {
ImportFormatReader.purgeEmptyEntries(entries);
entryCount = entries.size();
}
if (entryCount > bestResultCount) {
bestResult = entries;
bestResultCount = bestResult.size();
bestFormatName = imFo.getFormatName();
}
} catch (IOException ex) {
// The import didn't succeed. Go on.
}
}
if (bestResult != null) {
// we found something
ParserResult parserResult = new ParserResult(bestResult);
return new UnknownFormatImport(bestFormatName, parserResult);
}
// Finally, if all else fails, see if it is a BibTeX file:
try {
ParserResult pr = OpenDatabaseAction.loadDatabase(new File(filename),
Globals.prefs.get(JabRefPreferences.DEFAULT_ENCODING));
if (pr.getDatabase().getEntryCount() > 0
|| pr.getDatabase().getStringCount() > 0) {
pr.setFile(new File(filename));
return new UnknownFormatImport(ImportFormatReader.BIBTEX_FORMAT, pr);
}
} catch (Throwable ex) {
return null;
}
return null;
}
} |
package net.sourceforge.cilib.algorithm;
import java.util.ArrayList;
import java.util.List;
import net.sourceforge.cilib.entity.visitor.TopologyVisitor;
import net.sourceforge.cilib.problem.OptimisationProblem;
import net.sourceforge.cilib.problem.OptimisationSolution;
import net.sourceforge.cilib.stoppingcondition.StoppingCondition;
import net.sourceforge.cilib.util.Cloneable;
/**
* <p>
* All algorithms in CIlib should be subclasses of <code>Algorithm</code>. This class handles
* stopping criteria, events, threading and measurements. Subclasses of <code>Algorithm</code>
* must provide an implementation for <code>protected abstract void performIteration()</code>. If
* a subclass overrides {@link #initialise()} then it must call <code>super.initialise()</code>.
* Failure to do so will cause an {@linkplain InitialisationException} to be thrown when {@link #run()}
* is called.
* </p>
* @author Edwin Peer
*/
public abstract class Algorithm implements Cloneable, Runnable {
private static final long serialVersionUID = 7197544770653732632L;
private List<StoppingCondition> stoppingConditions;
private List<AlgorithmListener> algorithmListeners;
private int iterations;
private volatile boolean running;
private boolean initialised;
protected OptimisationProblem optimisationProblem;
/**
* This {@linkplain ThreadLocal} variable maintains the stack of the currently
* executing algorithm. It is defined as a static member and as a result is not
* required to be marked as transient as static members are not allowed to be
* serializable according to the Java Specification.
*/
private static ThreadLocal<AlgorithmStack> currentAlgorithmStack = new ThreadLocal<AlgorithmStack>() {
@Override
protected AlgorithmStack initialValue() {
return new AlgorithmStack();
}
};
/**
* Default constructor for {@linkplain Algorithm} classes. Sets up the correct state
* for the instance and initialises the needed containers needed for the different
* {@linkplain AlgorithmEvent}s that are generated.
*/
protected Algorithm() {
stoppingConditions = new ArrayList<StoppingCondition>();
algorithmListeners = new ArrayList<AlgorithmListener>();
running = false;
initialised = false;
}
/**
* Copy constructor. Create a deep copy of the provided instance and return it.
* @param copy The instance to copy.
*/
public Algorithm(Algorithm copy) {
stoppingConditions = new ArrayList<StoppingCondition>();
for (StoppingCondition stoppingCondition : copy.stoppingConditions) {
StoppingCondition clone = stoppingCondition.getClone();
clone.setAlgorithm(this);
stoppingConditions.add(clone);
}
algorithmListeners = new ArrayList<AlgorithmListener>();
for (AlgorithmListener listen : copy.algorithmListeners) {
algorithmListeners.add(listen.getClone());
}
running = false;
initialised = false;
if (copy.optimisationProblem != null)
optimisationProblem = copy.optimisationProblem.getClone();
}
/**
* {@inheritDoc}
*/
@Override
public abstract Algorithm getClone();
/**
* Reset the {@linkplain Algorithm} internals if needed.
*/
public void reset() {
throw new UnsupportedOperationException("'reset()' method not implemented for '" + this.getClass().getName() + "'");
}
/**
* Initialises the algorithm. Must be called before {@link #run()} is called.
*/
public final void initialise() {
iterations = 0;
running = true;
initialised = true;
if (stoppingConditions.isEmpty()) {
throw new InitialisationException("No stopping conditions specified");
}
currentAlgorithmStack.get().push(this);
performInitialisation();
currentAlgorithmStack.get().pop();
}
/**
* Perform the actions of the current {@linkplain Algorithm} for a single iteration. This
* method calls {@linkplain Algorithm#algorithmIteration()} after it performs some
* internal tasks by maintaining the stack of the currently executing algorithm instances.
*/
public final void performIteration() {
currentAlgorithmStack.get().push(this);
algorithmIteration();
iterations++;
currentAlgorithmStack.get().pop();
}
/**
* The actual operations that the current {@linkplain Algorithm} performs within a single
* iteration.
*/
protected abstract void algorithmIteration();
/**
* Perform the needed initialisation required before the execution of the algorithm
* starts.
*/
public void performInitialisation() {
// subclasses can override the behaviour for this method
}
/**
* Perform the needed unintialisation steps after the algorithm completes it's
* execution.
*/
public void performUninitialisation() {
// subclasses can override the behaviour for this method
}
/**
 * Executes the algorithm: fires the started event, iterates until a stopping
 * condition completes or {@link #terminate()} is called, fires the matching
 * finished/terminated event, and finally uninitialises.
 *
 * Cleanup ({@link #performUninitialisation()} and removal of the thread-local
 * algorithm stack) now runs in a finally block: previously an exception thrown
 * during an iteration or by a listener skipped both, leaking the thread-local
 * stack entry for this thread.
 *
 * @exception InitialisationException algorithm was not properly initialised.
 */
@Override
public void run() {
    if (!initialised) {
        throw new InitialisationException("Algorithm not initialised");
    }
    fireAlgorithmStarted();
    currentAlgorithmStack.get().push(this);
    try {
        while (running && (!isFinished())) {
            performIteration();
            fireIterationCompleted();
        }
        if (running) {
            fireAlgorithmFinished();
        } else {
            fireAlgorithmTerminated();
        }
    } finally {
        performUninitialisation();
        // Discard the whole thread-local stack, as the original did.
        currentAlgorithmStack.remove();
    }
}
/**
 * Adds a stopping condition and links it back to this algorithm so it can
 * observe progress.
 * @param stoppingCondition A {@link net.sourceforge.cilib.stoppingcondition.StoppingCondition}
 *        to be added.
 */
public final void addStoppingCondition(StoppingCondition stoppingCondition) {
    stoppingCondition.setAlgorithm(this);
    this.stoppingConditions.add(stoppingCondition);
}
/**
 * Removes a previously added stopping condition.
 * @param stoppingCondition The {@link net.sourceforge.cilib.stoppingcondition.StoppingCondition}
 *        to be removed.
 */
public final void removeStoppingCondition(StoppingCondition stoppingCondition) {
    this.stoppingConditions.remove(stoppingCondition);
}
/**
 * Adds an algorithm event listener. Event listeners are notified at various stages
 * during the execution of an algorithm (started, iteration completed, finished,
 * terminated).
 * @param listener An {@link AlgorithmListener} to be added.
 */
public final void addAlgorithmListener(AlgorithmListener listener) {
    this.algorithmListeners.add(listener);
}
/**
 * Removes a previously registered algorithm event listener.
 * @param listener The {@link AlgorithmListener} to be removed.
 */
public final void removeAlgorithmListener(AlgorithmListener listener) {
    this.algorithmListeners.remove(listener);
}
/**
 * Returns the number of iterations that have been performed by the algorithm.
 * @return The number of completed iterations.
 */
public final int getIterations() {
    return this.iterations;
}
/**
 * Returns how close the algorithm is to completion, as a fraction in [0, 1].
 * The value reported is that of the stopping condition closest to completion.
 * @return The completion fraction of the most-complete stopping condition.
 */
public final double getPercentageComplete() {
    double best = 0;
    for (StoppingCondition condition : stoppingConditions) {
        double completed = condition.getPercentageCompleted();
        if (completed > best) {
            best = completed;
        }
    }
    return best;
}
/**
 * Returns true if the algorithm has finished executing, i.e. at least one
 * stopping condition reports completion.
 * @return true if any stopping condition is completed, false otherwise.
 */
public final boolean isFinished() {
    for (StoppingCondition condition : stoppingConditions) {
        if (!condition.isCompleted()) {
            continue;
        }
        return true;
    }
    return false;
}
/**
 * Terminates the algorithm: the run loop exits after the current iteration
 * and the terminated event is fired instead of the finished event.
 */
public final void terminate() {
    this.running = false;
}
/**
 * Accessor for the top-level currently executing algorithm running in the current thread.
 * NOTE(review): peeks the thread-local stack without an emptiness check, so calling
 * this outside an initialise/run cycle presumably fails on the empty stack — confirm.
 * @return the instance of the algorithm that is running in the current thread.
 */
public static Algorithm get() {
    return currentAlgorithmStack.get().peek();
}
/**
 * Static accessor to allow the current level of algorithm composition to be returned.
 * @see AlgorithmStack#asList()
 * @return An unmodifiable list of algorithms.
 */
public static List<Algorithm> getAlgorithmList() {
    AlgorithmStack stack = currentAlgorithmStack.get();
    return stack.asList();
}
/**
 * Get the current list of {@linkplain StoppingCondition} instances that are
 * associated with the current {@linkplain Algorithm}. The live list is
 * returned, not a copy.
 * @return The list of {@linkplain StoppingCondition} instances associated with
 *         the current {@linkplain Algorithm}.
 */
public List<StoppingCondition> getStoppingConditions() {
    return stoppingConditions;
}
/**
 * Notify every registered listener that the {@linkplain Algorithm} has started
 * execution. Each listener receives its own {@linkplain AlgorithmEvent}.
 */
private void fireAlgorithmStarted() {
    for (AlgorithmListener listener : algorithmListeners) {
        AlgorithmEvent event = new AlgorithmEvent(this);
        listener.algorithmStarted(event);
    }
}
/**
 * Notify every registered listener that the {@linkplain Algorithm} has finished
 * execution normally. Each listener receives its own {@linkplain AlgorithmEvent}.
 */
private void fireAlgorithmFinished() {
    for (AlgorithmListener listener : algorithmListeners) {
        AlgorithmEvent event = new AlgorithmEvent(this);
        listener.algorithmFinished(event);
    }
}
/**
 * Notify every registered listener that the {@linkplain Algorithm} has been
 * terminated via {@link #terminate()}. Each listener receives its own
 * {@linkplain AlgorithmEvent}.
 */
private void fireAlgorithmTerminated() {
    for (AlgorithmListener listener : algorithmListeners) {
        AlgorithmEvent event = new AlgorithmEvent(this);
        listener.algorithmTerminated(event);
    }
}
/**
 * Notify every registered listener that the {@linkplain Algorithm} has completed
 * an iteration. Each listener receives its own {@linkplain AlgorithmEvent}.
 */
private void fireIterationCompleted() {
    for (AlgorithmListener listener : algorithmListeners) {
        AlgorithmEvent event = new AlgorithmEvent(this);
        listener.iterationCompleted(event);
    }
}
/**
 * Set the optimisation problem to be solved. By default, the problem is <code>null</code>.
 * That is, it is necessary to set the optimisation problem before calling {@link #initialise()}.
 * @param problem An implementation of the
 *        {@link net.sourceforge.cilib.problem.OptimisationProblemAdapter} interface.
 */
public void setOptimisationProblem(OptimisationProblem problem) {
    optimisationProblem = problem;
}
/**
 * Get the currently configured {@linkplain OptimisationProblem}.
 * @return The configured {@linkplain OptimisationProblem}, or <code>null</code>
 *         if none has been set.
 */
public OptimisationProblem getOptimisationProblem() {
    return optimisationProblem;
}
/**
 * Get the best current solution. This best solution is determined from the personal bests of the
 * particles.
 * @return The <code>OptimisationSolution</code> representing the best solution.
 */
public abstract OptimisationSolution getBestSolution();
/**
 * Get the collection of best solutions. This result does not actually make sense in the normal
 * PSO algorithm, but rather in a multi-objective optimisation.
 * @return The {@code List<OptimisationSolution>} containing the solutions.
 */
public abstract List<OptimisationSolution> getSolutions();
/**
 * General method to accept a visitor to perform a calculation on the current algorithm. The
 * operation is generally deferred down to the underlying topology associated with the
 * algorithm, as the algorithm does not contain information, but rather only behaviour to alter
 * the candidate solutions that are managed by the <tt>Topology</tt>.
 * @param visitor The <tt>Visitor</tt> to be applied to the algorithm.
 * @return The result of the visitor operation.
 */
public abstract double accept(TopologyVisitor visitor);
} |
package no.ntnu.okse.protocol.wsn;
import com.sun.org.apache.xerces.internal.dom.ElementNSImpl;
import no.ntnu.okse.Application;
import no.ntnu.okse.core.CoreService;
import no.ntnu.okse.core.messaging.Message;
import no.ntnu.okse.core.messaging.MessageService;
import no.ntnu.okse.core.subscription.Publisher;
import no.ntnu.okse.core.subscription.Subscriber;
import no.ntnu.okse.core.topic.TopicService;
import org.apache.log4j.Logger;
import org.ntnunotif.wsnu.base.internal.Hub;
import org.ntnunotif.wsnu.base.net.NuNamespaceContextResolver;
import org.ntnunotif.wsnu.base.topics.TopicUtils;
import org.ntnunotif.wsnu.base.topics.TopicValidator;
import org.ntnunotif.wsnu.base.util.InternalMessage;
import org.ntnunotif.wsnu.services.eventhandling.PublisherRegistrationEvent;
import org.ntnunotif.wsnu.services.eventhandling.SubscriptionEvent;
import org.ntnunotif.wsnu.services.filterhandling.FilterSupport;
import org.ntnunotif.wsnu.services.general.ExceptionUtilities;
import org.ntnunotif.wsnu.services.general.HelperClasses;
import org.ntnunotif.wsnu.services.general.ServiceUtilities;
import org.ntnunotif.wsnu.services.general.WsnUtilities;
import org.ntnunotif.wsnu.services.implementations.notificationbroker.AbstractNotificationBroker;
import org.oasis_open.docs.wsn.b_2.*;
import org.oasis_open.docs.wsn.br_2.RegisterPublisher;
import org.oasis_open.docs.wsn.br_2.RegisterPublisherResponse;
import org.oasis_open.docs.wsn.brw_2.PublisherRegistrationFailedFault;
import org.oasis_open.docs.wsn.brw_2.PublisherRegistrationRejectedFault;
import org.oasis_open.docs.wsn.bw_2.*;
import org.oasis_open.docs.wsrf.rw_2.ResourceUnknownFault;
import javax.jws.*;
import javax.jws.soap.SOAPBinding;
import javax.xml.XMLConstants;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.annotation.XmlSeeAlso;
import javax.xml.datatype.DatatypeConfigurationException;
import javax.xml.datatype.DatatypeFactory;
import javax.xml.datatype.XMLGregorianCalendar;
import javax.xml.namespace.NamespaceContext;
import javax.xml.namespace.QName;
import javax.xml.ws.wsaddressing.W3CEndpointReference;
import javax.xml.ws.wsaddressing.W3CEndpointReferenceBuilder;
import java.util.*;
@WebService(targetNamespace = "http://docs.oasis-open.org/wsn/brw-2", name = "NotificationBroker")
@XmlSeeAlso({org.oasis_open.docs.wsn.t_1.ObjectFactory.class, org.oasis_open.docs.wsn.br_2.ObjectFactory.class, org.oasis_open.docs.wsrf.r_2.ObjectFactory.class, org.oasis_open.docs.wsrf.bf_2.ObjectFactory.class, org.oasis_open.docs.wsn.b_2.ObjectFactory.class})
@SOAPBinding(parameterStyle = SOAPBinding.ParameterStyle.BARE)
public class WSNCommandProxy extends AbstractNotificationBroker {
// Class-scoped logger; assigned in both constructors.
private Logger log;
// WS-Nu filter support used to evaluate topic/content filters on Notify messages.
private FilterSupport filterSupport;
// OKSE subscription manager; also installed as the WS-Nu superclass manager.
private WSNSubscriptionManager _subscriptionManager;
// OKSE publisher registration manager; mirrors the WS-Nu registrationManager field.
private WSNRegistrationManager _registrationManager;
/**
 * Creates a broker proxy bound to the given hub. Subscription and registration
 * managers start out unset and must be provided via their setters.
 * @param hub The WS-Nu hub that messages are forwarded through.
 */
public WSNCommandProxy(Hub hub) {
    log = Logger.getLogger(WSNCommandProxy.class.getName());
    setHub(hub);
    filterSupport = FilterSupport.createDefaultFilterSupport();
    _subscriptionManager = null;
    _registrationManager = null;
}
/**
 * Creates a broker proxy without a hub. The hub as well as the subscription and
 * registration managers must be provided before use.
 */
public WSNCommandProxy() {
    log = Logger.getLogger(WSNCommandProxy.class.getName());
    filterSupport = FilterSupport.createDefaultFilterSupport();
    _subscriptionManager = null;
    _registrationManager = null;
}
/**
 * Registers the subscription manager. For now this sets both the OKSE-specific
 * field and the WS-Nu superclass manager field, and flags that a manager is in use.
 * @param subManager The WSNSubscriptionManager to install.
 */
public void setSubscriptionManager(WSNSubscriptionManager subManager) {
    _subscriptionManager = subManager;
    manager = subManager;
    usesManager = true;
}
/**
 * Registers the publisher registration manager on both the OKSE-specific field
 * and the WS-Nu superclass field.
 * @param pubManager The WSNRegistrationManager to install.
 */
public void setRegistrationManager(WSNRegistrationManager pubManager) {
    _registrationManager = pubManager;
    registrationManager = pubManager;
}
/**
 * Returns the WSNSubscriptionManager associated with this broker proxy.
 * @return The WSNSubscriptionManager instance, or null if not yet set.
 */
public WSNSubscriptionManager getProxySubscriptionManager() {
    return _subscriptionManager;
}
/**
 * Returns the WSNRegistrationManager associated with this broker proxy.
 * @return The WSNRegistrationManager instance, or null if not yet set.
 */
public WSNRegistrationManager getProxyRegistrationManager() {
    return _registrationManager;
}
/**
 * Check if a subscription / registration -key exists in either manager.
 * @param s The key to check existence for.
 * @return True if the key exists, false otherwise.
 */
@Override
@WebMethod(exclude = true)
public boolean keyExists(String s) {
    if (_subscriptionManager.keyExists(s)) {
        return true;
    }
    return _registrationManager.keyExists(s);
}
/**
 * Fetch the collection of recipient subscriptionKeys from the subscription manager.
 * @return A collection containing the subscriptionKeys as strings.
 */
@Override
@WebMethod(exclude = true)
protected Collection<String> getAllRecipients() {
    Collection<String> recipients = _subscriptionManager.getAllRecipients();
    return recipients;
}
/**
 * Retrieves the endpointReference of a subscriber from its subscription key.
 * @param subscriptionKey The subscription key representing the subscriber.
 * @return A string containing the endpointReference of the subscriber.
 */
@Override
protected String getEndpointReferenceOfRecipient(String subscriptionKey) {
    SubscriptionHandle handle = _subscriptionManager.getSubscriptionHandle(subscriptionKey);
    return handle.endpointTerminationTuple.endpoint;
}
/**
 * Override of the superclass method; ensures the correct manager endpoint is
 * referenced, as WS-Nu only references the SUBSCRIPTION manager, not the
 * publisherRegistrationManager. An unrecognised prefix yields an empty base URL.
 * @param prefix The prefix-token to be used as URL param KEY.
 * @param key The SubscriptionKey or PublisherRegistrationKey used as URL param VALUE.
 * @return A concatenated full URL of the appropriate endpoint, param key and param value.
 */
@Override
@WebMethod(exclude = true)
public String generateHashedURLFromKey(String prefix, String key) {
    String base = "";
    // Pick the manager endpoint matching the prefix token.
    if (prefix.equals(_subscriptionManager.WSN_SUBSCRIBER_TOKEN)) {
        base = _subscriptionManager.getEndpointReference();
    } else if (prefix.equals(_registrationManager.WSN_PUBLISHER_TOKEN)) {
        base = _registrationManager.getEndpointReference();
    }
    // Append the prefix and the associated subscription/registration key.
    return base + "/?" + prefix + "=" + key;
}
/**
 * Filters the recipients eligible for a notify.
 * @param s The subscriptionKey of the subscriber.
 * @param notify The Notify object to be checked.
 * @param nuNamespaceContextResolver An instance of NuNamespaceContextResolver.
 * @return The Notify object if it passed validation, null otherwise.
 */
@Override
@WebMethod(exclude = true)
protected Notify getRecipientFilteredNotify(String s, Notify notify, NuNamespaceContextResolver nuNamespaceContextResolver) {
    // Unknown subscriptions receive nothing.
    if (!_subscriptionManager.hasSubscription(s)) {
        return null;
    }
    // Paused subscriptions receive nothing either.
    if (_subscriptionManager.subscriptionIsPaused(s)) {
        return null;
    }
    // Without filter support the message passes through untouched.
    if (filterSupport == null) {
        return notify;
    }
    // Evaluate the subscription's filters against the message.
    SubscriptionHandle handle = _subscriptionManager.getSubscriptionHandle(s);
    return filterSupport.evaluateNotifyToSubscription(notify, handle.subscriptionInfo, nuNamespaceContextResolver);
}
/**
 * Sends a single Notify to one explicit W3C endpoint, bypassing recipient
 * filtering. The message is dispatched asynchronously through the CoreService.
 * @param notify The Notify payload to forward.
 * @param w3CEndpointReference The endpoint to deliver to.
 */
@WebMethod(exclude = true)
public void sendSingleNotify(Notify notify, W3CEndpointReference w3CEndpointReference) {
    // Without a hub there is nowhere to forward the request; terminate early.
    if (hub == null) {
        log.error("Tried to send message with hub null. If a quickBuild is available," +
                " consider running this before sending messages");
        return;
    }
    log.debug("Was told to send single notify to a target");
    // Build the WS-Nu internal message with the status flags set up front.
    int statusFlags = InternalMessage.STATUS_OK
            | InternalMessage.STATUS_HAS_MESSAGE
            | InternalMessage.STATUS_ENDPOINTREF_IS_SET;
    InternalMessage outMessage = new InternalMessage(statusFlags, notify);
    // Point the request information at the resolved endpoint address.
    outMessage.getRequestInformation().setEndpointReference(ServiceUtilities.getAddress(w3CEndpointReference));
    log.debug("Forwarding Notify");
    // Hand off to the request parser asynchronously.
    CoreService.getInstance().execute(() -> hub.acceptLocalMessage(outMessage));
}
/**
 * Sends a Notification message. For each message holder bound to a topic known
 * to OKSE, an internal OKSE Message is generated and distributed via the
 * MessageService; the Notify is then relayed to every eligible WS-Notification
 * recipient (skipping expired/paused subscriptions, applying filters and
 * honouring the UseRaw attribute).
 *
 * Fix: corrected the debug log typo "Messace object" -> "Message object".
 *
 * @param notify The Notify object containing the message(s)
 * @param namespaceContextResolver An instance of NuNamespaceContextResolver
 */
@Override
public void sendNotification(Notify notify, NuNamespaceContextResolver namespaceContextResolver) {
    // If this somehow is called without WSNRequestParser set as hub, terminate
    if (hub == null) {
        log.error("Tried to send message with hub null. If a quickBuild is available," +
                " consider running this before sending messages");
        return;
    }
    // Store the MessageService and TopicService instances
    MessageService messageService = MessageService.getInstance();
    TopicService topicService = TopicService.getInstance();
    // Declare the message object
    Message message;
    for (NotificationMessageHolderType messageHolderType : notify.getNotificationMessage()) {
        TopicExpressionType topic = messageHolderType.getTopic();
        // If it is connected to a topic, remember it
        if (topic != null) {
            try {
                List<QName> topicQNames = TopicValidator.evaluateTopicExpressionToQName(topic, namespaceContextResolver.resolveNamespaceContext(topic));
                String topicName = TopicUtils.topicToString(topicQNames);
                topicName = WSNTools.removeNameSpacePrefixesFromTopicExpression(topicName);
                log.debug("Message topic extracted: " + topicName);
                // Only distribute internally if the topic exists in the OKSE TopicService
                if (topicService.topicExists(topicName)) {
                    log.debug("Topic existed, generating OKSE Message for distribution");
                    // Extract the content
                    String content = WSNTools.extractRawXmlContentFromDomNode((ElementNSImpl) messageHolderType.getMessage().getAny());
                    log.debug("Message object: " + messageHolderType.getMessage().toString());
                    log.debug("Message content: " + content);
                    // Generate the message
                    message = new Message(content, topicName, null, WSNotificationServer.getInstance().getProtocolServerType());
                    log.debug("OKSE Message generated");
                    // Extract the endpoint reference from publisher
                    W3CEndpointReference publisherReference = messageHolderType.getProducerReference();
                    // If we have a publisherReference, add it to the message
                    if (publisherReference != null) {
                        log.debug("We had a publisher-reference, updating OKSE Message");
                        message.setAttribute(WSNSubscriptionManager.WSN_ENDPOINT_TOKEN, ServiceUtilities.getAddress(publisherReference));
                    }
                    // Add the message to the message queue for dispatch
                    messageService.distributeMessage(message);
                }
            } catch (InvalidTopicExpressionFault invalidTopicExpressionFault) {
                log.warn("Tried to send a topic with an invalid expression");
            } catch (MultipleTopicsSpecifiedFault multipleTopicsSpecifiedFault) {
                log.warn("Tried to send a message with multiple topics");
            } catch (TopicExpressionDialectUnknownFault topicExpressionDialectUnknownFault) {
                log.warn("Tried to send a topic with an invalid expression dialect");
            }
        }
    }
    /* Start Message Parsing */
    log.debug("Start message parsing and namespace binding");
    // Resolve namespace contexts for each topic.
    // NOTE(review): this loop has no visible side effect beyond the resolve call
    // itself - confirm whether additional binding work was intended here.
    for (NotificationMessageHolderType holderType : notify.getNotificationMessage()) {
        TopicExpressionType topic = holderType.getTopic();
        if (holderType.getTopic() != null) {
            NuNamespaceContextResolver.NuResolvedNamespaceContext context = namespaceContextResolver.resolveNamespaceContext(topic);
            if (context == null) {
                continue;
            }
        }
    }
    log.debug("Processing valid recipients...");
    // Update statistics
    WSNotificationServer.getInstance().incrementTotalMessagesReceived();
    // Remember current message with context
    currentMessage = notify;
    currentMessageNamespaceContextResolver = namespaceContextResolver;
    // For all valid recipients
    for (String recipient : this.getAllRecipients()) {
        // If the subscription has expired, continue
        if (_subscriptionManager.getSubscriber(recipient).hasExpired()) continue;
        // Apply filter handling, if any
        Notify toSend = getRecipientFilteredNotify(recipient, notify, namespaceContextResolver);
        // If any message was left to send, send it
        if (toSend != null) {
            InternalMessage outMessage = new InternalMessage(
                    InternalMessage.STATUS_OK |
                            InternalMessage.STATUS_HAS_MESSAGE |
                            InternalMessage.STATUS_ENDPOINTREF_IS_SET,
                    toSend
            );
            // Update the requestinformation
            outMessage.getRequestInformation().setEndpointReference(getEndpointReferenceOfRecipient(recipient));
            // If the recipient has requested UseRaw, remove Notify payload wrapping
            if (_subscriptionManager
                    .getSubscriber(recipient)
                    .getAttribute(WSNSubscriptionManager.WSN_USERAW_TOKEN) != null) {
                Object content = WSNTools.extractMessageContentFromNotify(toSend);
                // Update the InternalMessage with the content of the NotificationMessage
                outMessage.setMessage(content);
            }
            // Pass it along to the requestparser
            CoreService.getInstance().execute(() -> hub.acceptLocalMessage(outMessage));
        }
    }
    log.debug("Finished sending message to valid WS-Notification recipients");
}
/**
 * Implementation of the NotificationBroker's notify. This method does nothing but forward the
 * notify by calling {@link #sendNotification(Notify, NuNamespaceContextResolver)} with the
 * namespace context resolver taken from the current connection's request information.
 * @param notify The Notify object.
 */
@Override
@Oneway
@WebMethod(operationName = "Notify")
public void notify(@WebParam(partName = "Notify", name = "Notify", targetNamespace = "http://docs.oasis-open.org/wsn/b-2")
Notify notify) {
    this.sendNotification(notify, connection.getRequestInformation().getNamespaceContextResolver());
}
@Override
@WebMethod(operationName = "Subscribe")
public SubscribeResponse subscribe(@WebParam(partName = "SubscribeRequest", name = "Subscribe",
targetNamespace = "http://docs.oasis-open.org/wsn/b-2") Subscribe subscribeRequest) throws NotifyMessageNotSupportedFault, UnrecognizedPolicyRequestFault, TopicExpressionDialectUnknownFault, ResourceUnknownFault, InvalidTopicExpressionFault, UnsupportedPolicyRequestFault, InvalidFilterFault, InvalidProducerPropertiesExpressionFault, UnacceptableInitialTerminationTimeFault, SubscribeCreationFailedFault, TopicNotSupportedFault, InvalidMessageContentExpressionFault {
W3CEndpointReference consumerEndpoint = subscribeRequest.getConsumerReference();
boolean useRaw = false;
if (consumerEndpoint == null) {
ExceptionUtilities.throwSubscribeCreationFailedFault("en", "Missing endpointreference");
}
String endpointReference = ServiceUtilities.getAddress(consumerEndpoint);
// EndpointReference is returned as "" from getAddress if something went wrong.
if(endpointReference.equals("")){
ExceptionUtilities.throwSubscribeCreationFailedFault("en", "EndpointReference malformatted or missing.");
}
// Check if the subscriber has requested non-Notify wrapped notifications
if (subscribeRequest.getSubscriptionPolicy() != null) {
for (Object o : subscribeRequest.getSubscriptionPolicy().getAny()) {
if (o.getClass().equals(UseRaw.class)) {
log.debug("Subscriber requested raw message format");
useRaw = true;
}
}
}
log.debug("Endpointreference is: " + endpointReference);
String requestAddress = "";
Integer port = 80;
if (endpointReference.contains(":")) {
String[] components = endpointReference.split(":");
try {
port = Integer.parseInt(components[components.length - 1]);
requestAddress = components[components.length - 2];
requestAddress = requestAddress.replace("
} catch (Exception e) {
log.error("Failed to parse endpointReference");
}
}
FilterType filters = subscribeRequest.getFilter();
Map<QName, Object> filtersPresent = null;
// Initialize topicContent and requestDialect and contentFilters
String rawTopicContent = null;
String requestDialect = null;
boolean topicExpressionIsXpath = false;
ArrayList<String> contentFilters = new ArrayList<>();
if (filters != null) {
log.debug("Filters present. Attempting to iterate over filters...");
filtersPresent = new HashMap<>();
for (Object o : filters.getAny()) {
if (o instanceof JAXBElement) {
JAXBElement filter = (JAXBElement) o;
log.debug("Fetching namespacecontext of filter value");
// Get the namespace context for this filter
NamespaceContext namespaceContext = connection.getRequestInformation().getNamespaceContext(filter.getValue());
if (filterSupport != null &&
filterSupport.supportsFilter(filter.getName(), filter.getValue(), namespaceContext)) {
// Extract the QName
QName fName = filter.getName();
log.debug("Subscription request contained filter: " + fName + " Value: " + filter.getValue());
// If we have a TopicExpressionType as current
if (filter.getValue() instanceof org.oasis_open.docs.wsn.b_2.TopicExpressionType) {
// Cast to proper type
TopicExpressionType type = (TopicExpressionType) filter.getValue();
// Extract the actual value of the element
log.debug("Content: " + type.getContent().get(0));
// Set it as the raw topic content string
rawTopicContent = WSNTools.removeNameSpacePrefixesFromTopicExpression(TopicUtils.extractExpression(type));
// List potential attributes
log.debug("Attributes: " + type.getOtherAttributes());
// List and add the dialect of the expression type
log.debug("Dialect: " + type.getDialect());
requestDialect = type.getDialect();
// Check if dialect was XPATH, then we need to update the flag and add as filter
// Since we cannot guarantee a single topic resolvement
if (requestDialect.equalsIgnoreCase(WSNTools._XpathTopicExpression)) {
topicExpressionIsXpath = true;
}
// Do we have a MessageContent filter (XPATH)
} else if (filter.getValue() instanceof org.oasis_open.docs.wsn.b_2.QueryExpressionType) {
// Cast to proper type
QueryExpressionType type = (QueryExpressionType) filter.getValue();
// For each potential expression, add to the message content filter set
type.getContent().stream().forEach(p -> {
log.debug("Content: " + p.toString());
contentFilters.add(p.toString());
});
requestDialect = type.getDialect();
// What XPATH dialect (or potentially other non-supported) was provided
log.debug("Dialect: " + type.getDialect());
}
// Add the filter to the WS-Nu filtersPresent set
filtersPresent.put(fName, filter.getValue());
} else {
log.warn("Subscription attempt with non-supported filter: " + filter.getName());
ExceptionUtilities.throwInvalidFilterFault("en", "Filter not supported for this producer: " +
filter.getName(), filter.getName());
}
}
}
}
// Initialize initial termination time
long terminationTime = 0;
// If it was provided in the request
if (subscribeRequest.getInitialTerminationTime() != null && !subscribeRequest.getInitialTerminationTime().isNil()) {
try {
terminationTime = ServiceUtilities.interpretTerminationTime(subscribeRequest.getInitialTerminationTime().getValue());
if (terminationTime < System.currentTimeMillis()) {
ExceptionUtilities.throwUnacceptableInitialTerminationTimeFault("en", "Termination time can not be before 'now'");
}
} catch (UnacceptableTerminationTimeFault unacceptableTerminationTimeFault) {
ExceptionUtilities.throwUnacceptableInitialTerminationTimeFault("en", "Malformated termination time");
}
} else {
/* Set it to terminate in half a year */
log.debug("Subscribe request had no termination time set, using default");
terminationTime = System.currentTimeMillis() + Application.DEFAULT_SUBSCRIPTION_TERMINATION_TIME;
}
SubscribeResponse response = new SubscribeResponse();
// Create a gregCalendar instance so we can create a xml object from it
GregorianCalendar gregorianCalendar = new GregorianCalendar();
gregorianCalendar.setTimeInMillis(terminationTime);
try {
XMLGregorianCalendar calendar = DatatypeFactory.newInstance().newXMLGregorianCalendar(gregorianCalendar);
response.setTerminationTime(calendar);
} catch (DatatypeConfigurationException e) {
log.error("Could not convert date time, is it formatted properly?");
ExceptionUtilities.throwUnacceptableInitialTerminationTimeFault("en", "Internal error: The date was not " +
"convertable to a gregorian calendar-instance. If the problem persists," +
"please post an issue at http://github.com/tOgg1/WS-Nu");
}
log.debug("Generating WS-Nu subscription hash");
/* Generate WS-Nu subscription hash */
String newSubscriptionKey = generateSubscriptionKey();
log.debug("Generating WS-Nu endpoint reference url to subscriptionManager using key: " + newSubscriptionKey + " and prefix: " + WsnUtilities.subscriptionString);
String subscriptionEndpoint = this.generateHashedURLFromKey(_subscriptionManager.WSN_SUBSCRIBER_TOKEN, newSubscriptionKey);
log.debug("Setting up W3C endpoint reference builder");
/* Build endpoint reference */
W3CEndpointReferenceBuilder builder = new W3CEndpointReferenceBuilder();
builder.address(subscriptionEndpoint);
log.debug("Building endpoint reference to response");
// Set the subscription reference on the SubscribeResponse object
response.setSubscriptionReference(builder.build());
log.debug("Preparing WS-Nu components needed for subscription");
/* Prepare WS-Nu components needed for a subscription */
FilterSupport.SubscriptionInfo subscriptionInfo = new FilterSupport.SubscriptionInfo(filtersPresent, connection.getRequestInformation().getNamespaceContextResolver());
HelperClasses.EndpointTerminationTuple endpointTerminationTuple;
endpointTerminationTuple = new HelperClasses.EndpointTerminationTuple(endpointReference, terminationTime);
SubscriptionHandle subscriptionHandle = new SubscriptionHandle(endpointTerminationTuple, subscriptionInfo);
log.debug("Preparing OKSE subscriber objects");
/* Prepare needed information for OKSE Subscriber object */
if (rawTopicContent != null) {
// If the expression is XPATH, we cannot resolve to a single topic, hence add as filter
// And set topic reference to null
if (topicExpressionIsXpath) {
contentFilters.add(rawTopicContent);
rawTopicContent = null;
}
// Check if the topic contains wildcards, dots or double separators
else if (rawTopicContent.contains("*") || rawTopicContent.contains("
rawTopicContent.contains("//.") || rawTopicContent.contains("/.")) {
log.debug("Topic expression contained XPATH or FullTopic wildcards or selectors, resetting topic and adding as filter");
contentFilters.add(rawTopicContent);
rawTopicContent = null;
} else {
log.debug("Sending addTopic request to TopicService");
TopicService.getInstance().addTopic(rawTopicContent);
}
} else {
log.debug("No topic was specified, setting to null and listening to all topics");
}
log.debug("Initializing OKSE subscriber object");
// Instanciate new OKSE Subscriber object
Subscriber subscriber = new Subscriber(requestAddress, port, rawTopicContent, WSNotificationServer.getInstance().getProtocolServerType());
// Set the wsn-subscriber hash key in attributes
subscriber.setAttribute(WSNSubscriptionManager.WSN_SUBSCRIBER_TOKEN, newSubscriptionKey);
subscriber.setAttribute(WSNSubscriptionManager.WSN_DIALECT_TOKEN, requestDialect);
subscriber.setTimeout(terminationTime);
// Add potential XPATH content filters discovered in the subscribe request
contentFilters.forEach(filter -> subscriber.addFilter(filter));
// Add useRaw flag if present
if (useRaw) {
subscriber.setAttribute(WSNSubscriptionManager.WSN_USERAW_TOKEN, "true");
subscriber.addFilter("UseRaw");
}
// Register the OKSE subscriber to the SubscriptionService, via the WSNSubscriptionManager
log.debug("Passing the subscriber to the SubscriptionService...");
_subscriptionManager.addSubscriber(subscriber, subscriptionHandle);
return response;
}
@Override
@WebResult(name = "RegisterPublisherResponse", targetNamespace = "http://docs.oasis-open.org/wsn/br-2", partName = "RegisterPublisherResponse")
@WebMethod(operationName = "RegisterPublisher")
public RegisterPublisherResponse registerPublisher(RegisterPublisher registerPublisherRequest) throws InvalidTopicExpressionFault, PublisherRegistrationFailedFault, ResourceUnknownFault, PublisherRegistrationRejectedFault, UnacceptableInitialTerminationTimeFault, TopicNotSupportedFault {
log.debug("registerPublisher called");
// Fetch the namespace context resolver
NuNamespaceContextResolver namespaceContextResolver = connection.getRequestInformation().getNamespaceContextResolver();
// Extract the publisher endpoint
W3CEndpointReference publisherEndpoint = registerPublisherRequest.getPublisherReference();
log.debug("Publisher endpoint is: " + publisherEndpoint);
// If we do not have an endpoint, produce a soapfault
if (publisherEndpoint == null) {
log.error("Missing endpoint reference in publisher registration request");
ExceptionUtilities.throwPublisherRegistrationFailedFault("en", "Missing endpointreference");
}
// Endpointreference extracted from the W3CEndpointReference
String endpointReference = ServiceUtilities.getAddress(registerPublisherRequest.getPublisherReference());
// EndpointReference is returned as "" from getAddress if something went wrong.
if(endpointReference.equals("")){
log.error("Failed to understand the endpoint reference");
ExceptionUtilities.throwPublisherRegistrationFailedFault("en", "Could not register publisher, failed to " +
"understand the endpoint reference");
}
String requestAddress = "";
Integer port = 80;
if (endpointReference.contains(":")) {
String[] components = endpointReference.split(":");
try {
port = Integer.parseInt(components[components.length - 1]);
requestAddress = components[components.length - 2];
requestAddress = requestAddress.replace("
} catch (Exception e) {
log.error("Failed to parse endpointReference");
}
}
List<TopicExpressionType> topics = registerPublisherRequest.getTopic();
String rawTopicString = "";
String rawDialect = "";
// Validate Topic Expressions
for (TopicExpressionType topic : topics) {
try {
if (!TopicValidator.isLegalExpression(topic, namespaceContextResolver.resolveNamespaceContext(topic))) {
log.error("Received an invalid topic expression");
ExceptionUtilities.throwTopicNotSupportedFault("en", "Expression given is not a legal topicexpression");
} else {
rawTopicString = WSNTools.removeNameSpacePrefixesFromTopicExpression(TopicUtils.extractExpression(topic));
rawDialect = topic.getDialect();
}
} catch (TopicExpressionDialectUnknownFault topicExpressionDialectUnknownFault) {
log.error("Received an unknown topic expression dialect");
ExceptionUtilities.throwInvalidTopicExpressionFault("en", "TopicExpressionDialect unknown");
}
}
// Fetch the termination time
long terminationTime = registerPublisherRequest.getInitialTerminationTime().toGregorianCalendar().getTimeInMillis();
// Validate the termination time
if (terminationTime < System.currentTimeMillis()) {
log.error("Caught an invalid termination time, must be in the future");
ExceptionUtilities.throwUnacceptableInitialTerminationTimeFault("en", "Invalid termination time. Can't be before current time");
}
// Generate a new subkey
String newPublisherKey = generateSubscriptionKey();
// Generate the publisherRegistrationEndpoint
String registrationEndpoint = generateHashedURLFromKey(WSNRegistrationManager.WSN_PUBLISHER_TOKEN, newPublisherKey);
// Send subscriptionRequest back if isDemand isRequested
if (registerPublisherRequest.isDemand()) {
log.info("Demand registration is TRUE, sending subrequest back");
WsnUtilities.sendSubscriptionRequest(endpointReference, getEndpointReference(), getHub());
}
// Create the necessary WS-Nu components needed for the RegisterPublisherResponse
HelperClasses.EndpointTerminationTuple endpointTerminationTuple = new HelperClasses.EndpointTerminationTuple(newPublisherKey, terminationTime);
PublisherHandle pubHandle = new PublisherHandle(endpointTerminationTuple, topics, registerPublisherRequest.isDemand());
// Set up OKSE publisher object
Publisher publisher = new Publisher(rawTopicString, requestAddress, port, WSNotificationServer.getInstance().getProtocolServerType());
publisher.setAttribute(WSNRegistrationManager.WSN_PUBLISHER_TOKEN, newPublisherKey);
publisher.setAttribute(WSNSubscriptionManager.WSN_DIALECT_TOKEN, rawDialect);
// Create the topic
TopicService.getInstance().addTopic(rawTopicString);
// Register the publisher
_registrationManager.addPublisher(publisher, pubHandle);
// Initialize the response payload
RegisterPublisherResponse response = new RegisterPublisherResponse();
// Build the endpoint reference
W3CEndpointReferenceBuilder builder = new W3CEndpointReferenceBuilder();
builder.address(registrationEndpoint);
// Update the response with endpointreference
response.setConsumerReference(builder.build());
response.setPublisherRegistrationReference(publisherEndpoint);
return response;
}
/**
 * Implementation of {@link org.oasis_open.docs.wsn.b_2.GetCurrentMessage}.
 *
 * This method will always fault unless message caching is enabled in the
 * {@code MessageService} singleton.
 *
 * @param getCurrentMessageRequest The request object
 * @return A {@link org.oasis_open.docs.wsn.b_2.GetCurrentMessageResponse} object with the latest message on the request topic.
 * @throws InvalidTopicExpressionFault Thrown either if the topic is invalid, or if no topic is given.
 * @throws TopicExpressionDialectUnknownFault Thrown if the topic expression uses a dialect not known
 * @throws MultipleTopicsSpecifiedFault Never thrown due to the nature of the {@link org.oasis_open.docs.wsn.b_2.GetCurrentMessage} object.
 * @throws ResourceUnknownFault Never thrown as of version 0.4, as WS-Resources is not implemented.
 * @throws NoCurrentMessageOnTopicFault If no message is listed on the current topic.
 * @throws TopicNotSupportedFault Never thrown as of version 0.3.
 */
@Override
@WebResult(name = "GetCurrentMessageResponse", targetNamespace = "http://docs.oasis-open.org/wsn/b-2",
        partName = "GetCurrentMessageResponse")
@WebMethod(operationName = "GetCurrentMessage")
public GetCurrentMessageResponse getCurrentMessage(GetCurrentMessage getCurrentMessageRequest) throws InvalidTopicExpressionFault, TopicExpressionDialectUnknownFault, MultipleTopicsSpecifiedFault, ResourceUnknownFault, NoCurrentMessageOnTopicFault, TopicNotSupportedFault {
    log.debug("getCurrentMessage called");
    // Find out which topic there was asked for (Exceptions automatically thrown)
    TopicExpressionType askedFor = getCurrentMessageRequest.getTopic();
    // Validate the topic element BEFORE it is dereferenced anywhere below.
    // (Previously the caching check below read askedFor's content first, which
    // caused a NullPointerException instead of the intended fault when the
    // topic element was missing.)
    if (askedFor == null) {
        log.warn("Topic missing from getCurrentMessage request");
        ExceptionUtilities.throwInvalidTopicExpressionFault("en", "Topic missing from request.");
    }
    if (!MessageService.getInstance().isCachingMessages()) {
        log.warn("Someone tried to get current message when caching is disabled");
        ExceptionUtilities.throwNoCurrentMessageOnTopicFault("en", "No messages are stored on Topic " +
                getCurrentMessageRequest.getTopic().getContent());
    }
    log.debug("Accepted getCurrentMessage");
    // Fetch the topic QNames
    List<QName> topicQNames = TopicValidator.evaluateTopicExpressionToQName(askedFor, connection.getRequestInformation().getNamespaceContext(askedFor));
    // Fetch the topic as a String
    String topicName = TopicUtils.topicToString(topicQNames);
    topicName = WSNTools.removeNameSpacePrefixesFromTopicExpression(topicName);
    // Fetch the latest message from the MessageService
    Message currentMessage = MessageService.getInstance().getLatestMessage(topicName);
    if (currentMessage == null) {
        log.warn("Was asked for current message on a topic that was not sent");
        ExceptionUtilities.throwNoCurrentMessageOnTopicFault("en", "There was no messages on the topic requested");
        // Unreachable if the fault helper throws, but keeps the compiler satisfied
        return null;
    } else {
        // Initialize the response object
        GetCurrentMessageResponse response = new GetCurrentMessageResponse();
        // Create an unmarshalled and linked Notify and extract the Message content from it
        Object messageObject = WSNTools.extractMessageContentFromNotify(WSNTools.createNotify(currentMessage));
        log.debug("Generated Notify wrapper");
        response.getAny().add(messageObject);
        // Return the response
        return response;
    }
}
/* Begin observation methods */
/**
 * Observer callback fired when a publisher registration event occurs.
 * The event is only traced at debug level; no further action is taken here.
 *
 * @param publisherRegistrationEvent the registration event that was raised
 */
@Override
public void publisherChanged(PublisherRegistrationEvent publisherRegistrationEvent) {
    log.debug("PublisherChanged event triggered");
}
/**
 * Observer callback fired when a subscription event occurs.
 * The event is only traced at debug level; no further action is taken here.
 *
 * @param subscriptionEvent the subscription event that was raised
 */
@Override
public void subscriptionChanged(SubscriptionEvent subscriptionEvent) {
    log.debug("SubscriptionChanged event triggered");
}
/* End observation methods */
} |